""" eve.io.media ~~~~~~~~~~~~ Media storage for Eve-powered APIs. :copyright: (c) 2014 by Nicola Iarocci. :license: BSD, see LICENSE for more details. """ class MediaStorage(object): """ The MediaStorage class provides a standardized API for storing files, along with a set of default behaviors that all other storage systems can inherit or override as necessary. ..versioneadded:: 0.3 """ def __init__(self, app=None): """ :param app: the flask application (eve itself). This can be used by the class to access, amongst other things, the app.config object to retrieve class-specific settings. """ self.app = app def get(self, id_or_filename): """ Opens the file given by name or unique id. Note that although the returned file is guaranteed to be a File object, it might actually be some subclass. Returns None if no file was found. """ raise NotImplementedError def put(self, content, filename=None, content_type=None): """ Saves a new file using the storage system, preferably with the name specified. If there already exists a file with this name name, the storage system may modify the filename as necessary to get a unique name. Depending on the storage system, a unique id or the actual name of the stored file will be returned. The content type argument is used to appropriately identify the file when it is retrieved. .. versionchanged:: 0.5 Allow filename to be optional (#414). """ raise NotImplementedError def delete(self, id_or_filename): """ Deletes the file referenced by name or unique id. If deletion is not supported on the target storage system this will raise NotImplementedError instead """ raise NotImplementedError def exists(self, id_or_filename): """ Returns True if a file referenced by the given name or unique id already exists in the storage system, or False if the name is available for a new file. """ raise NotImplementedError
from django.utils.translation import ugettext as _
from django.db import models

from jmbo.models import ModelBase


class Superhero(ModelBase):
    name = models.CharField(max_length=256, editable=False)

    class Meta:
        verbose_name_plural = _("Superheroes")
''' Processor functions for images '''
import numpy as np


def squeeze_image(img):
    ''' Return image, remove axes length 1 at end of image shape

    For example, an image may have shape (10,20,30,1,1).  In this case
    squeeze will result in an image with shape (10,20,30).  See doctests
    for further description of behavior.

    Parameters
    ----------
    img : ``SpatialImage``

    Returns
    -------
    squeezed_img : ``SpatialImage``
       Copy of img, such that data, and data shape have been squeezed,
       for dimensions > 3rd, and at the end of the shape list

    Examples
    --------
    >>> import nipype.externals.pynifti as nf
    >>> shape = (10,20,30,1,1)
    >>> data = np.arange(np.prod(shape)).reshape(shape)
    >>> affine = np.eye(4)
    >>> img = nf.Nifti1Image(data, affine)
    >>> img.get_shape()
    (10, 20, 30, 1, 1)
    >>> img2 = squeeze_image(img)
    >>> img2.get_shape()
    (10, 20, 30)

    If the data are 3D then last dimensions of 1 are ignored

    >>> shape = (10,1,1)
    >>> data = np.arange(np.prod(shape)).reshape(shape)
    >>> img = nf.ni1.Nifti1Image(data, affine)
    >>> img.get_shape()
    (10, 1, 1)
    >>> img2 = squeeze_image(img)
    >>> img2.get_shape()
    (10, 1, 1)

    Only *final* dimensions of 1 are squeezed

    >>> shape = (1, 1, 5, 1, 2, 1, 1)
    >>> data = data.reshape(shape)
    >>> img = nf.ni1.Nifti1Image(data, affine)
    >>> img.get_shape()
    (1, 1, 5, 1, 2, 1, 1)
    >>> img2 = squeeze_image(img)
    >>> img2.get_shape()
    (1, 1, 5, 1, 2)
    '''
    klass = img.__class__
    shape = img.get_shape()
    slen = len(shape)
    if slen < 4:
        return klass.from_image(img)
    for bdim in shape[3::][::-1]:
        if bdim == 1:
            slen -= 1
        else:
            break
    if slen == len(shape):
        return klass.from_image(img)
    shape = shape[:slen]
    data = img.get_data()
    data = data.reshape(shape)
    return klass(data, img.get_affine(), img.get_header(), img.extra)


def concat_images(images):
    ''' Concatenate images in list to single image, along last dimension '''
    n_imgs = len(images)
    img0 = images[0]
    i0shape = img0.get_shape()
    affine = img0.get_affine()
    header = img0.get_header()
    out_shape = (n_imgs, ) + i0shape
    out_data = np.empty(out_shape)
    for i, img in enumerate(images):
        if not np.all(img.get_affine() == affine):
            raise ValueError('Affines do not match')
        out_data[i] = img.get_data()
    out_data = np.rollaxis(out_data, 0, len(i0shape) + 1)
    klass = img0.__class__
    return klass(out_data, affine, header)
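Since concat_images carries no example of its own, a short illustrative usage in the same doctest style as squeeze_image (reusing the nf alias and affine from those doctests; this is a sketch, not a doctest from the original module) might look like:

# >>> imgs = [nf.Nifti1Image(np.zeros((10, 20, 30)), np.eye(4))
# ...         for _ in range(5)]
# >>> combined = concat_images(imgs)
# >>> combined.get_shape()   # five 3D volumes stacked along a new last axis
# (10, 20, 30, 5)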
from qiime2.plugin import SemanticType

from ..plugin_setup import plugin
from . import AlphaDiversityDirectoryFormat

SampleData = SemanticType('SampleData', field_names='type')

AlphaDiversity = SemanticType('AlphaDiversity',
                              variant_of=SampleData.field['type'])

plugin.register_semantic_types(SampleData, AlphaDiversity)

plugin.register_semantic_type_to_format(
    SampleData[AlphaDiversity],
    artifact_format=AlphaDiversityDirectoryFormat
)
from ..base import BaseTopazTest


class TestMarshal(BaseTopazTest):
    def test_version_constants(self, space):
        w_res = space.execute("return Marshal::MAJOR_VERSION")
        assert space.int_w(w_res) == 4

        w_res = space.execute("return Marshal::MINOR_VERSION")
        assert space.int_w(w_res) == 8

        w_res = space.execute("return Marshal.dump('test')[0].ord")
        assert space.int_w(w_res) == 4

        w_res = space.execute("return Marshal.dump('test')[1].ord")
        assert space.int_w(w_res) == 8

    def test_dump_constants(self, space):
        w_res = space.execute("return Marshal.dump(nil)")
        assert space.str_w(w_res) == "\x04\b0"

        w_res = space.execute("return Marshal.dump(true)")
        assert space.str_w(w_res) == "\x04\bT"

        w_res = space.execute("return Marshal.dump(false)")
        assert space.str_w(w_res) == "\x04\bF"

    def test_load_constants(self, space):
        w_res = space.execute("return Marshal.load('\x04\b0')")
        assert w_res == space.w_nil

        w_res = space.execute("return Marshal.load('\x04\bT')")
        assert w_res == space.w_true

        w_res = space.execute("return Marshal.load('\x04\bF')")
        assert w_res == space.w_false

    def test_constants(self, space):
        w_res = space.execute("return Marshal.load(Marshal.dump(nil))")
        assert w_res == space.w_nil

        w_res = space.execute("return Marshal.load(Marshal.dump(true))")
        assert w_res == space.w_true

        w_res = space.execute("return Marshal.load(Marshal.dump(false))")
        assert w_res == space.w_false

    def test_dump_tiny_integer(self, space):
        w_res = space.execute("return Marshal.dump(5)")
        assert space.str_w(w_res) == "\x04\bi\n"

        w_res = space.execute("return Marshal.dump(100)")
        assert space.str_w(w_res) == "\x04\bii"

        w_res = space.execute("return Marshal.dump(0)")
        assert space.str_w(w_res) == "\x04\bi\x00"

        w_res = space.execute("return Marshal.dump(-1)")
        assert space.str_w(w_res) == "\x04\bi\xFA"

        w_res = space.execute("return Marshal.dump(-123)")
        assert space.str_w(w_res) == "\x04\bi\x80"

        w_res = space.execute("return Marshal.dump(122)")
        assert space.str_w(w_res) == "\x04\bi\x7F"

    def test_load_tiny_integer(self, space):
        w_res = space.execute("return Marshal.load('\x04\bi\n')")
        assert space.int_w(w_res) == 5

        w_res = space.execute("return Marshal.load('\x04\bii')")
        assert space.int_w(w_res) == 100

        #w_res = space.execute('return Marshal.load("\x04\bi\x00")')
        w_res = space.execute('return Marshal.load(Marshal.dump(0))')
        assert space.int_w(w_res) == 0

        w_res = space.execute("return Marshal.load('\x04\bi\xFA')")
        assert space.int_w(w_res) == -1

        w_res = space.execute("return Marshal.load('\x04\bi\x80')")
        assert space.int_w(w_res) == -123

        w_res = space.execute("return Marshal.load('\x04\bi\x7F')")
        assert space.int_w(w_res) == 122

    def test_dump_array(self, space):
        w_res = space.execute("return Marshal.dump([])")
        assert space.str_w(w_res) == "\x04\b[\x00"

        w_res = space.execute("return Marshal.dump([nil])")
        assert space.str_w(w_res) == "\x04\b[\x060"

        w_res = space.execute("return Marshal.dump([nil, true, false])")
        assert space.str_w(w_res) == "\x04\b[\b0TF"

        w_res = space.execute("return Marshal.dump([1, 2, 3])")
        assert space.str_w(w_res) == "\x04\b[\x08i\x06i\x07i\x08"

        w_res = space.execute("return Marshal.dump([1, [2, 3], 4])")
        assert space.str_w(w_res) == "\x04\b[\bi\x06[\ai\ai\bi\t"

        w_res = space.execute("return Marshal.dump([:foo, :bar])")
        assert space.str_w(w_res) == "\x04\b[\a:\bfoo:\bbar"

    def test_load_array(self, space):
        #w_res = space.execute("return Marshal.load('\x04\b[\x00')")
        w_res = space.execute("return Marshal.load(Marshal.dump([]))")
        assert self.unwrap(space, w_res) == []

        w_res = space.execute("return Marshal.load('\x04\b[\x060')")
        assert self.unwrap(space, w_res) == [None]

        w_res = space.execute("return Marshal.load('\x04\b[\b0TF')")
        assert self.unwrap(space, w_res) == [None, True, False]

        w_res = space.execute("return Marshal.load('\x04\b[\x08i\x06i\x07i\x08')")
        assert self.unwrap(space, w_res) == [1, 2, 3]

        w_res = space.execute("return Marshal.load('\x04\b[\bi\x06[\ai\ai\bi\t')")
        assert self.unwrap(space, w_res) == [1, [2, 3], 4]

        w_res = space.execute("return Marshal.load('\x04\b[\a:\bfoo:\bbar')")
        assert self.unwrap(space, w_res) == ["foo", "bar"]

    def test_dump_symbol(self, space):
        w_res = space.execute("return Marshal.dump(:abc)")
        assert space.str_w(w_res) == "\x04\b:\babc"

        w_res = space.execute("return Marshal.dump(('hello' * 25).to_sym)")
        assert space.str_w(w_res) == "\x04\b:\x01}" + "hello" * 25

        w_res = space.execute("return Marshal.dump(('hello' * 100).to_sym)")
        assert space.str_w(w_res) == "\x04\b:\x02\xF4\x01" + "hello" * 100

    def test_load_symbol(self, space):
        w_res = space.execute("return Marshal.load('\x04\b:\babc')")
        assert space.symbol_w(w_res) == "abc"

        w_res = space.execute("return Marshal.load('\x04\b:\x01}' + 'hello' * 25)")
        assert space.symbol_w(w_res) == "hello" * 25

    def test_dump_hash(self, space):
        w_res = space.execute("return Marshal.dump({})")
        assert space.str_w(w_res) == "\x04\b{\x00"

        w_res = space.execute("return Marshal.dump({1 => 2, 3 => 4})")
        assert self.unwrap(space, w_res) == "\x04\b{\ai\x06i\ai\bi\t"

        w_res = space.execute("return Marshal.dump({1 => {2 => 3}, 4 => 5})")
        assert self.unwrap(space, w_res) == "\x04\b{\ai\x06{\x06i\ai\bi\ti\n"

        w_res = space.execute("return Marshal.dump({1234 => {23456 => 3456789}, 4 => 5})")
        assert self.unwrap(space, w_res) == "\x04\b{\ai\x02\xD2\x04{\x06i\x02\xA0[i\x03\x15\xBF4i\ti\n"

    def test_load_hash(self, space):
        #w_res = space.execute("return Marshal.load('\x04\b{\x00')")
        w_res = space.execute("return Marshal.load(Marshal.dump({}))")
        assert self.unwrap(space, w_res) == {}

        w_res = space.execute("return Marshal.load('\x04\b{\ai\x06i\ai\bi\t')")
        assert self.unwrap(space, w_res) == {1: 2, 3: 4}

        w_res = space.execute("return Marshal.load('\x04\b{\ai\x06{\x06i\ai\bi\ti\n')")
        assert self.unwrap(space, w_res) == {1: {2: 3}, 4: 5}

        w_res = space.execute("return Marshal.load('\x04\b{\ai\x02\xD2\x04{\x06i\x02\xA0[i\x03\x15\xBF4i\ti\n')")
        assert self.unwrap(space, w_res) == {1234: {23456: 3456789}, 4: 5}

    def test_dump_integer(self, space):
        w_res = space.execute("return Marshal.dump(123)")
        assert space.str_w(w_res) == "\x04\bi\x01{"

        w_res = space.execute("return Marshal.dump(255)")
        assert space.str_w(w_res) == "\x04\bi\x01\xFF"

        w_res = space.execute("return Marshal.dump(256)")
        assert space.str_w(w_res) == "\x04\bi\x02\x00\x01"

        w_res = space.execute("return Marshal.dump(2 ** 16 - 2)")
        assert space.str_w(w_res) == "\x04\bi\x02\xFE\xFF"

        w_res = space.execute("return Marshal.dump(2 ** 16 - 1)")
        assert space.str_w(w_res) == "\x04\bi\x02\xFF\xFF"

        w_res = space.execute("return Marshal.dump(2 ** 16)")
        assert space.str_w(w_res) == "\x04\bi\x03\x00\x00\x01"

        w_res = space.execute("return Marshal.dump(2 ** 16 + 1)")
        assert space.str_w(w_res) == "\x04\bi\x03\x01\x00\x01"

        w_res = space.execute("return Marshal.dump(2 ** 30 - 1)")
        assert space.str_w(w_res) == "\x04\bi\x04\xFF\xFF\xFF?"

        # TODO: test too big numbers (they give a warning and inf)

    def test_load_integer(self, space):
        w_res = space.execute("return Marshal.load('\x04\bi\x01{')")
        assert space.int_w(w_res) == 123

        w_res = space.execute("return Marshal.load('\x04\bi\x01\xFF')")
        assert space.int_w(w_res) == 255

        #w_res = space.execute("return Marshal.load('\x04\bi\x02\x00\x01')")
        w_res = space.execute("return Marshal.load(Marshal.dump(256))")
        assert space.int_w(w_res) == 256

        w_res = space.execute("return Marshal.load('\x04\bi\x02\xFE\xFF')")
        assert space.int_w(w_res) == 2 ** 16 - 2

        w_res = space.execute("return Marshal.load('\x04\bi\x02\xFF\xFF')")
        assert space.int_w(w_res) == 2 ** 16 - 1

        #w_res = space.execute("return Marshal.load('\x04\bi\x03\x00\x00\x01')")
        w_res = space.execute("return Marshal.load(Marshal.dump(2 ** 16))")
        assert space.int_w(w_res) == 2 ** 16

        #w_res = space.execute("return Marshal.load('\x04\bi\x03\x01\x00\x01')")
        w_res = space.execute("return Marshal.load(Marshal.dump(2 ** 16 + 1))")
        assert space.int_w(w_res) == 2 ** 16 + 1

        w_res = space.execute("return Marshal.load('\x04\bi\x04\xFF\xFF\xFF?')")
        assert space.int_w(w_res) == 2 ** 30 - 1

    def test_dump_negative_integer(self, space):
        w_res = space.execute("return Marshal.dump(-1)")
        assert space.str_w(w_res) == "\x04\bi\xFA"

        w_res = space.execute("return Marshal.dump(-123)")
        assert space.str_w(w_res) == "\x04\bi\x80"

        w_res = space.execute("return Marshal.dump(-124)")
        assert space.str_w(w_res) == "\x04\bi\xFF\x84"

        w_res = space.execute("return Marshal.dump(-256)")
        assert space.str_w(w_res) == "\x04\bi\xFF\x00"

        w_res = space.execute("return Marshal.dump(-257)")
        assert space.str_w(w_res) == "\x04\bi\xFE\xFF\xFE"

        w_res = space.execute("return Marshal.dump(-(2 ** 30))")
        assert space.str_w(w_res) == "\x04\bi\xFC\x00\x00\x00\xC0"

    def test_load_negative_integer(self, space):
        w_res = space.execute("return Marshal.load('\x04\bi\xFA')")
        assert space.int_w(w_res) == -1

        w_res = space.execute("return Marshal.load('\x04\bi\x80')")
        assert space.int_w(w_res) == -123

        w_res = space.execute("return Marshal.load('\x04\bi\xFF\x84')")
        assert space.int_w(w_res) == -124

        #w_res = space.execute("return Marshal.load('\x04\bi\xFF\x00')")
        w_res = space.execute("return Marshal.load(Marshal.dump(-256))")
        assert space.int_w(w_res) == -256

        w_res = space.execute("return Marshal.load('\x04\bi\xFE\xFF\xFE')")
        assert space.int_w(w_res) == -257

        #w_res = space.execute("return Marshal.load('\x04\bi\xFE\x00\x00')")
        w_res = space.execute("return Marshal.load(Marshal.dump(-(2 ** 16)))")
        assert space.int_w(w_res) == -(2 ** 16)

        w_res = space.execute("return Marshal.load('\x04\bi\xFD\xFF\xFF\xFE')")
        assert space.int_w(w_res) == -(2 ** 16 + 1)

        #w_res = space.execute("return Marshal.load('\x04\bi\xFC\x00\x00\x00')")
        w_res = space.execute("return Marshal.load(Marshal.dump(-(2 ** 24)))")
        assert space.int_w(w_res) == -(2 ** 24)

        w_res = space.execute("return Marshal.load('\x04\bi\xFC\xFF\xFF\xFF\xFE')")
        assert space.int_w(w_res) == -(2 ** 24 + 1)

        #w_res = space.execute("return Marshal.load('\x04\bi\xFC\x00\x00\x00\xC0')")
        w_res = space.execute("return Marshal.load(Marshal.dump(-(2 ** 30)))")
        assert space.int_w(w_res) == -(2 ** 30)

    def test_dump_float(self, space):
        w_res = space.execute("return Marshal.dump(0.0)")
        assert space.str_w(w_res) == "\x04\bf\x060"

        w_res = space.execute("return Marshal.dump(0.1)")
        assert space.str_w(w_res) == "\x04\bf\b0.1"

        w_res = space.execute("return Marshal.dump(1.0)")
        assert space.str_w(w_res) == "\x04\bf\x061"

        w_res = space.execute("return Marshal.dump(1.1)")
        assert space.str_w(w_res) == "\x04\bf\b1.1"

        w_res = space.execute("return Marshal.dump(1.001)")
        assert space.str_w(w_res) == "\x04\bf\n1.001"

        #w_res = space.execute("return Marshal.dump(123456789.123456789)")
        #assert space.str_w(w_res) == "\x04\bf\x17123456789.12345679"

        #w_res = space.execute("return Marshal.dump(-123456789.123456789)")
        #assert space.str_w(w_res) == "\x04\bf\x18-123456789.12345679"

        #w_res = space.execute("return Marshal.dump(-0.0)")
        #assert space.str_w(w_res) == "\x04\bf\a-0"

    def test_load_float(self, space):
        w_res = space.execute("return Marshal.load('\x04\bf\x060')")
        assert space.float_w(w_res) == 0.0

        w_res = space.execute("return Marshal.load('\x04\bf\b0.1')")
        assert space.float_w(w_res) == 0.1

        w_res = space.execute("return Marshal.load('\x04\bf\x061')")
        assert space.float_w(w_res) == 1.0

        w_res = space.execute("return Marshal.load('\x04\bf\b1.1')")
        assert space.float_w(w_res) == 1.1

        w_res = space.execute("return Marshal.load('\x04\bf\n1.001')")
        assert space.float_w(w_res) == 1.001

        #w_res = space.execute("return Marshal.load('\x04\bf\x17123456789.12345679')")
        #assert space.float_w(w_res) == 123456789.123456789

        #w_res = space.execute("return Marshal.load('\x04\bf\x18-123456789.12345679')")
        #assert space.float_w(w_res) == -123456789.123456789

        #w_res = space.execute("return Marshal.load('\x04\bf\a-0')")
        #assert repr(space.float_w(w_res)) == repr(-0.0)

    def test_dump_string(self, space):
        w_res = space.execute("return Marshal.dump('')")
        assert space.str_w(w_res) == "\x04\bI\"\x00\x06:\x06ET"

        w_res = space.execute("return Marshal.dump('abc')")
        assert space.str_w(w_res) == "\x04\bI\"\babc\x06:\x06ET"

        w_res = space.execute("return Marshal.dump('i am a longer string')")
        assert space.str_w(w_res) == "\x04\bI\"\x19i am a longer string\x06:\x06ET"

    def test_load_string(self, space):
        #w_res = space.execute("return Marshal.load('\x04\bI\"\x00\x06:\x06ET')")
        w_res = space.execute("return Marshal.load(Marshal.dump(''))")
        assert space.str_w(w_res) == ""

        w_res = space.execute("return Marshal.load('\x04\bI\"\babc\x06:\x06ET')")
        assert space.str_w(w_res) == "abc"

        w_res = space.execute("return Marshal.load('\x04\bI\"\x19i am a longer string\x06:\x06ET')")
        assert space.str_w(w_res) == "i am a longer string"

    def test_array(self, space):
        w_res = space.execute("return Marshal.load(Marshal.dump([1, 2, 3]))")
        assert self.unwrap(space, w_res) == [1, 2, 3]

        w_res = space.execute("return Marshal.load(Marshal.dump([1, [2, 3], 4]))")
        assert self.unwrap(space, w_res) == [1, [2, 3], 4]

        w_res = space.execute("return Marshal.load(Marshal.dump([130, [2, 3], 4]))")
        assert self.unwrap(space, w_res) == [130, [2, 3], 4]

        w_res = space.execute("return Marshal.load(Marshal.dump([-10000, [2, 123456], -9000]))")
        assert self.unwrap(space, w_res) == [-10000, [2, 123456], -9000]

        w_res = space.execute("return Marshal.load(Marshal.dump([:foo, :bar]))")
        assert self.unwrap(space, w_res) == ["foo", "bar"]

        w_res = space.execute("return Marshal.load(Marshal.dump(['foo', 'bar']))")
        assert self.unwrap(space, w_res) == ["foo", "bar"]

    def test_incompatible_format(self, space):
        with self.raises(
            space,
            "TypeError",
            "incompatible marshal file format (can't be read)\n"
            "format version 4.8 required; 97.115 given"
        ):
            space.execute("Marshal.load('asd')")

    def test_short_data(self, space):
        with self.raises(space, "ArgumentError", "marshal data too short"):
            space.execute("Marshal.load('')")

    def test_parameters(self, space):
        with self.raises(space, "TypeError", "instance of IO needed"):
            space.execute("Marshal.load(4)")

    def test_io(self, space, tmpdir):
        f = tmpdir.join("testfile")

        w_res = space.execute("""
        Marshal.dump('hallo', File.new('%s', 'wb'))
        file = File.open('%s', 'rb')
        return Marshal.load(file.read)
        """ % (f, f))
        assert space.str_w(w_res) == "hallo"

        w_res = space.execute("""
        Marshal.dump('hallo', File.new('%s', 'wb'))
        file = File.open('%s', 'rb')
        return Marshal.load(file)
        """ % (f, f))
        assert space.str_w(w_res) == "hallo"
import os
import unittest

import IECore

import Gaffer
import GafferImage
import GafferImageTest


class ObjectToImageTest( GafferImageTest.ImageTestCase ) :

    fileName = os.path.expandvars( "$GAFFER_ROOT/python/GafferImageTest/images/checker.exr" )
    negFileName = os.path.expandvars( "$GAFFER_ROOT/python/GafferImageTest/images/checkerWithNegativeDataWindow.200x150.exr" )

    def test( self ) :

        i = IECore.Reader.create( self.fileName ).read()

        n = GafferImage.ObjectToImage()
        n["object"].setValue( i )

        self.assertEqual( n["out"].image(), i )

    def testImageWithANegativeDataWindow( self ) :

        i = IECore.Reader.create( self.negFileName ).read()

        n = GafferImage.ObjectToImage()
        n["object"].setValue( i )

        self.assertEqual( n["out"].image(), i )

    def testHashVariesPerTileAndChannel( self ) :

        n = GafferImage.ObjectToImage()
        n["object"].setValue( IECore.Reader.create( self.fileName ).read() )

        self.assertNotEqual(
            n["out"].channelDataHash( "R", IECore.V2i( 0 ) ),
            n["out"].channelDataHash( "G", IECore.V2i( 0 ) )
        )

        self.assertNotEqual(
            n["out"].channelDataHash( "R", IECore.V2i( 0 ) ),
            n["out"].channelDataHash( "R", IECore.V2i( GafferImage.ImagePlug.tileSize() ) )
        )


if __name__ == "__main__":
    unittest.main()
from __future__ import print_function

import sys
import os
import urllib
import argparse
import xml.etree.ElementTree as ET


def warn(*msgs):
    for x in msgs:
        print('[WARNING]:', x, file=sys.stderr)


class PDBTM:
    def __init__(self, filename):
        #self.tree = ET.parse(filename)
        #self.root = self.tree.getroot()
        def strsum(l):
            s = ''
            for x in l:
                s += x.rstrip() + '\n'
            return s
        f = open(filename)
        s = []
        for l in f:
            s.append(l)
        #s = strsum(s[1:-1]).strip()
        s = strsum(s).strip()
        self.root = ET.fromstring(s)
        print(self.root)


def get_database(prefix='.'):
    if not prefix.endswith('/'):
        prefix += '/'
    print('Fetching database...', file=sys.stderr)
    db = urllib.urlopen('http://pdbtm.enzim.hu/data/pdbtmall')
    print('Saving database...', file=sys.stderr)
    f = open('%s/pdbtmall' % prefix, 'w')
    for l in db:
        f.write(l)
    #f.write(db.read())
    db.close()
    f.close()


def build_database(fn, prefix):
    print('Unpacking database...', file=sys.stderr)
    f = open(fn)
    db = f.read()
    f.close()
    firstline = 1
    header = ''
    entries = []
    pdbids = []
    for l in db.split('\n'):
        if firstline:
            header += l
            firstline -= 1
            continue
        if 'PDBTM>' in l:
            continue
        if l.startswith('<?'):
            continue
        if l.startswith('<pdbtm'):
            a = l.find('ID=') + 4
            b = a + 4
            pdbids.append(l[a:b])
            entries.append(header)
        entries[-1] += '\n' + l
    if not prefix.endswith('/'):
        prefix += '/'
    if not os.path.isdir(prefix):
        os.mkdir(prefix)
    for entry in zip(pdbids, entries):
        f = open(prefix + entry[0] + '.xml', 'w')
        f.write(entry[1])
        f.close()


if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='Manages PDBTM databases. Automatically fetches the PDBTM database if no options are specified. Run without any arguments, dbtool will retrieve the PDBTM database, store it in pdbtm, and unpack it.')

    parser.add_argument('-d', '--db', default='pdbtmall', help='name of concatenated database file {default:pdbtmall}')
    parser.add_argument('-b', '--build-db', action='store_true', help='(re)build database from an existing pdbtmall file (available at http://pdbtm.enzim.hu/data/pdbtmall)')
    parser.add_argument('directory', nargs='?', default='pdbtm', help='directory to store database in')
    parser.add_argument('-f', '--force-refresh', action='store_true', help='force overwrite of existing database. Functionally equivalent to removing the old database and rerunning.')
    #parser.add_argument('-n', metavar='bundle_size', type=int, help='size to cut bundles into')

    args = parser.parse_args()

    if args.build_db:
        build_database(args.db, args.directory)
    else:
        #db = PDBTM(args.db)
        if not os.path.isdir(args.directory):
            os.mkdir(args.directory)
        if args.force_refresh or not os.path.isfile('%s/%s' % (args.directory, args.db)):
            get_database(args.directory)
        build_database('%s/%s' % (args.directory, args.db), args.directory)

#http://pdbtm.enzim.hu/data/pdbtmall
import re

from django.template import Library, Node, resolve_variable, TemplateSyntaxError
from django.core.urlresolvers import reverse

register = Library()


@register.simple_tag
def active(request, pattern):
    if re.search(pattern, request.get_full_path()):
        return 'active'
    return ''
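A hypothetical check of this tag from Python; the library name navigation_tags is an assumption (it depends on the filename this tag lives in), and this sketch requires configured Django settings:

# Hedged usage sketch only; 'navigation_tags' is a placeholder library name.
from django.template import Context, Template
from django.test import RequestFactory

t = Template("{% load navigation_tags %}{% active request '^/blog/' %}")
html = t.render(Context({'request': RequestFactory().get('/blog/posts/')}))
assert html == 'active'  # '/blog/posts/' matches the '^/blog/' pattern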
'''
Given a number, find the next higher number using only the digits in the
given number. For example, if the given number is 1234, the next higher
number with the same digits is 1243.
'''


def FindNext(num):
    number = str(num)
    length = len(number)
    # Scan from the right for the first digit smaller than its neighbor.
    for i in range(length - 2, -1, -1):
        current = number[i]
        right = number[i + 1]
        if current < right:
            # Sort the tail, pick the next larger digit, and append the rest.
            temp = sorted(number[i:])
            Next = temp[temp.index(current) + 1]
            temp.remove(Next)
            temp = ''.join(temp)
            return int(number[:i] + Next + temp)
    # Digits are already in descending order: no higher number exists.
    return num
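A few quick sanity checks, worked out by hand against the algorithm above:

assert FindNext(1234) == 1243      # swap the last two digits
assert FindNext(534976) == 536479  # rearrange the tail after the first descent
assert FindNext(4321) == 4321      # digits already descending: unchanged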
import requests


class Status(object):
    SKIP_LOCALES = ['en_US']

    def __init__(self, url, app=None, highlight=None):
        self.url = url
        self.app = app
        self.highlight = highlight or []
        self.data = []
        self.created = None

    def get_data(self):
        if self.data:
            return

        resp = requests.get(self.url)
        if resp.status_code != 200:
            resp.raise_for_status()

        self.data = resp.json()
        self.created = self.data[-1]['created']

    def summary(self):
        """Generates summary data of today's state"""
        self.get_data()
        highlight = self.highlight

        last_item = self.data[-1]

        output = {}
        output['app'] = self.app or 'ALL'

        data = last_item['locales']
        if self.app:
            get_item = lambda x: x['apps'][self.app]
        else:
            get_item = lambda x: x

        apps = data.items()[0][1]['apps'].keys()
        apps.sort()
        output['apps'] = apps

        items = [item for item in data.items() if item[0] not in highlight]
        hitems = [item for item in data.items() if item[0] in highlight]

        highlighted = []
        if hitems:
            for loc, loc_data in sorted(hitems, key=lambda x: -x[1]['percent']):
                if loc in self.SKIP_LOCALES:
                    continue
                item = get_item(loc_data)
                total = item.get('total', -1)
                translated = item.get('translated', -1)
                percent = item.get('percent', -1)
                untranslated_words = item.get('untranslated_words', -1)
                highlighted.append({
                    'locale': loc,
                    'percent': percent,
                    'total': total,
                    'translated': translated,
                    'untranslated': total - translated,
                    'untranslated_words': untranslated_words
                })
        output['highlighted'] = highlighted

        locales = []
        for loc, loc_data in sorted(items, key=lambda x: -x[1]['percent']):
            if loc in self.SKIP_LOCALES:
                continue
            item = get_item(loc_data)
            total = item.get('total', -1)
            translated = item.get('translated', -1)
            percent = item.get('percent', -1)
            untranslated_words = item.get('untranslated_words', -1)
            locales.append({
                'locale': loc,
                'percent': percent,
                'total': total,
                'translated': translated,
                'untranslated': total - translated,
                'untranslated_words': untranslated_words
            })
        output['locales'] = locales
        output['created'] = self.created
        return output

    def _mark_movement(self, data):
        """For each item, converts to a tuple of (movement, item)"""
        ret = []
        prev_day = None
        for i, day in enumerate(data):
            if i == 0:
                ret.append(('', day))
                prev_day = day
                continue
            if prev_day > day:
                item = ('down', day)
            elif prev_day < day:
                item = ('up', day)
            else:
                item = ('equal', day)
            prev_day = day
            ret.append(item)
        return ret

    def history(self):
        self.get_data()
        data = self.data
        highlight = self.highlight
        app = self.app

        # Get a list of the locales we'll iterate through
        locales = sorted(data[-1]['locales'].keys())

        num_days = 14
        # Truncate the data to what we want to look at
        data = data[-num_days:]

        if app:
            get_data = lambda x: x['apps'][app]['percent']
        else:
            get_data = lambda x: x['percent']

        hlocales = [loc for loc in locales if loc in highlight]
        locales = [loc for loc in locales if loc not in highlight]

        output = {}
        output['app'] = self.app or 'All'
        output['headers'] = [item['created'] for item in data]
        output['highlighted'] = sorted(
            (loc, self._mark_movement(get_data(day['locales'][loc]) for day in data))
            for loc in hlocales
        )
        output['locales'] = sorted(
            (loc, self._mark_movement(get_data(day['locales'].get(loc, {'percent': 0.0})) for day in data))
            for loc in locales
        )
        output['created'] = self.created
        return output
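A hypothetical usage sketch (the URL is a placeholder, not a real endpoint; the JSON it serves must have the per-day locale structure the class expects, and the Python 2 idioms match the module above):

# Hedged sketch only; URL and app name are made up.
status = Status('https://example.com/l10n-status.json', app='myapp',
                highlight=['de', 'fr'])
summary = status.summary()
for loc in summary['locales']:
    print loc['locale'], loc['percent']  # Python 2 print, matching the module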
"""Package contenant la commande 'débarquer'.""" from math import sqrt from primaires.interpreteur.commande.commande import Commande from secondaires.navigation.constantes import * class CmdDebarquer(Commande): """Commande 'debarquer'""" def __init__(self): """Constructeur de la commande""" Commande.__init__(self, "debarquer", "debark") self.nom_categorie = "navire" self.aide_courte = "débarque du navire" self.aide_longue = \ "Cette commande permet de débarquer du navire sur lequel " \ "on se trouve. On doit se trouver assez prêt d'une côte " \ "pour débarquer dessus." def interpreter(self, personnage, dic_masques): """Méthode d'interprétation de commande""" salle = personnage.salle if not hasattr(salle, "navire") or salle.navire is None: personnage << "|err|Vous n'êtes pas sur un navire.|ff|" return navire = salle.navire if navire.etendue is None: personnage << "|err|Vous n'êtes pas sur un navire.|ff|" return personnage.agir("bouger") # On va chercher la salle la plus proche etendue = navire.etendue # On cherche la salle de nagvire la plus proche d_salle = None # la salle de destination distance = 2 x, y, z = salle.coords.tuple() for t_salle in etendue.cotes.values(): if t_salle.coords.z == z: t_x, t_y, t_z = t_salle.coords.tuple() t_distance = sqrt((x - t_x) ** 2 + (y - t_y) ** 2) if t_distance < distance and t_salle.nom_terrain in \ TERRAINS_ACCOSTABLES: d_salle = t_salle distance = t_distance if d_salle is None: personnage << "|err|Aucun quai n'a pu être trouvé à " \ "proximité.|ff|" return personnage.salle = d_salle personnage << "Vous sautez sur {}.".format( d_salle.titre.lower()) personnage << d_salle.regarder(personnage) d_salle.envoyer("{{}} arrive en sautant depuis {}.".format( navire.nom), personnage) salle.envoyer("{{}} saute sur {}.".format( d_salle.titre.lower()), personnage) importeur.hook["personnage:deplacer"].executer( personnage, d_salle, None, 0) if not hasattr(d_salle, "navire") or d_salle.navire is None: personnage.envoyer_tip("N'oubliez pas d'amarrer votre navire " \ "avec %amarre% %amarre:attacher%.")
""" GUI threading help routines. Usage: import GtkMain # See constructor for GtkMain for options self.mygtk = GtkMain.GtkMain() # NOT THIS #gtk.main() # INSTEAD, main thread calls this: self.mygtk.mainloop() # (asynchronous call) self.mygtk.gui_do(method, arg1, arg2, ... argN, kwd1=val1, ..., kwdN=valN) # OR # (synchronous call) res = self.mygtk.gui_call(method, arg1, arg2, ... argN, kwd1=val1, ..., kwdN=valN) # To cause the GUI thread to terminate the mainloop self.mygtk.qui_quit() """ import sys, traceback import thread, threading import logging import Queue as que import gtk from ginga.misc import Task, Future class GtkMain(object): def __init__(self, queue=None, logger=None, ev_quit=None): # You can pass in a queue if you prefer to do so if not queue: queue = que.Queue() self.gui_queue = queue # You can pass in a logger if you prefer to do so if logger == None: logger = logging.getLogger('GtkHelper') self.logger = logger if not ev_quit: ev_quit = threading.Event() self.ev_quit = ev_quit self.gui_thread_id = None def update_pending(self, timeout=0.0): """Process all pending GTK events and return. _timeout_ is a tuning parameter for performance. """ # Process "out-of-band" GTK events try: while gtk.events_pending(): #gtk.main_iteration(False) gtk.main_iteration() finally: pass done = False while not done: # Process "in-band" GTK events try: future = self.gui_queue.get(block=True, timeout=timeout) # Execute the GUI method try: try: res = future.thaw(suppress_exception=False) except Exception, e: future.resolve(e) self.logger.error("gui error: %s" % str(e)) try: (type, value, tb) = sys.exc_info() tb_str = "".join(traceback.format_tb(tb)) self.logger.error("Traceback:\n%s" % (tb_str)) except Exception, e: self.logger.error("Traceback information unavailable.") finally: pass except que.Empty: done = True except Exception, e: self.logger.error("Main GUI loop error: %s" % str(e)) # Process "out-of-band" GTK events again try: while gtk.events_pending(): #gtk.main_iteration(False) gtk.main_iteration() finally: pass def gui_do(self, method, *args, **kwdargs): """General method for asynchronously calling into the GUI. It makes a future to call the given (method) with the given (args) and (kwdargs) inside the gui thread. If the calling thread is a non-gui thread the future is returned. """ future = Future.Future() future.freeze(method, *args, **kwdargs) self.gui_queue.put(future) my_id = thread.get_ident() if my_id != self.gui_thread_id: return future def gui_call(self, method, *args, **kwdargs): """General method for synchronously calling into the GUI. This waits until the method has completed before returning. 
""" my_id = thread.get_ident() if my_id == self.gui_thread_id: return method(*args, **kwdargs) else: future = self.gui_do(method, *args, **kwdargs) return future.wait() def gui_do_future(self, future): self.gui_queue.put(future) return future def nongui_do(self, method, *args, **kwdargs): task = Task.FuncTask(method, args, kwdargs, logger=self.logger) return self.nongui_do_task(task) def nongui_do_cb(self, tup, method, *args, **kwdargs): task = Task.FuncTask(method, args, kwdargs, logger=self.logger) task.register_callback(tup[0], args=tup[1:]) return self.nongui_do_task(task) def nongui_do_future(self, future): task = Task.FuncTask(future.thaw, (), {}, logger=self.logger) return self.nongui_do_task(task) def nongui_do_task(self, task): try: task.init_and_start(self) return task except Exception, e: self.logger.error("Error starting task: %s" % (str(e))) raise(e) def assert_gui_thread(self): my_id = thread.get_ident() assert my_id == self.gui_thread_id, \ Exception("Non-GUI thread (%d) is executing GUI code!" % ( my_id)) def assert_nongui_thread(self): my_id = thread.get_ident() assert my_id != self.gui_thread_id, \ Exception("GUI thread (%d) is executing non-GUI code!" % ( my_id)) def mainloop(self, timeout=0.001): # Mark our thread id self.gui_thread_id = thread.get_ident() while not self.ev_quit.isSet(): self.update_pending(timeout=timeout) def gui_quit(self): "Call this to cause the GUI thread to quit the mainloop.""" self.ev_quit.set()
import ctypes
import logging
class Choose:
    """
    Choose - class for choose() with callbacks
    """
    def __init__(self, list, title, flags=0, deflt=1, icon=37):
        self.list = list
        self.title = title
        self.flags = flags
        self.x0 = -1
        self.x1 = -1
        self.y0 = -1
        self.y1 = -1
        self.width = -1
        self.deflt = deflt
        self.icon = icon

        # HACK: Add a circular reference for non-modal choosers. This prevents
        # the GC from collecting the class object the callbacks need.
        # Unfortunately this means that the class will never be collected,
        # unless refhack is set to None explicitly.
        if (flags & Choose2.CH_MODAL) == 0:
            self.refhack = self

    def sizer(self):
        """
        Callback: sizer - returns the length of the list
        """
        return len(self.list)

    def getl(self, n):
        """
        Callback: getl - get one item from the list
        """
        if n == 0:
            return self.title
        if n <= self.sizer():
            return str(self.list[n-1])
        else:
            return "<Empty>"

    def ins(self):
        pass

    def update(self, n):
        pass

    def edit(self, n):
        pass

    def enter(self, n):
        print "enter(%d) called" % n

    def destroy(self):
        pass

    def get_icon(self, n):
        pass

    def choose(self):
        """
        choose - Display the choose dialogue
        """
        old = set_script_timeout(0)
        n = _idaapi.choose_choose(
            self,
            self.flags,
            self.x0,
            self.y0,
            self.x1,
            self.y1,
            self.width,
            self.deflt,
            self.icon)
        set_script_timeout(old)
        return n
'''
Production Configurations

- Use djangosecure
- Use Amazon's S3 for storing static files and uploaded media
- Use sendgrid to send emails
- Use MEMCACHIER on Heroku
'''
from configurations import values

try:
    from S3 import CallingFormat
    AWS_CALLING_FORMAT = CallingFormat.SUBDOMAIN
except ImportError:
    # TODO: Fix this where even if in Dev this class is called.
    pass

from .common import Common


class Production(Common):

    # This ensures that Django will be able to detect a secure connection
    # properly on Heroku.
    SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')

    # INSTALLED_APPS
    INSTALLED_APPS = Common.INSTALLED_APPS
    # END INSTALLED_APPS

    # SECRET KEY
    SECRET_KEY = values.SecretValue()
    # END SECRET KEY

    # django-secure
    INSTALLED_APPS += ("djangosecure", )

    # set this to 60 seconds and then to 518400 when you can prove it works
    SECURE_HSTS_SECONDS = 60
    SECURE_HSTS_INCLUDE_SUBDOMAINS = values.BooleanValue(True)
    SECURE_FRAME_DENY = values.BooleanValue(True)
    SECURE_CONTENT_TYPE_NOSNIFF = values.BooleanValue(True)
    SECURE_BROWSER_XSS_FILTER = values.BooleanValue(True)
    SESSION_COOKIE_SECURE = values.BooleanValue(False)
    SESSION_COOKIE_HTTPONLY = values.BooleanValue(True)
    SECURE_SSL_REDIRECT = values.BooleanValue(True)
    # end django-secure

    # SITE CONFIGURATION
    # Hosts/domain names that are valid for this site
    # See https://docs.djangoproject.com/en/1.6/ref/settings/#allowed-hosts
    ALLOWED_HOSTS = ["*"]
    # END SITE CONFIGURATION

    INSTALLED_APPS += ("gunicorn", )

    # STORAGE CONFIGURATION
    # See: http://django-storages.readthedocs.org/en/latest/index.html
    INSTALLED_APPS += (
        'storages',
    )

    # See: http://django-storages.readthedocs.org/en/latest/backends/amazon-S3.html#settings
    STATICFILES_STORAGE = DEFAULT_FILE_STORAGE = 'storages.backends.s3boto.S3BotoStorage'

    # See: http://django-storages.readthedocs.org/en/latest/backends/amazon-S3.html#settings
    AWS_ACCESS_KEY_ID = values.SecretValue()
    AWS_SECRET_ACCESS_KEY = values.SecretValue()
    AWS_STORAGE_BUCKET_NAME = values.SecretValue()
    AWS_AUTO_CREATE_BUCKET = True
    AWS_QUERYSTRING_AUTH = False

    # see: https://github.com/antonagestam/collectfast
    AWS_PRELOAD_METADATA = True
    INSTALLED_APPS += ('collectfast', )

    # AWS cache settings, don't change unless you know what you're doing:
    AWS_EXPIRY = 60 * 60 * 24 * 7
    AWS_HEADERS = {
        'Cache-Control': 'max-age=%d, s-maxage=%d, must-revalidate' % (
            AWS_EXPIRY, AWS_EXPIRY)
    }

    # See: https://docs.djangoproject.com/en/dev/ref/settings/#static-url
    STATIC_URL = 'https://s3.amazonaws.com/%s/' % AWS_STORAGE_BUCKET_NAME
    # END STORAGE CONFIGURATION

    # EMAIL
    DEFAULT_FROM_EMAIL = values.Value('tco2 <noreply@example.com>')
    EMAIL_HOST = values.Value('smtp.sendgrid.com')
    EMAIL_HOST_PASSWORD = values.SecretValue(environ_prefix="",
                                             environ_name="SENDGRID_PASSWORD")
    EMAIL_HOST_USER = values.SecretValue(environ_prefix="",
                                         environ_name="SENDGRID_USERNAME")
    EMAIL_PORT = values.IntegerValue(587, environ_prefix="",
                                     environ_name="EMAIL_PORT")
    EMAIL_SUBJECT_PREFIX = values.Value('[tco2] ',
                                        environ_name="EMAIL_SUBJECT_PREFIX")
    EMAIL_USE_TLS = True
    SERVER_EMAIL = EMAIL_HOST_USER
    # END EMAIL

    # TEMPLATE CONFIGURATION
    # See: https://docs.djangoproject.com/en/dev/ref/settings/#template-dirs
    TEMPLATE_LOADERS = (
        ('django.template.loaders.cached.Loader', (
            'django.template.loaders.filesystem.Loader',
            'django.template.loaders.app_directories.Loader',
        )),
    )
    # END TEMPLATE CONFIGURATION

    # CACHING
    # Only do this here because thanks to django-pylibmc-sasl and pylibmc
    # memcacheify is painful to install on windows.
    try:
        # See: https://github.com/rdegges/django-heroku-memcacheify
        from memcacheify import memcacheify
        CACHES = memcacheify()
    except ImportError:
        CACHES = values.CacheURLValue(default="memcached://127.0.0.1:11211")
    # END CACHING

    # Your production stuff: Below this line define 3rd party library settings
import ffm
import numpy as np
from .base import FactorizationMachine
from sklearn.utils.testing import assert_array_equal
from .validation import check_array, assert_all_finite


class FMRecommender(FactorizationMachine):

    """ Factorization Machine Recommender with pairwise (BPR) loss solver.

    Parameters
    ----------
    n_iter : int, optional
        The number of iterations over individual samples.

    init_stdev: float, optional
        Sets the stdev for the initialization of the parameter

    random_state: int, optional
        The seed of the pseudo random number generator that
        initializes the parameters and mcmc chain.

    rank: int
        The rank of the factorization used for the second order interactions.

    l2_reg_w : float
        L2 penalty weight for linear coefficients.

    l2_reg_V : float
        L2 penalty weight for pairwise (second order) coefficients.

    l2_reg : float
        L2 penalty weight for all coefficients (default=0).

    step_size : float
        Step size for the SGD solver; the solver uses a fixed step size and
        might require tuning of the number of iterations `n_iter`.

    Attributes
    ----------
    w0_ : float
        bias term

    w_ : float | array, shape = (n_features)
        Coefficients for linear combination.

    V_ : float | array, shape = (rank_pair, n_features)
        Coefficients of second order factor matrix.
    """

    def __init__(self, n_iter=100, init_stdev=0.1, rank=8, random_state=123,
                 l2_reg_w=0.1, l2_reg_V=0.1, l2_reg=0, step_size=0.1):
        super(FMRecommender, self).\
            __init__(n_iter=n_iter, init_stdev=init_stdev, rank=rank,
                     random_state=random_state)
        if (l2_reg != 0):
            self.l2_reg_V = l2_reg
            self.l2_reg_w = l2_reg
        else:
            self.l2_reg_w = l2_reg_w
            self.l2_reg_V = l2_reg_V
        self.step_size = step_size
        self.task = "ranking"

    def fit(self, X, pairs):
        """ Fit model with specified loss.

        Parameters
        ----------
        X : scipy.sparse.csc_matrix, (n_samples, n_features)

        pairs : ndarray, shape = (n_compares, 2)
            Each row `i` defines a pair of samples such that the first
            returns a higher value than the second,
            FM(X[i, 0]) > FM(X[i, 1]).
        """
        X = X.T
        X = check_array(X, accept_sparse="csc", dtype=np.float64)
        assert_all_finite(pairs)

        pairs = pairs.astype(np.float64)
        # check that pairs contain no non-integer values
        assert_array_equal(pairs, pairs.astype(np.int32))
        assert pairs.max() <= X.shape[1]
        assert pairs.min() >= 0
        self.w0_, self.w_, self.V_ = ffm.ffm_fit_sgd_bpr(self, X, pairs)
        return self
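A hypothetical fit sketch (values are made up, and it assumes the compiled ffm extension is importable): X holds one sample per row, and each row of pairs names a (preferred, less preferred) sample index:

# Hedged sketch only; data and hyperparameters are illustrative.
import numpy as np
import scipy.sparse as sp

X = sp.csc_matrix(np.random.rand(5, 4))     # 5 samples, 4 features
pairs = np.array([[0, 1], [2, 3], [0, 3]])  # sample 0 should rank above 1, etc.
fm = FMRecommender(n_iter=50, rank=2, step_size=0.05)
fm.fit(X, pairs)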
import sys

from pyface.qt import QtCore, QtGui

from traits.api import Bool, Event, provides, Unicode

from pyface.i_python_editor import IPythonEditor, MPythonEditor
from pyface.key_pressed_event import KeyPressedEvent
from pyface.widget import Widget
from pyface.ui.qt4.code_editor.code_widget import AdvancedCodeWidget


@provides(IPythonEditor)
class PythonEditor(MPythonEditor, Widget):
    """ The toolkit specific implementation of a PythonEditor.  See the
    IPythonEditor interface for the API documentation.
    """

    #### 'IPythonEditor' interface ############################################

    dirty = Bool(False)

    path = Unicode

    show_line_numbers = Bool(True)

    #### Events ####

    changed = Event

    key_pressed = Event(KeyPressedEvent)

    ###########################################################################
    # 'object' interface.
    ###########################################################################

    def __init__(self, parent, **traits):
        super(PythonEditor, self).__init__(**traits)
        self.control = self._create_control(parent)

    ###########################################################################
    # 'PythonEditor' interface.
    ###########################################################################

    def load(self, path=None):
        """ Loads the contents of the editor. """
        if path is None:
            path = self.path

        # We will have no path for a new script.
        if len(path) > 0:
            f = open(self.path, 'r')
            text = f.read()
            f.close()
        else:
            text = ''

        self.control.code.setPlainText(text)
        self.dirty = False

    def save(self, path=None):
        """ Saves the contents of the editor. """
        if path is None:
            path = self.path

        f = open(path, 'w')
        f.write(self.control.code.toPlainText())
        f.close()

        self.dirty = False

    def select_line(self, lineno):
        """ Selects the specified line. """
        self.control.code.set_line_column(lineno, 0)
        self.control.code.moveCursor(QtGui.QTextCursor.EndOfLine,
                                     QtGui.QTextCursor.KeepAnchor)

    ###########################################################################
    # Trait handlers.
    ###########################################################################

    def _path_changed(self):
        self._changed_path()

    def _show_line_numbers_changed(self):
        if self.control is not None:
            self.control.code.line_number_widget.setVisible(
                self.show_line_numbers)
            self.control.code.update_line_number_width()

    ###########################################################################
    # Private interface.
    ###########################################################################

    def _create_control(self, parent):
        """ Creates the toolkit-specific control for the widget. """
        self.control = control = AdvancedCodeWidget(parent)
        self._show_line_numbers_changed()

        # Install event filter to trap key presses.
        event_filter = PythonEditorEventFilter(self, self.control)
        self.control.installEventFilter(event_filter)
        self.control.code.installEventFilter(event_filter)

        # Connect signals for text changes.
        control.code.modificationChanged.connect(self._on_dirty_changed)
        control.code.textChanged.connect(self._on_text_changed)

        # Load the editor's contents.
        self.load()

        return control

    def _on_dirty_changed(self, dirty):
        """ Called whenever a change is made to the dirty state of the
        document.
        """
        self.dirty = dirty

    def _on_text_changed(self):
        """ Called whenever a change is made to the text of the document. """
        self.changed = True


class PythonEditorEventFilter(QtCore.QObject):
    """ A thin wrapper around the advanced code widget to handle the
    key_pressed Event.
    """

    def __init__(self, editor, parent):
        super(PythonEditorEventFilter, self).__init__(parent)
        self.__editor = editor

    def eventFilter(self, obj, event):
        """ Reimplemented to trap key presses. """
        if self.__editor.control and obj == self.__editor.control and \
                event.type() == QtCore.QEvent.FocusOut:
            # Hack for Traits UI compatibility.
            self.__editor.control.emit(QtCore.SIGNAL('lostFocus'))

        elif self.__editor.control and obj == self.__editor.control.code and \
                event.type() == QtCore.QEvent.KeyPress:
            # Pyface doesn't seem to be Unicode aware.  Only keep the key code
            # if it corresponds to a single Latin1 character.
            kstr = event.text()
            try:
                kcode = ord(str(kstr))
            except:
                kcode = 0

            mods = event.modifiers()
            # Fire the event on the wrapped editor (the filter itself has no
            # key_pressed trait).
            self.__editor.key_pressed = KeyPressedEvent(
                alt_down=((mods & QtCore.Qt.AltModifier) ==
                          QtCore.Qt.AltModifier),
                control_down=((mods & QtCore.Qt.ControlModifier) ==
                              QtCore.Qt.ControlModifier),
                shift_down=((mods & QtCore.Qt.ShiftModifier) ==
                            QtCore.Qt.ShiftModifier),
                key_code=kcode,
                event=event)

        return super(PythonEditorEventFilter, self).eventFilter(obj, event)
import argparse
import glob
import hashlib
import json
import os

IRMAS_INDEX_PATH = '../mirdata/indexes/irmas_index.json'
# Assumed path for the test index: the original script used this constant
# in make_irmas_test_index without defining it.
IRMAS_TEST_INDEX_PATH = '../mirdata/indexes/irmas_test_index.json'


def md5(file_path):
    """Get md5 hash of a file.

    Parameters
    ----------
    file_path: str
        File path.

    Returns
    -------
    md5_hash: str
        md5 hash of data in file_path
    """
    hash_md5 = hashlib.md5()
    with open(file_path, 'rb') as fhandle:
        for chunk in iter(lambda: fhandle.read(4096), b''):
            hash_md5.update(chunk)
    return hash_md5.hexdigest()


def strip_first_dir(full_path):
    return os.path.join(*(full_path.split(os.path.sep)[1:]))


def make_irmas_index(irmas_data_path):
    count = 0
    irmas_dict = dict()
    for root, dirs, files in os.walk(irmas_data_path):
        for directory in dirs:
            if 'Train' in directory:
                for root_, dirs_, files_ in os.walk(
                    os.path.join(irmas_data_path, directory)
                ):
                    for directory_ in dirs_:
                        for root__, dirs__, files__ in os.walk(
                            os.path.join(irmas_data_path, directory, directory_)
                        ):
                            for file in files__:
                                if file.endswith('.wav'):
                                    if 'dru' in file:
                                        irmas_id_dru = file.split(']')[3]  # Obtain id
                                        irmas_id_dru_no_wav = irmas_id_dru.split('.')[
                                            0
                                        ]  # Obtain id without '.wav'
                                        irmas_dict[irmas_id_dru_no_wav] = os.path.join(
                                            directory, directory_, file
                                        )
                                    if 'nod' in file:
                                        irmas_id_nod = file.split(']')[3]  # Obtain id
                                        irmas_id_nod_no_wav = irmas_id_nod.split('.')[
                                            0
                                        ]  # Obtain id without '.wav'
                                        irmas_dict[irmas_id_nod_no_wav] = os.path.join(
                                            directory, directory_, file
                                        )
                                    else:
                                        irmas_id = file.split(']')[2]  # Obtain id
                                        irmas_id_no_wav = irmas_id.split('.')[
                                            0
                                        ]  # Obtain id without '.wav'
                                        irmas_dict[irmas_id_no_wav] = os.path.join(
                                            directory, directory_, file
                                        )

    irmas_test_dict = dict()
    for root, dirs, files in os.walk(irmas_data_path):
        for directory in dirs:
            if 'Test' in directory:
                for root_, dirs_, files_ in os.walk(
                    os.path.join(irmas_data_path, directory)
                ):
                    for directory_ in dirs_:
                        for root__, dirs__, files__ in os.walk(
                            os.path.join(irmas_data_path, directory, directory_)
                        ):
                            for file in files__:
                                if file.endswith('.wav'):
                                    file_name = os.path.join(
                                        directory, directory_, file
                                    )
                                    track_name = str(file_name.split('.wa')[0]) + '.txt'
                                    irmas_test_dict[count] = [file_name, track_name]
                                    count += 1

    irmas_id_list = sorted(irmas_dict.items())  # Sort strokes by id

    irmas_index = {}
    for inst in irmas_id_list:
        print(inst[1])
        audio_checksum = md5(os.path.join(irmas_data_path, inst[1]))
        irmas_index[inst[0]] = {
            'audio': (inst[1], audio_checksum),
            'annotation': (inst[1], audio_checksum),
        }

    index = 1
    for inst in irmas_test_dict.values():
        audio_checksum = md5(os.path.join(irmas_data_path, inst[0]))
        annotation_checksum = md5(os.path.join(irmas_data_path, inst[1]))
        irmas_index[index] = {
            'audio': (inst[0], audio_checksum),
            'annotation': (inst[1], annotation_checksum),
        }
        index += 1

    with open(IRMAS_INDEX_PATH, 'w') as fhandle:
        json.dump(irmas_index, fhandle, indent=2)


def make_irmas_test_index(irmas_data_path):
    count = 1
    irmas_dict = dict()
    for root, dirs, files in os.walk(irmas_data_path):
        for directory in dirs:
            if 'Test' in directory:
                for root_, dirs_, files_ in os.walk(
                    os.path.join(irmas_data_path, directory)
                ):
                    for directory_ in dirs_:
                        for root__, dirs__, files__ in os.walk(
                            os.path.join(irmas_data_path, directory, directory_)
                        ):
                            for file in files__:
                                if file.endswith('.wav'):
                                    file_name = os.path.join(
                                        directory, directory_, file
                                    )
                                    track_name = str(file_name.split('.wa')[0]) + '.txt'
                                    irmas_dict[count] = [file_name, track_name]
                                    count += 1

    irmas_index = {}
    index = 1
    for inst in irmas_dict.values():
        audio_checksum = md5(os.path.join(irmas_data_path, inst[0]))
        annotation_checksum = md5(os.path.join(irmas_data_path, inst[1]))
        irmas_index[index] = {
            'audio': (inst[0], audio_checksum),
            'annotation': (inst[1], annotation_checksum),
        }
        index += 1

    with open(IRMAS_TEST_INDEX_PATH, 'w') as fhandle:
        json.dump(irmas_index, fhandle, indent=2)


def main(args):
    make_irmas_index(args.irmas_data_path)
    # make_irmas_test_index(args.irmas_data_path)


if __name__ == '__main__':
    PARSER = argparse.ArgumentParser(description='Make IRMAS index file.')
    PARSER.add_argument('irmas_data_path', type=str, help='Path to IRMAS data folder.')
    main(PARSER.parse_args())
import unittest
import time
import pprint
import logging

import scanner.logSetup as logSetup

import pyximport
print("Have Cython")
pyximport.install()

import dbPhashApi


class TestCompareDatabaseInterface(unittest.TestCase):

    def __init__(self, *args, **kwargs):
        logSetup.initLogging()
        super().__init__(*args, **kwargs)

    def setUp(self):
        # We set up and tear down the tree a few times to validate the
        # dropTree function
        self.log = logging.getLogger("Main.TestCompareDatabaseInterface")
        self.tree = dbPhashApi.PhashDbApi()

        self.tree.forceReload()

    def dist_check(self, distance, dbid, phash):
        qtime1 = time.time()
        have1 = self.tree.getWithinDistance_db(phash, distance=distance)
        qtime2 = time.time()

        qtime3 = time.time()
        have2 = self.tree.getIdsWithinDistance(phash, distance=distance)
        qtime4 = time.time()

        # print(dbid, have1)
        if have1 != have2:
            self.log.error("Mismatch!")
            for line in pprint.pformat(have1).split("\n"):
                self.log.error(line)
            for line in pprint.pformat(have2).split("\n"):
                self.log.error(line)

        self.assertTrue(dbid in have1)
        self.assertTrue(dbid in have2)
        self.assertEqual(have1, have2)

        self.log.info('Dist %s %s, %s', distance, qtime2 - qtime1, qtime4 - qtime3)

    def test_0(self):
        rand_r = self.tree.getRandomPhashRows(0.001)
        self.log.info("Have %s items to test with", len(rand_r))

        stepno = 0
        for dbid, phash in rand_r:
            self.dist_check(1, dbid, phash)
            self.dist_check(2, dbid, phash)
            self.dist_check(3, dbid, phash)
            self.dist_check(4, dbid, phash)
            self.dist_check(5, dbid, phash)
            self.dist_check(6, dbid, phash)
            self.dist_check(7, dbid, phash)
            self.dist_check(8, dbid, phash)

            stepno += 1
            self.log.info("On step %s of %s", stepno, len(rand_r))
from numpy import array, zeros, ones, sqrt, ravel, mod, random, inner, conjugate
from scipy.sparse import csr_matrix, csc_matrix, coo_matrix, bmat, eye
from scipy import rand, mat, real, imag, linspace, hstack, vstack, exp, cos, sin, pi
from pyamg.util.linalg import norm
import pyamg
from scipy.optimize import fminbound, fmin

__all__ = ['one_D_helmholtz', 'min_wave']


def min_wave(A, omega, x, tol=1e-5, maxiter=25):
    '''
    parameters
    ----------
    A : {matrix}
        1D Helmholtz Operator
    omega : {scalar}
        Wavenumber used to discretize Helmholtz problem
    x : {array}
        1D mesh for the problem
    tol : {scalar}
        minimization tolerance
    maxiter : {integer}
        maximum iters for minimization algorithm

    returns
    -------
    Applies minimization algorithm to find numerically lowest
    energy wavenumber for the matrix A, i.e., the omega shift
    that minimizes <Ac, c> / <c, c>, for c = cosine((omega+shift)x)
    '''
    x = ravel(x)

    # Define scalar objective function, ignoring the
    # boundaries by only considering A*c at [1:-1]
    def obj_fcn(alpha):
        c = cos((omega + alpha) * x)
        Ac = (A * c)[1:-1]
        return norm(Ac) / norm(c[1:-1])

    (xopt, fval, ierr, numfunc) = fminbound(obj_fcn, -0.99 * omega,
            0.99 * omega, xtol=tol, maxfun=maxiter, full_output=True, disp=0)

    #print "Minimizer = %1.4f, Function Value at Min = %1.4e\nError Flag = %d,\
    #       Number of function evals = %d" % (xopt, fval, ierr, numfunc)

    return xopt


def one_D_helmholtz(h, omega=1.0, nplane_waves=2):
    '''
    parameters
    ----------
    h : {int}
        Number of grid spacings for 1-D Helmholtz
    omega : {float}
        Defines Helmholtz wave number
    nplane_waves : {int}
        Defines the number of planewaves used for the near null-space modes, B.
        1: B = [ exp(ikx) ]
        2: B = [ real(exp(ikx)), complex(exp(ikx)) ]

    returns
    -------
    dictionary containing:
        A : {matrix-like}
            LHS of linear system for Helmholtz problem,
            -laplace(u) - omega^2 u = f
        mesh_h : {float}
            mesh size
        vertices : {array-like}
            [X, Y]
        elements : {None}
            None, just using 1-D finite-differencing
    '''
    # Ensure Repeatability of "random" initial guess
    random.seed(10)

    # Mesh Spacing
    mesh_h = 1.0 / (float(h) - 1.0)

    # Construct Real Operator
    reA = pyamg.gallery.poisson((h,), format='csr')
    reA = reA - mesh_h * mesh_h * omega * omega * \
        eye(reA.shape[0], reA.shape[1], format='csr')
    dimen = reA.shape[0]

    # Construct Imaginary Operator
    imA = csr_matrix(coo_matrix((array([2.0 * mesh_h * omega]),
                                 (array([0]), array([0]))), shape=reA.shape))

    # Enforce Radiation Boundary Conditions at first grid point
    reA.data[1] = -2.0

    # In order to maintain symmetry scale the first equation by 1/2
    reA.data[0] = 0.5 * reA.data[0]
    reA.data[1] = 0.5 * reA.data[1]
    imA.data[0] = 0.5 * imA.data[0]

    # Create complex-valued system
    complexA = reA + 1.0j * imA

    # For this case, the CG (continuous Galerkin) case is the default elements
    # and vertices, because there is no DG mesh to speak of
    elements = None
    vertices = hstack((linspace(-1.0, 1.0, h).reshape(-1, 1), zeros((h, 1))))

    # Near null-space modes are 1-D Plane waves: [exp(ikx), i exp(ikx)]
    B = zeros((dimen, nplane_waves), dtype=complex)
    shift = min_wave(complexA, omega, vertices[:, 0], tol=1e-9, maxiter=15)
    if nplane_waves == 1:
        B[:, 0] = exp(1.0j * (omega + shift) * vertices[:, 0])
    elif nplane_waves == 2:
        B[:, 0] = cos((omega + shift) * vertices[:, 0])
        B[:, 1] = sin((omega + shift) * vertices[:, 0])

    return {'A': complexA, 'B': B, 'mesh_h': mesh_h,
            'elements': elements, 'vertices': vertices}
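An illustrative call of one_D_helmholtz (shapes deduced from the code above; the print statement keeps the module's Python 2 style):

# Hedged usage sketch; parameter values are illustrative.
prob = one_D_helmholtz(64, omega=10.0, nplane_waves=2)
A, B = prob['A'], prob['B']
print A.shape, B.shape  # (64, 64) complex operator, (64, 2) plane-wave modes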
from bluebottle.projects.serializers import ProjectPreviewSerializer
from bluebottle.quotes.serializers import QuoteSerializer
from bluebottle.slides.serializers import SlideSerializer
from bluebottle.statistics.serializers import StatisticSerializer
from rest_framework import serializers


class HomePageSerializer(serializers.Serializer):
    id = serializers.CharField()
    quotes = QuoteSerializer(many=True)
    slides = SlideSerializer(many=True)
    statistics = StatisticSerializer(many=True)
    projects = ProjectPreviewSerializer(many=True)
"""Package contenant la commande 'scripting alerte info'.""" from primaires.interpreteur.masque.parametre import Parametre from primaires.format.fonctions import echapper_accolades from primaires.format.date import get_date class PrmInfo(Parametre): """Commande 'scripting alerte info'""" def __init__(self): """Constructeur du paramètre.""" Parametre.__init__(self, "info", "info") self.schema = "<nombre>" self.aide_courte = "affiche des informations sur l'alerte" self.aide_longue = \ "Affiche des informations sur l'alerte permettant de la corriger." def interpreter(self, personnage, dic_masques): """Méthode d'interprétation de commande""" nombre = dic_masques["nombre"].nombre try: alerte = type(self).importeur.scripting.alertes[nombre] except KeyError: personnage << "|err|Ce numéro d'alerte est invalide.|ff|" else: msg = "Informations sur l'alerte {} :".format(alerte.no) msg += "\n S'est produit sur {} {}".format(alerte.type, alerte.objet) + " " + get_date(alerte.date.timetuple()) msg += "\n Evenement {}, test {}, ligne {}".format( alerte.evenement, echapper_accolades(alerte.test), alerte.no_ligne) msg += "\n {}\n".format(echapper_accolades(alerte.ligne)) msg += "\n Message d'erreur : |err|{}|ff|".format( echapper_accolades(alerte.message)) if personnage.nom_groupe == "administrateur": msg += "\n Traceback Python :\n {}".format( echapper_accolades(alerte.traceback)) personnage << msg
def helloworld():
    """Hello world routine!"""
    print("Hello world!")
import os
import os.path as op

import pytest
import numpy as np
from numpy.testing import (assert_array_equal, assert_equal, assert_allclose,
                           assert_array_less, assert_almost_equal)
import itertools

import mne
from mne.datasets import testing
from mne.fixes import _get_img_fdata
from mne import read_trans, write_trans
from mne.io import read_info
from mne.transforms import (invert_transform, _get_trans,
                            rotation, rotation3d, rotation_angles, _find_trans,
                            combine_transforms, apply_trans, translation,
                            get_ras_to_neuromag_trans, _pol_to_cart,
                            quat_to_rot, rot_to_quat, _angle_between_quats,
                            _find_vector_rotation, _sph_to_cart, _cart_to_sph,
                            _topo_to_sph, _average_quats,
                            _SphericalSurfaceWarp as SphericalSurfaceWarp,
                            rotation3d_align_z_axis, _read_fs_xfm,
                            _write_fs_xfm, _quat_real, _fit_matched_points,
                            _quat_to_euler, _euler_to_quat,
                            _quat_to_affine, _compute_r2, _validate_pipeline)
from mne.utils import requires_nibabel, requires_dipy

data_path = testing.data_path(download=False)
fname = op.join(data_path, 'MEG', 'sample', 'sample_audvis_trunc-trans.fif')
fname_eve = op.join(data_path, 'MEG', 'sample',
                    'sample_audvis_trunc_raw-eve.fif')
subjects_dir = op.join(data_path, 'subjects')
fname_t1 = op.join(subjects_dir, 'fsaverage', 'mri', 'T1.mgz')

base_dir = op.join(op.dirname(__file__), '..', 'io', 'tests', 'data')
fname_trans = op.join(base_dir, 'sample-audvis-raw-trans.txt')
test_fif_fname = op.join(base_dir, 'test_raw.fif')
ctf_fname = op.join(base_dir, 'test_ctf_raw.fif')
hp_fif_fname = op.join(base_dir, 'test_chpi_raw_sss.fif')


def test_tps():
    """Test TPS warping."""
    az = np.linspace(0., 2 * np.pi, 20, endpoint=False)
    pol = np.linspace(0, np.pi, 12)[1:-1]
    sph = np.array(np.meshgrid(1, az, pol, indexing='ij'))
    sph.shape = (3, -1)
    assert_equal(sph.shape[1], 200)
    source = _sph_to_cart(sph.T)
    destination = source.copy()
    destination *= 2
    destination[:, 0] += 1
    # fit with 100 points
    warp = SphericalSurfaceWarp()
    assert 'no ' in repr(warp)
    warp.fit(source[::3], destination[::2])
    assert 'oct5' in repr(warp)
    destination_est = warp.transform(source)
    assert_allclose(destination_est, destination, atol=1e-3)


@testing.requires_testing_data
def test_get_trans():
    """Test converting '-trans.txt' to '-trans.fif'."""
    trans = read_trans(fname)
    trans = invert_transform(trans)  # starts out as head->MRI, so invert
    trans_2 = _get_trans(fname_trans)[0]
    assert trans.__eq__(trans_2, atol=1e-5)


@testing.requires_testing_data
def test_io_trans(tmpdir):
    """Test reading and writing of trans files."""
    tempdir = str(tmpdir)
    os.mkdir(op.join(tempdir, 'sample'))
    pytest.raises(RuntimeError, _find_trans, 'sample', subjects_dir=tempdir)
    trans0 = read_trans(fname)
    fname1 = op.join(tempdir, 'sample', 'test-trans.fif')
    trans0.save(fname1)
    assert fname1 == _find_trans('sample', subjects_dir=tempdir)
    trans1 = read_trans(fname1)

    # check all properties
    assert trans0 == trans1

    # check reading non -trans.fif files
    pytest.raises(IOError, read_trans, fname_eve)

    # check warning on bad filenames
    fname2 = op.join(tempdir, 'trans-test-bad-name.fif')
    with pytest.warns(RuntimeWarning, match='-trans.fif'):
        write_trans(fname2, trans0)


def test_get_ras_to_neuromag_trans():
    """Test the coordinate transformation from ras to neuromag."""
    # create model points in neuromag-like space
    rng = np.random.RandomState(0)
    anterior = [0, 1, 0]
    left = [-1, 0, 0]
    right = [.8, 0, 0]
    up = [0, 0, 1]
    rand_pts = rng.uniform(-1, 1, (3, 3))
    pts = np.vstack((anterior, left, right, up, rand_pts))

    # change coord system
    rx, ry, rz, tx, ty, tz = rng.uniform(-2 * np.pi, 2 * np.pi, 6)
    trans = np.dot(translation(tx, ty, tz), rotation(rx, ry, rz))
    pts_changed = apply_trans(trans, pts)

    # transform back into original space
    nas, lpa, rpa = pts_changed[:3]
    hsp_trans = get_ras_to_neuromag_trans(nas, lpa, rpa)
    pts_restored = apply_trans(hsp_trans, pts_changed)

    err = "Neuromag transformation failed"
    assert_allclose(pts_restored, pts, atol=1e-6, err_msg=err)


def _cartesian_to_sphere(x, y, z):
    """Convert using old function."""
    hypotxy = np.hypot(x, y)
    r = np.hypot(hypotxy, z)
    elev = np.arctan2(z, hypotxy)
    az = np.arctan2(y, x)
    return az, elev, r


def _sphere_to_cartesian(theta, phi, r):
    """Convert using old function."""
    z = r * np.sin(phi)
    rcos_phi = r * np.cos(phi)
    x = rcos_phi * np.cos(theta)
    y = rcos_phi * np.sin(theta)
    return x, y, z


def test_sph_to_cart():
    """Test conversion between sphere and cartesian."""
    # Simple test, expected value (11, 0, 0)
    r, theta, phi = 11., 0., np.pi / 2.
    z = r * np.cos(phi)
    rsin_phi = r * np.sin(phi)
    x = rsin_phi * np.cos(theta)
    y = rsin_phi * np.sin(theta)
    coord = _sph_to_cart(np.array([[r, theta, phi]]))[0]
    assert_allclose(coord, (x, y, z), atol=1e-7)
    assert_allclose(coord, (r, 0, 0), atol=1e-7)
    rng = np.random.RandomState(0)
    # round-trip test
    coords = rng.randn(10, 3)
    assert_allclose(_sph_to_cart(_cart_to_sph(coords)), coords, atol=1e-5)
    # equivalence tests to old versions
    for coord in coords:
        sph = _cart_to_sph(coord[np.newaxis])
        cart = _sph_to_cart(sph)
        sph_old = np.array(_cartesian_to_sphere(*coord))
        cart_old = _sphere_to_cartesian(*sph_old)
        sph_old[1] = np.pi / 2. - sph_old[1]  # new convention
        assert_allclose(sph[0], sph_old[[2, 0, 1]], atol=1e-7)
        assert_allclose(cart[0], cart_old, atol=1e-7)
        assert_allclose(cart[0], coord, atol=1e-7)


def _polar_to_cartesian(theta, r):
    """Transform polar coordinates to cartesian."""
    x = r * np.cos(theta)
    y = r * np.sin(theta)
    return x, y


def test_polar_to_cartesian():
    """Test helper transform function from polar to cartesian."""
    r = 1
    theta = np.pi
    # expected values are (-1, 0)
    x = r * np.cos(theta)
    y = r * np.sin(theta)
    coord = _pol_to_cart(np.array([[r, theta]]))[0]
    # np.pi is an approx since pi is irrational
    assert_allclose(coord, (x, y), atol=1e-7)
    assert_allclose(coord, (-1, 0), atol=1e-7)
    assert_allclose(coord, _polar_to_cartesian(theta, r), atol=1e-7)
    rng = np.random.RandomState(0)
    r = rng.randn(10)
    theta = rng.rand(10) * (2 * np.pi)
    polar = np.array((r, theta)).T
    assert_allclose([_polar_to_cartesian(p[1], p[0]) for p in polar],
                    _pol_to_cart(polar), atol=1e-7)


def _topo_to_phi_theta(theta, radius):
    """Convert using old function."""
    sph_phi = (0.5 - radius) * 180
    sph_theta = -theta
    return sph_phi, sph_theta


def test_topo_to_sph():
    """Test topo to sphere conversion."""
    rng = np.random.RandomState(0)
    angles = rng.rand(10) * 360
    radii = rng.rand(10)
    angles[0] = 30
    radii[0] = 0.25
    # new way
    sph = _topo_to_sph(np.array([angles, radii]).T)
    new = _sph_to_cart(sph)
    new[:, [0, 1]] = new[:, [1, 0]] * [-1, 1]
    # old way
    for ii, (angle, radius) in enumerate(zip(angles, radii)):
        sph_phi, sph_theta = _topo_to_phi_theta(angle, radius)
        if ii == 0:
            assert_allclose(_topo_to_phi_theta(angle, radius), [45, -30])
        azimuth = sph_theta / 180.0 * np.pi
        elevation = sph_phi / 180.0 * np.pi
        assert_allclose(sph[ii], [1., azimuth, np.pi / 2. - elevation],
                        atol=1e-7)
        r = np.ones_like(radius)
        x, y, z = _sphere_to_cartesian(azimuth, elevation, r)
        pos = [-y, x, z]
        if ii == 0:
            expected = np.array([1. / 2., np.sqrt(3) / 2., 1.])
            expected /= np.sqrt(2)
            assert_allclose(pos, expected, atol=1e-7)
        assert_allclose(pos, new[ii], atol=1e-7)


def test_rotation():
    """Test conversion between rotation angles and transformation matrix."""
    tests = [(0, 0, 1), (.5, .5, .5), (np.pi, 0, -1.5)]
    for rot in tests:
        x, y, z = rot
        m = rotation3d(x, y, z)
        m4 = rotation(x, y, z)
        assert_array_equal(m, m4[:3, :3])
        back = rotation_angles(m)
        assert_almost_equal(actual=back, desired=rot, decimal=12)
        back4 = rotation_angles(m4)
        assert_almost_equal(actual=back4, desired=rot, decimal=12)


def test_rotation3d_align_z_axis():
    """Test rotation3d_align_z_axis."""
    # The more complex z axis fails the assert presumably due to tolerance
    #
    inp_zs = [[0, 0, 1], [0, 1, 0], [1, 0, 0], [0, 0, -1],
              [-0.75071668, -0.62183808, 0.22302888]]

    exp_res = [[[1., 0., 0.], [0., 1., 0.], [0., 0., 1.]],
               [[1., 0., 0.], [0., 0., 1.], [0., -1., 0.]],
               [[0., 0., 1.], [0., 1., 0.], [-1., 0., 0.]],
               [[1., 0., 0.], [0., -1., 0.], [0., 0., -1.]],
               [[0.53919688, -0.38169517, -0.75071668],
                [-0.38169517, 0.683832, -0.62183808],
                [0.75071668, 0.62183808, 0.22302888]]]

    for res, z in zip(exp_res, inp_zs):
        assert_allclose(res, rotation3d_align_z_axis(z), atol=1e-7)


@testing.requires_testing_data
def test_combine():
    """Test combining transforms."""
    trans = read_trans(fname)
    inv = invert_transform(trans)
    combine_transforms(trans, inv, trans['from'], trans['from'])
    pytest.raises(RuntimeError, combine_transforms, trans, inv,
                  trans['to'], trans['from'])
    pytest.raises(RuntimeError, combine_transforms, trans, inv,
                  trans['from'], trans['to'])
    pytest.raises(RuntimeError, combine_transforms, trans, trans,
                  trans['from'], trans['to'])


def test_quaternions():
    """Test quaternion calculations."""
    rots = [np.eye(3)]
    for fname in [test_fif_fname, ctf_fname, hp_fif_fname]:
        rots += [read_info(fname)['dev_head_t']['trans'][:3, :3]]

    # nasty numerical cases
    rots += [np.array([
        [-0.99978541, -0.01873462, -0.00898756],
        [-0.01873462, 0.62565561, 0.77987608],
        [-0.00898756, 0.77987608, -0.62587152],
    ])]
    rots += [np.array([
        [0.62565561, -0.01873462, 0.77987608],
        [-0.01873462, -0.99978541, -0.00898756],
        [0.77987608, -0.00898756, -0.62587152],
    ])]
    rots += [np.array([
        [-0.99978541, -0.00898756, -0.01873462],
        [-0.00898756, -0.62587152, 0.77987608],
        [-0.01873462, 0.77987608, 0.62565561],
    ])]

    for rot in rots:
        assert_allclose(rot, quat_to_rot(rot_to_quat(rot)),
                        rtol=1e-5, atol=1e-5)
        rot = rot[np.newaxis, np.newaxis, :, :]
        assert_allclose(rot, quat_to_rot(rot_to_quat(rot)),
                        rtol=1e-5, atol=1e-5)

    # let's make sure our angle function works in some reasonable way
    for ii in range(3):
        for jj in range(3):
            a = np.zeros(3)
            b = np.zeros(3)
            a[ii] = 1.
            b[jj] = 1.
            expected = np.pi if ii != jj else 0.
            assert_allclose(_angle_between_quats(a, b), expected, atol=1e-5)

    y_180 = np.array([[-1, 0, 0], [0, 1, 0], [0, 0, -1.]])
    assert_allclose(_angle_between_quats(rot_to_quat(y_180),
                                         np.zeros(3)), np.pi)
    h_180_attitude_90 = np.array([[0, 1, 0], [1, 0, 0], [0, 0, -1.]])
    assert_allclose(_angle_between_quats(rot_to_quat(h_180_attitude_90),
                                         np.zeros(3)), np.pi)


def test_vector_rotation():
    """Test basic rotation matrix math."""
    x = np.array([1., 0., 0.])
    y = np.array([0., 1., 0.])
    rot = _find_vector_rotation(x, y)
    assert_array_equal(rot,
                       [[0, -1, 0], [1, 0, 0], [0, 0, 1]])
    quat_1 = rot_to_quat(rot)
    quat_2 = rot_to_quat(np.eye(3))
    assert_allclose(_angle_between_quats(quat_1, quat_2), np.pi / 2.)


def test_average_quats():
    """Test averaging of quaternions."""
    sq2 = 1. / np.sqrt(2.)
quats = np.array([[0, sq2, sq2], [0, sq2, sq2], [0, sq2, 0], [0, 0, sq2], [sq2, 0, 0]], float) # In MATLAB: # quats = [[0, sq2, sq2, 0]; [0, sq2, sq2, 0]; # [0, sq2, 0, sq2]; [0, 0, sq2, sq2]; [sq2, 0, 0, sq2]]; expected = [quats[0], quats[0], [0, 0.788675134594813, 0.577350269189626], [0, 0.657192299694123, 0.657192299694123], [0.100406058540540, 0.616329446922803, 0.616329446922803]] # Averaging the first two should give the same thing: for lim, ex in enumerate(expected): assert_allclose(_average_quats(quats[:lim + 1]), ex, atol=1e-7) quats[1] *= -1 # same quaternion (hidden value is zero here)! rot_0, rot_1 = quat_to_rot(quats[:2]) assert_allclose(rot_0, rot_1, atol=1e-7) for lim, ex in enumerate(expected): assert_allclose(_average_quats(quats[:lim + 1]), ex, atol=1e-7) # Assert some symmetry count = 0 extras = [[sq2, sq2, 0]] + list(np.eye(3)) for quat in np.concatenate((quats, expected, extras)): if np.isclose(_quat_real(quat), 0., atol=1e-7): # can flip sign count += 1 angle = _angle_between_quats(quat, -quat) assert_allclose(angle, 0., atol=1e-7) rot_0, rot_1 = quat_to_rot(np.array((quat, -quat))) assert_allclose(rot_0, rot_1, atol=1e-7) assert count == 4 + len(extras) @testing.requires_testing_data @pytest.mark.parametrize('subject', ('fsaverage', 'sample')) def test_fs_xfm(subject, tmpdir): """Test reading and writing of Freesurfer transforms.""" fname = op.join(data_path, 'subjects', subject, 'mri', 'transforms', 'talairach.xfm') xfm, kind = _read_fs_xfm(fname) if subject == 'fsaverage': assert_allclose(xfm, np.eye(4), atol=1e-5) # fsaverage is in MNI assert kind == 'MNI Transform File' tempdir = str(tmpdir) fname_out = op.join(tempdir, 'out.xfm') _write_fs_xfm(fname_out, xfm, kind) xfm_read, kind_read = _read_fs_xfm(fname_out) assert kind_read == kind assert_allclose(xfm, xfm_read, rtol=1e-5, atol=1e-5) # Some wacky one xfm[:3] = np.random.RandomState(0).randn(3, 4) _write_fs_xfm(fname_out, xfm, 'foo') xfm_read, kind_read = _read_fs_xfm(fname_out) assert kind_read == 'foo' assert_allclose(xfm, xfm_read, rtol=1e-5, atol=1e-5) # degenerate conditions with open(fname_out, 'w') as fid: fid.write('foo') with pytest.raises(ValueError, match='Failed to find'): _read_fs_xfm(fname_out) _write_fs_xfm(fname_out, xfm[:2], 'foo') with pytest.raises(ValueError, match='Could not find'): _read_fs_xfm(fname_out) @pytest.fixture() def quats(): """Make some unit quats.""" quats = np.random.RandomState(0).randn(5, 3) quats[:, 0] = 0 # identity quats /= 2 * np.linalg.norm(quats, axis=1, keepdims=True) # some real part return quats def _check_fit_matched_points( p, x, weights, do_scale, angtol=1e-5, dtol=1e-5, stol=1e-7): __tracebackhide__ = True mne.coreg._ALLOW_ANALITICAL = False try: params = mne.coreg.fit_matched_points( p, x, weights=weights, scale=do_scale, out='params') finally: mne.coreg._ALLOW_ANALITICAL = True quat_an, scale_an = _fit_matched_points(p, x, weights, scale=do_scale) assert len(params) == 6 + int(do_scale) q_co = _euler_to_quat(params[:3]) translate_co = params[3:6] angle = np.rad2deg(_angle_between_quats(quat_an[:3], q_co)) dist = np.linalg.norm(quat_an[3:] - translate_co) assert 0 <= angle < angtol, 'angle' assert 0 <= dist < dtol, 'dist' if do_scale: scale_co = params[6] assert_allclose(scale_an, scale_co, rtol=stol, err_msg='scale') # errs trans = _quat_to_affine(quat_an) trans[:3, :3] *= scale_an weights = np.ones(1) if weights is None else weights err_an = np.linalg.norm( weights[:, np.newaxis] * apply_trans(trans, p) - x) trans = mne.coreg._trans_from_params((True, True, 
do_scale), params) err_co = np.linalg.norm( weights[:, np.newaxis] * apply_trans(trans, p) - x) if err_an > 1e-14: assert err_an < err_co * 1.5 return quat_an, scale_an @pytest.mark.parametrize('scaling', [0.25, 1]) @pytest.mark.parametrize('do_scale', (True, False)) def test_fit_matched_points(quats, scaling, do_scale): """Test analytical least-squares matched point fitting.""" if scaling != 1 and not do_scale: return # no need to test this, it will not be good rng = np.random.RandomState(0) fro = rng.randn(10, 3) translation = rng.randn(3) for qi, quat in enumerate(quats): to = scaling * np.dot(quat_to_rot(quat), fro.T).T + translation for corrupted in (False, True): # mess up a point if corrupted: to[0, 2] += 100 weights = np.ones(len(to)) weights[0] = 0 else: weights = None est, scale_est = _check_fit_matched_points( fro, to, weights=weights, do_scale=do_scale) assert_allclose(scale_est, scaling, rtol=1e-5) assert_allclose(est[:3], quat, atol=1e-14) assert_allclose(est[3:], translation, atol=1e-14) # if we don't adjust for the corruption above, it should get worse angle = dist = None for weighted in (False, True): if not weighted: weights = None dist_bounds = (5, 20) if scaling == 1: angle_bounds = (5, 95) angtol, dtol, stol = 1, 15, 3 else: angle_bounds = (5, 105) angtol, dtol, stol = 20, 15, 3 else: weights = np.ones(len(to)) weights[0] = 10 # weighted=True here means "make it worse" angle_bounds = (angle, 180) # unweighted values as new min dist_bounds = (dist, 100) if scaling == 1: # XXX this angtol is not great but there is a hard to # identify linalg/angle calculation bug on Travis... angtol, dtol, stol = 180, 70, 3 else: angtol, dtol, stol = 50, 70, 3 est, scale_est = _check_fit_matched_points( fro, to, weights=weights, do_scale=do_scale, angtol=angtol, dtol=dtol, stol=stol) assert not np.allclose(est[:3], quat, atol=1e-5) assert not np.allclose(est[3:], translation, atol=1e-5) angle = np.rad2deg(_angle_between_quats(est[:3], quat)) assert_array_less(angle_bounds[0], angle) assert_array_less(angle, angle_bounds[1]) dist = np.linalg.norm(est[3:] - translation) assert_array_less(dist_bounds[0], dist) assert_array_less(dist, dist_bounds[1]) def test_euler(quats): """Test euler transformations.""" euler = _quat_to_euler(quats) quats_2 = _euler_to_quat(euler) assert_allclose(quats, quats_2, atol=1e-14) quat_rot = quat_to_rot(quats) euler_rot = np.array([rotation(*e)[:3, :3] for e in euler]) assert_allclose(quat_rot, euler_rot, atol=1e-14) @requires_nibabel() @requires_dipy() @pytest.mark.slowtest @testing.requires_testing_data def test_volume_registration(): """Test volume registration.""" import nibabel as nib from dipy.align import resample T1 = nib.load(fname_t1) affine = np.eye(4) affine[0, 3] = 10 T1_resampled = resample(moving=T1.get_fdata(), static=T1.get_fdata(), moving_affine=T1.affine, static_affine=T1.affine, between_affine=np.linalg.inv(affine)) for pipeline in ('rigids', ('translation', 'sdr')): reg_affine, sdr_morph = mne.transforms.compute_volume_registration( T1_resampled, T1, pipeline=pipeline, zooms=10, niter=[5]) assert_allclose(affine, reg_affine, atol=0.25) T1_aligned = mne.transforms.apply_volume_registration( T1_resampled, T1, reg_affine, sdr_morph) r2 = _compute_r2(_get_img_fdata(T1_aligned), _get_img_fdata(T1)) assert 99.9 < r2 # check that all orders of the pipeline work for pipeline_len in range(1, 5): for pipeline in itertools.combinations( ('translation', 'rigid', 'affine', 'sdr'), pipeline_len): _validate_pipeline(pipeline) 
_validate_pipeline(list(pipeline)) with pytest.raises(ValueError, match='Steps in pipeline are out of order'): _validate_pipeline(('sdr', 'affine')) with pytest.raises(ValueError, match='Steps in pipeline should not be repeated'): _validate_pipeline(('affine', 'affine'))
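
# An extra illustrative check (a minimal sketch, not part of the original
# suite): it uses the ``rotation``, ``translation`` and ``apply_trans``
# helpers imported at the top of this module to confirm that inverting the
# 4x4 homogeneous matrix undoes a rigid transform.
def test_rigid_round_trip_example():
    """Round-trip a rigid transform through its matrix inverse."""
    rng = np.random.RandomState(0)
    pts = rng.randn(5, 3)
    trans = np.dot(translation(1., 2., 3.), rotation(0.1, 0.2, 0.3))
    moved = apply_trans(trans, pts)
    back = apply_trans(np.linalg.inv(trans), moved)
    assert_allclose(back, pts, atol=1e-12)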
from optparse import make_option

from django.core.management.base import BaseCommand, CommandError

from brambling.utils.payment import dwolla_update_tokens


class Command(BaseCommand):
    option_list = BaseCommand.option_list + (
        make_option(
            '--days',
            action='store',
            dest='days',
            default=15,
            help='Number of days ahead of time to update refresh tokens.'),
    )

    def handle(self, *args, **options):
        try:
            days = int(options['days'])
        except ValueError:
            raise CommandError("Days must be an integer value.")
        self.stdout.write("Updating dwolla tokens...")
        self.stdout.flush()
        count, test_count = dwolla_update_tokens(days)

        self.stdout.write("Live tokens updated: {}".format(count))
        self.stdout.write("Test tokens updated: {}".format(test_count))
        self.stdout.flush()
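# Usage sketch (the command name below is hypothetical; the real name is
# determined by this module's filename, which is not shown here):
#
#     python manage.py update_dwolla_tokens --days 30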
""" .. _tut-set-eeg-ref: Setting the EEG reference ========================= This tutorial describes how to set or change the EEG reference in MNE-Python. .. contents:: Page contents :local: :depth: 2 As usual we'll start by importing the modules we need, loading some :ref:`example data <sample-dataset>`, and cropping it to save memory. Since this tutorial deals specifically with EEG, we'll also restrict the dataset to just a few EEG channels so the plots are easier to see: """ import os import mne sample_data_folder = mne.datasets.sample.data_path() sample_data_raw_file = os.path.join(sample_data_folder, 'MEG', 'sample', 'sample_audvis_raw.fif') raw = mne.io.read_raw_fif(sample_data_raw_file, verbose=False) raw.crop(tmax=60).load_data() raw.pick(['EEG 0{:02}'.format(n) for n in range(41, 60)]) raw.plot() raw_new_ref = mne.add_reference_channels(raw, ref_channels=['EEG 999']) raw_new_ref.plot() raw_new_ref.set_eeg_reference(ref_channels=['EEG 050']) raw_new_ref.plot() raw_avg_ref = raw.copy().set_eeg_reference(ref_channels='average') raw_avg_ref.plot() raw.set_eeg_reference('average', projection=True) print(raw.info['projs']) for title, proj in zip(['Original', 'Average'], [False, True]): fig = raw.plot(proj=proj, n_channels=len(raw)) # make room for title fig.subplots_adjust(top=0.9) fig.suptitle('{} reference'.format(title), size='xx-large', weight='bold')
import base64 import json from twisted.internet.defer import inlineCallbacks, DeferredQueue, returnValue from twisted.web.http_headers import Headers from twisted.web import http from twisted.web.server import NOT_DONE_YET from vumi.config import ConfigContext from vumi.message import TransportUserMessage, TransportEvent from vumi.tests.helpers import VumiTestCase from vumi.tests.utils import MockHttpServer, LogCatcher from vumi.transports.vumi_bridge.client import StreamingClient from vumi.utils import http_request_full from go.apps.http_api.resource import ( StreamResourceMixin, StreamingConversationResource) from go.apps.tests.helpers import AppWorkerHelper from go.apps.http_api.vumi_app import StreamingHTTPWorker class TestStreamingHTTPWorker(VumiTestCase): @inlineCallbacks def setUp(self): self.app_helper = self.add_helper(AppWorkerHelper(StreamingHTTPWorker)) self.config = { 'health_path': '/health/', 'web_path': '/foo', 'web_port': 0, 'metrics_prefix': 'metrics_prefix.', 'conversation_cache_ttl': 0, } self.app = yield self.app_helper.get_app_worker(self.config) self.addr = self.app.webserver.getHost() self.url = 'http://%s:%s%s' % ( self.addr.host, self.addr.port, self.config['web_path']) conv_config = { 'http_api': { 'api_tokens': [ 'token-1', 'token-2', 'token-3', ], 'metric_store': 'metric_store', } } conversation = yield self.app_helper.create_conversation( config=conv_config) yield self.app_helper.start_conversation(conversation) self.conversation = yield self.app_helper.get_conversation( conversation.key) self.auth_headers = { 'Authorization': ['Basic ' + base64.b64encode('%s:%s' % ( conversation.user_account.key, 'token-1'))], } self.client = StreamingClient() # Mock server to test HTTP posting of inbound messages & events self.mock_push_server = MockHttpServer(self.handle_request) yield self.mock_push_server.start() self.add_cleanup(self.mock_push_server.stop) self.push_calls = DeferredQueue() self._setup_wait_for_request() self.add_cleanup(self._wait_for_requests) def _setup_wait_for_request(self): # Hackery to wait for the request to finish self._req_state = { 'queue': DeferredQueue(), 'expected': 0, } orig_track = StreamingConversationResource.track_request orig_release = StreamingConversationResource.release_request def track_wrapper(*args, **kw): self._req_state['expected'] += 1 return orig_track(*args, **kw) def release_wrapper(*args, **kw): return orig_release(*args, **kw).addCallback( self._req_state['queue'].put) self.patch( StreamingConversationResource, 'track_request', track_wrapper) self.patch( StreamingConversationResource, 'release_request', release_wrapper) @inlineCallbacks def _wait_for_requests(self): while self._req_state['expected'] > 0: yield self._req_state['queue'].get() self._req_state['expected'] -= 1 def handle_request(self, request): self.push_calls.put(request) return NOT_DONE_YET @inlineCallbacks def pull_message(self, count=1): url = '%s/%s/messages.json' % (self.url, self.conversation.key) messages = DeferredQueue() errors = DeferredQueue() receiver = self.client.stream( TransportUserMessage, messages.put, errors.put, url, Headers(self.auth_headers)) received_messages = [] for msg_id in range(count): yield self.app_helper.make_dispatch_inbound( 'in %s' % (msg_id,), message_id=str(msg_id), conv=self.conversation) recv_msg = yield messages.get() received_messages.append(recv_msg) receiver.disconnect() returnValue((receiver, received_messages)) def assert_bad_request(self, response, reason): self.assertEqual(response.code, http.BAD_REQUEST) 
        self.assertEqual(
            response.headers.getRawHeaders('content-type'),
            ['application/json; charset=utf-8'])
        data = json.loads(response.delivered_body)
        self.assertEqual(data, {
            "success": False,
            "reason": reason,
        })

    @inlineCallbacks
    def test_proxy_buffering_headers_off(self):
        # This is the default, but we patch it anyway to make sure we're
        # testing the right thing should the default change.
        self.patch(StreamResourceMixin, 'proxy_buffering', False)
        receiver, received_messages = yield self.pull_message()
        headers = receiver._response.headers
        self.assertEqual(headers.getRawHeaders('x-accel-buffering'), ['no'])

    @inlineCallbacks
    def test_proxy_buffering_headers_on(self):
        self.patch(StreamResourceMixin, 'proxy_buffering', True)
        receiver, received_messages = yield self.pull_message()
        headers = receiver._response.headers
        self.assertEqual(headers.getRawHeaders('x-accel-buffering'), ['yes'])

    @inlineCallbacks
    def test_content_type(self):
        receiver, received_messages = yield self.pull_message()
        headers = receiver._response.headers
        self.assertEqual(
            headers.getRawHeaders('content-type'),
            ['application/json; charset=utf-8'])

    @inlineCallbacks
    def test_messages_stream(self):
        url = '%s/%s/messages.json' % (self.url, self.conversation.key)

        messages = DeferredQueue()
        errors = DeferredQueue()
        receiver = self.client.stream(
            TransportUserMessage, messages.put, errors.put, url,
            Headers(self.auth_headers))

        msg1 = yield self.app_helper.make_dispatch_inbound(
            'in 1', message_id='1', conv=self.conversation)

        msg2 = yield self.app_helper.make_dispatch_inbound(
            'in 2', message_id='2', conv=self.conversation)

        rm1 = yield messages.get()
        rm2 = yield messages.get()

        receiver.disconnect()

        # Sometimes messages arrive out of order if we're hitting real redis.
        rm1, rm2 = sorted([rm1, rm2], key=lambda m: m['message_id'])

        self.assertEqual(msg1['message_id'], rm1['message_id'])
        self.assertEqual(msg2['message_id'], rm2['message_id'])
        self.assertEqual(errors.size, None)

    @inlineCallbacks
    def test_events_stream(self):
        url = '%s/%s/events.json' % (self.url, self.conversation.key)

        events = DeferredQueue()
        errors = DeferredQueue()
        # Route errors to the errors queue (it was previously unused because
        # events.put was passed twice).
        receiver = yield self.client.stream(TransportEvent, events.put,
                                            errors.put, url,
                                            Headers(self.auth_headers))

        msg1 = yield self.app_helper.make_stored_outbound(
            self.conversation, 'out 1', message_id='1')
        ack1 = yield self.app_helper.make_dispatch_ack(
            msg1, conv=self.conversation)

        msg2 = yield self.app_helper.make_stored_outbound(
            self.conversation, 'out 2', message_id='2')
        ack2 = yield self.app_helper.make_dispatch_ack(
            msg2, conv=self.conversation)

        ra1 = yield events.get()
        ra2 = yield events.get()

        receiver.disconnect()

        # Sometimes messages arrive out of order if we're hitting real redis.
        if ra1['event_id'] != ack1['event_id']:
            ra1, ra2 = ra2, ra1

        self.assertEqual(ack1['event_id'], ra1['event_id'])
        self.assertEqual(ack2['event_id'], ra2['event_id'])
        self.assertEqual(errors.size, None)

    @inlineCallbacks
    def test_missing_auth(self):
        url = '%s/%s/messages.json' % (self.url, self.conversation.key)

        queue = DeferredQueue()
        receiver = self.client.stream(
            TransportUserMessage, queue.put, queue.put, url)
        response = yield receiver.get_response()
        self.assertEqual(response.code, http.UNAUTHORIZED)
        self.assertEqual(response.headers.getRawHeaders('www-authenticate'), [
            'basic realm="Conversation Realm"'])

    @inlineCallbacks
    def test_invalid_auth(self):
        url = '%s/%s/messages.json' % (self.url, self.conversation.key)

        queue = DeferredQueue()

        headers = Headers({
            'Authorization': ['Basic %s' % (base64.b64encode('foo:bar'),)],
        })

        receiver = self.client.stream(
            TransportUserMessage, queue.put, queue.put, url, headers)
        response = yield receiver.get_response()
        self.assertEqual(response.code, http.UNAUTHORIZED)
        self.assertEqual(response.headers.getRawHeaders('www-authenticate'), [
            'basic realm="Conversation Realm"'])

    @inlineCallbacks
    def test_send_to(self):
        msg = {
            'to_addr': '+2345',
            'content': 'foo',
            'message_id': 'evil_id',
        }

        # TaggingMiddleware.add_tag_to_msg(msg, self.tag)
        url = '%s/%s/messages.json' % (self.url, self.conversation.key)
        response = yield http_request_full(url, json.dumps(msg),
                                           self.auth_headers, method='PUT')

        self.assertEqual(
            response.headers.getRawHeaders('content-type'),
            ['application/json; charset=utf-8'])
        self.assertEqual(response.code, http.OK)
        put_msg = json.loads(response.delivered_body)

        [sent_msg] = self.app_helper.get_dispatched_outbound()
        self.assertEqual(sent_msg['to_addr'], put_msg['to_addr'])
        self.assertEqual(sent_msg['helper_metadata'], {
            'go': {
                'conversation_key': self.conversation.key,
                'conversation_type': 'http_api',
                'user_account': self.conversation.user_account.key,
            },
        })
        # We do not respect the message_id that's been given.
self.assertNotEqual(sent_msg['message_id'], msg['message_id']) self.assertEqual(sent_msg['message_id'], put_msg['message_id']) self.assertEqual(sent_msg['to_addr'], msg['to_addr']) self.assertEqual(sent_msg['from_addr'], None) @inlineCallbacks def test_send_to_within_content_length_limit(self): self.conversation.config['http_api'].update({ 'content_length_limit': 182, }) yield self.conversation.save() msg = { 'content': 'foo', 'to_addr': '+1234', } url = '%s/%s/messages.json' % (self.url, self.conversation.key) response = yield http_request_full(url, json.dumps(msg), self.auth_headers, method='PUT') self.assertEqual( response.headers.getRawHeaders('content-type'), ['application/json; charset=utf-8']) put_msg = json.loads(response.delivered_body) self.assertEqual(response.code, http.OK) [sent_msg] = self.app_helper.get_dispatched_outbound() self.assertEqual(sent_msg['to_addr'], put_msg['to_addr']) self.assertEqual(sent_msg['helper_metadata'], { 'go': { 'conversation_key': self.conversation.key, 'conversation_type': 'http_api', 'user_account': self.conversation.user_account.key, }, }) self.assertEqual(sent_msg['message_id'], put_msg['message_id']) self.assertEqual(sent_msg['session_event'], None) self.assertEqual(sent_msg['to_addr'], '+1234') self.assertEqual(sent_msg['from_addr'], None) @inlineCallbacks def test_send_to_content_too_long(self): self.conversation.config['http_api'].update({ 'content_length_limit': 10, }) yield self.conversation.save() msg = { 'content': "This message is longer than 10 characters.", 'to_addr': '+1234', } url = '%s/%s/messages.json' % (self.url, self.conversation.key) response = yield http_request_full( url, json.dumps(msg), self.auth_headers, method='PUT') self.assert_bad_request( response, "Payload content too long: 42 > 10") @inlineCallbacks def test_send_to_with_evil_content(self): msg = { 'content': 0xBAD, 'to_addr': '+1234', } url = '%s/%s/messages.json' % (self.url, self.conversation.key) response = yield http_request_full(url, json.dumps(msg), self.auth_headers, method='PUT') self.assert_bad_request( response, "Invalid or missing value for payload key 'content'") @inlineCallbacks def test_send_to_with_evil_to_addr(self): msg = { 'content': 'good', 'to_addr': 1234, } url = '%s/%s/messages.json' % (self.url, self.conversation.key) response = yield http_request_full(url, json.dumps(msg), self.auth_headers, method='PUT') self.assert_bad_request( response, "Invalid or missing value for payload key 'to_addr'") @inlineCallbacks def test_in_reply_to(self): inbound_msg = yield self.app_helper.make_stored_inbound( self.conversation, 'in 1', message_id='1') msg = { 'content': 'foo', 'in_reply_to': inbound_msg['message_id'], } url = '%s/%s/messages.json' % (self.url, self.conversation.key) response = yield http_request_full(url, json.dumps(msg), self.auth_headers, method='PUT') self.assertEqual( response.headers.getRawHeaders('content-type'), ['application/json; charset=utf-8']) put_msg = json.loads(response.delivered_body) self.assertEqual(response.code, http.OK) [sent_msg] = self.app_helper.get_dispatched_outbound() self.assertEqual(sent_msg['to_addr'], put_msg['to_addr']) self.assertEqual(sent_msg['helper_metadata'], { 'go': { 'conversation_key': self.conversation.key, 'conversation_type': 'http_api', 'user_account': self.conversation.user_account.key, }, }) self.assertEqual(sent_msg['message_id'], put_msg['message_id']) self.assertEqual(sent_msg['session_event'], None) self.assertEqual(sent_msg['to_addr'], inbound_msg['from_addr']) 
self.assertEqual(sent_msg['from_addr'], '9292') @inlineCallbacks def test_in_reply_to_within_content_length_limit(self): self.conversation.config['http_api'].update({ 'content_length_limit': 182, }) yield self.conversation.save() inbound_msg = yield self.app_helper.make_stored_inbound( self.conversation, 'in 1', message_id='1') msg = { 'content': 'foo', 'in_reply_to': inbound_msg['message_id'], } url = '%s/%s/messages.json' % (self.url, self.conversation.key) response = yield http_request_full(url, json.dumps(msg), self.auth_headers, method='PUT') self.assertEqual( response.headers.getRawHeaders('content-type'), ['application/json; charset=utf-8']) put_msg = json.loads(response.delivered_body) self.assertEqual(response.code, http.OK) [sent_msg] = self.app_helper.get_dispatched_outbound() self.assertEqual(sent_msg['to_addr'], put_msg['to_addr']) self.assertEqual(sent_msg['helper_metadata'], { 'go': { 'conversation_key': self.conversation.key, 'conversation_type': 'http_api', 'user_account': self.conversation.user_account.key, }, }) self.assertEqual(sent_msg['message_id'], put_msg['message_id']) self.assertEqual(sent_msg['session_event'], None) self.assertEqual(sent_msg['to_addr'], inbound_msg['from_addr']) self.assertEqual(sent_msg['from_addr'], '9292') @inlineCallbacks def test_in_reply_to_content_too_long(self): self.conversation.config['http_api'].update({ 'content_length_limit': 10, }) yield self.conversation.save() inbound_msg = yield self.app_helper.make_stored_inbound( self.conversation, 'in 1', message_id='1') msg = { 'content': "This message is longer than 10 characters.", 'in_reply_to': inbound_msg['message_id'], } url = '%s/%s/messages.json' % (self.url, self.conversation.key) response = yield http_request_full( url, json.dumps(msg), self.auth_headers, method='PUT') self.assert_bad_request( response, "Payload content too long: 42 > 10") @inlineCallbacks def test_in_reply_to_with_evil_content(self): inbound_msg = yield self.app_helper.make_stored_inbound( self.conversation, 'in 1', message_id='1') msg = { 'content': 0xBAD, 'in_reply_to': inbound_msg['message_id'], } url = '%s/%s/messages.json' % (self.url, self.conversation.key) response = yield http_request_full(url, json.dumps(msg), self.auth_headers, method='PUT') self.assert_bad_request( response, "Invalid or missing value for payload key 'content'") @inlineCallbacks def test_invalid_in_reply_to(self): msg = { 'content': 'foo', 'in_reply_to': '1', # this doesn't exist } url = '%s/%s/messages.json' % (self.url, self.conversation.key) response = yield http_request_full(url, json.dumps(msg), self.auth_headers, method='PUT') self.assert_bad_request(response, 'Invalid in_reply_to value') @inlineCallbacks def test_invalid_in_reply_to_with_missing_conversation_key(self): # create a message with no conversation inbound_msg = self.app_helper.make_inbound('in 1', message_id='msg-1') vumi_api = self.app_helper.vumi_helper.get_vumi_api() yield vumi_api.mdb.add_inbound_message(inbound_msg) msg = { 'content': 'foo', 'in_reply_to': inbound_msg['message_id'], } url = '%s/%s/messages.json' % (self.url, self.conversation.key) with LogCatcher(message='Invalid reply to message <Message .*>' ' which has no conversation key') as lc: response = yield http_request_full(url, json.dumps(msg), self.auth_headers, method='PUT') [error_log] = lc.messages() self.assert_bad_request(response, "Invalid in_reply_to value") self.assertTrue(inbound_msg['message_id'] in error_log) @inlineCallbacks def test_in_reply_to_with_evil_session_event(self): inbound_msg = 
yield self.app_helper.make_stored_inbound( self.conversation, 'in 1', message_id='1') msg = { 'content': 'foo', 'in_reply_to': inbound_msg['message_id'], 'session_event': 0xBAD5E55104, } url = '%s/%s/messages.json' % (self.url, self.conversation.key) response = yield http_request_full(url, json.dumps(msg), self.auth_headers, method='PUT') self.assert_bad_request( response, "Invalid or missing value for payload key 'session_event'") self.assertEqual(self.app_helper.get_dispatched_outbound(), []) @inlineCallbacks def test_in_reply_to_with_evil_message_id(self): inbound_msg = yield self.app_helper.make_stored_inbound( self.conversation, 'in 1', message_id='1') msg = { 'content': 'foo', 'in_reply_to': inbound_msg['message_id'], 'message_id': 'evil_id' } url = '%s/%s/messages.json' % (self.url, self.conversation.key) response = yield http_request_full(url, json.dumps(msg), self.auth_headers, method='PUT') self.assertEqual(response.code, http.OK) self.assertEqual( response.headers.getRawHeaders('content-type'), ['application/json; charset=utf-8']) put_msg = json.loads(response.delivered_body) [sent_msg] = self.app_helper.get_dispatched_outbound() # We do not respect the message_id that's been given. self.assertNotEqual(sent_msg['message_id'], msg['message_id']) self.assertEqual(sent_msg['message_id'], put_msg['message_id']) self.assertEqual(sent_msg['to_addr'], inbound_msg['from_addr']) self.assertEqual(sent_msg['from_addr'], '9292') @inlineCallbacks def test_metric_publishing(self): metric_data = [ ("vumi.test.v1", 1234, 'SUM'), ("vumi.test.v2", 3456, 'AVG'), ] url = '%s/%s/metrics.json' % (self.url, self.conversation.key) response = yield http_request_full( url, json.dumps(metric_data), self.auth_headers, method='PUT') self.assertEqual(response.code, http.OK) self.assertEqual( response.headers.getRawHeaders('content-type'), ['application/json; charset=utf-8']) prefix = "go.campaigns.test-0-user.stores.metric_store" self.assertEqual( self.app_helper.get_published_metrics(self.app), [("%s.vumi.test.v1" % prefix, 1234), ("%s.vumi.test.v2" % prefix, 3456)]) @inlineCallbacks def test_concurrency_limits(self): config = yield self.app.get_config(None) concurrency = config.concurrency_limit queue = DeferredQueue() url = '%s/%s/messages.json' % (self.url, self.conversation.key) max_receivers = [self.client.stream( TransportUserMessage, queue.put, queue.put, url, Headers(self.auth_headers)) for _ in range(concurrency)] for i in range(concurrency): msg = yield self.app_helper.make_dispatch_inbound( 'in %s' % (i,), message_id=str(i), conv=self.conversation) received = yield queue.get() self.assertEqual(msg['message_id'], received['message_id']) maxed_out_resp = yield http_request_full( url, method='GET', headers=self.auth_headers) self.assertEqual(maxed_out_resp.code, 403) self.assertTrue( 'Too many concurrent connections' in maxed_out_resp.delivered_body) [r.disconnect() for r in max_receivers] @inlineCallbacks def test_disabling_concurrency_limit(self): conv_resource = StreamingConversationResource( self.app, self.conversation.key) # negative concurrency limit disables it ctxt = ConfigContext(user_account=self.conversation.user_account.key, concurrency_limit=-1) config = yield self.app.get_config(msg=None, ctxt=ctxt) self.assertTrue( (yield conv_resource.is_allowed( config, self.conversation.user_account.key))) @inlineCallbacks def test_backlog_on_connect(self): for i in range(10): yield self.app_helper.make_dispatch_inbound( 'in %s' % (i,), message_id=str(i), conv=self.conversation) queue = 
DeferredQueue() url = '%s/%s/messages.json' % (self.url, self.conversation.key) receiver = self.client.stream( TransportUserMessage, queue.put, queue.put, url, Headers(self.auth_headers)) for i in range(10): received = yield queue.get() self.assertEqual(received['message_id'], str(i)) receiver.disconnect() @inlineCallbacks def test_health_response(self): health_url = 'http://%s:%s%s' % ( self.addr.host, self.addr.port, self.config['health_path']) response = yield http_request_full(health_url, method='GET') self.assertEqual(response.delivered_body, '0') yield self.app_helper.make_dispatch_inbound( 'in 1', message_id='1', conv=self.conversation) queue = DeferredQueue() stream_url = '%s/%s/messages.json' % (self.url, self.conversation.key) stream_receiver = self.client.stream( TransportUserMessage, queue.put, queue.put, stream_url, Headers(self.auth_headers)) yield queue.get() response = yield http_request_full(health_url, method='GET') self.assertEqual(response.delivered_body, '1') stream_receiver.disconnect() response = yield http_request_full(health_url, method='GET') self.assertEqual(response.delivered_body, '0') self.assertEqual(self.app.client_manager.clients, { 'sphex.stream.message.%s' % (self.conversation.key,): [] }) @inlineCallbacks def test_post_inbound_message(self): # Set the URL so stuff is HTTP Posted instead of streamed. self.conversation.config['http_api'].update({ 'push_message_url': self.mock_push_server.url, }) yield self.conversation.save() msg_d = self.app_helper.make_dispatch_inbound( 'in 1', message_id='1', conv=self.conversation) req = yield self.push_calls.get() posted_json_data = req.content.read() req.finish() msg = yield msg_d posted_msg = TransportUserMessage.from_json(posted_json_data) self.assertEqual(posted_msg['message_id'], msg['message_id']) @inlineCallbacks def test_post_inbound_message_201_response(self): # Set the URL so stuff is HTTP Posted instead of streamed. self.conversation.config['http_api'].update({ 'push_message_url': self.mock_push_server.url, }) yield self.conversation.save() with LogCatcher(message='Got unexpected response code') as lc: msg_d = self.app_helper.make_dispatch_inbound( 'in 1', message_id='1', conv=self.conversation) req = yield self.push_calls.get() req.setResponseCode(201) req.finish() yield msg_d self.assertEqual(lc.messages(), []) @inlineCallbacks def test_post_inbound_message_500_response(self): # Set the URL so stuff is HTTP Posted instead of streamed. self.conversation.config['http_api'].update({ 'push_message_url': self.mock_push_server.url, }) yield self.conversation.save() with LogCatcher(message='Got unexpected response code') as lc: msg_d = self.app_helper.make_dispatch_inbound( 'in 1', message_id='1', conv=self.conversation) req = yield self.push_calls.get() req.setResponseCode(500) req.finish() yield msg_d [warning_log] = lc.messages() self.assertTrue(self.mock_push_server.url in warning_log) self.assertTrue('500' in warning_log) @inlineCallbacks def test_post_inbound_event(self): # Set the URL so stuff is HTTP Posted instead of streamed. 
        self.conversation.config['http_api'].update({
            'push_event_url': self.mock_push_server.url,
        })
        yield self.conversation.save()

        msg = yield self.app_helper.make_stored_outbound(
            self.conversation, 'out 1', message_id='1')
        event_d = self.app_helper.make_dispatch_ack(
            msg, conv=self.conversation)

        req = yield self.push_calls.get()
        posted_json_data = req.content.read()
        req.finish()
        ack = yield event_d

        self.assertEqual(TransportEvent.from_json(posted_json_data), ack)

    @inlineCallbacks
    def test_bad_urls(self):
        def assert_not_found(url, headers=None):
            # Use the url argument (it was previously ignored in favour of
            # self.url, so every call tested the same base URL).
            d = http_request_full(url, method='GET', headers=headers)
            d.addCallback(lambda r: self.assertEqual(r.code, http.NOT_FOUND))
            return d

        yield assert_not_found(self.url)
        yield assert_not_found(self.url + '/')
        yield assert_not_found('%s/%s' % (self.url, self.conversation.key),
                               headers=self.auth_headers)
        yield assert_not_found('%s/%s/' % (self.url, self.conversation.key),
                               headers=self.auth_headers)
        yield assert_not_found('%s/%s/foo' % (self.url, self.conversation.key),
                               headers=self.auth_headers)

    @inlineCallbacks
    def test_send_message_command(self):
        yield self.app_helper.dispatch_command(
            'send_message',
            user_account_key=self.conversation.user_account.key,
            conversation_key=self.conversation.key,
            command_data={
                u'batch_id': u'batch-id',
                u'content': u'foo',
                u'to_addr': u'to_addr',
                u'msg_options': {
                    u'helper_metadata': {
                        u'tag': {
                            u'tag': [u'longcode', u'default10080']
                        }
                    },
                    u'from_addr': u'default10080',
                }
            })

        [msg] = self.app_helper.get_dispatched_outbound()
        self.assertEqual(msg.payload['to_addr'], "to_addr")
        self.assertEqual(msg.payload['from_addr'], "default10080")
        self.assertEqual(msg.payload['content'], "foo")
        self.assertEqual(msg.payload['message_type'], "user_message")
        self.assertEqual(
            msg.payload['helper_metadata']['go']['user_account'],
            self.conversation.user_account.key)
        self.assertEqual(
            msg.payload['helper_metadata']['tag']['tag'],
            ['longcode', 'default10080'])

    @inlineCallbacks
    def test_process_command_send_message_in_reply_to(self):
        msg = yield self.app_helper.make_stored_inbound(
            self.conversation, "foo")
        yield self.app_helper.dispatch_command(
            'send_message',
            user_account_key=self.conversation.user_account.key,
            conversation_key=self.conversation.key,
            command_data={
                u'batch_id': u'batch-id',
                u'content': u'foo',
                u'to_addr': u'to_addr',
                u'msg_options': {
                    u'helper_metadata': {
                        u'tag': {
                            u'tag': [u'longcode', u'default10080']
                        }
                    },
                    u'transport_name': u'smpp_transport',
                    u'in_reply_to': msg['message_id'],
                    u'transport_type': u'sms',
                    u'from_addr': u'default10080',
                }
            })
        [sent_msg] = self.app_helper.get_dispatched_outbound()
        self.assertEqual(sent_msg['to_addr'], msg['from_addr'])
        self.assertEqual(sent_msg['content'], 'foo')
        self.assertEqual(sent_msg['in_reply_to'], msg['message_id'])
from flask import request, current_app, url_for
from flask_jsonschema import validate
from .. import db
from ..models import AHBot as Bot
from .decorators import json_response
from . import api


@api.route('/abusehelper', methods=['GET'])
@json_response
def get_abusehelper():
    """Return a list of available abusehelper bots

    **Example request**:

    .. sourcecode:: http

        GET /api/1.0/abusehelper HTTP/1.1
        Host: do.cert.europa.eu
        Accept: application/json

    **Example response**:

    .. sourcecode:: http

        HTTP/1.0 200 OK
        Content-Type: application/json

        {
          "abusehelper": [
            {
              "name": "ShadowServerBot",
              "url": "http://sample.com/path.html",
              "id": 1
            }
          ]
        }

    :reqheader Accept: Content type(s) accepted by the client
    :resheader Content-Type: this depends on `Accept` header or request

    :>json array abusehelper: List of available bots
    :>jsonobj integer id: Bot ID
    :>jsonobj string name: Bot name

    :status 200: Bots found, response may be empty
    :status 404: Not found
    """
    bots = Bot.query.all()
    return {'abusehelper': [a.serialize() for a in bots]}


@api.route('/abusehelper/<int:bot_id>', methods=['GET'])
@json_response
def get_bot(bot_id):
    """Get bot from database

    **Example request**:

    .. sourcecode:: http

        GET /api/1.0/abusehelper/1 HTTP/1.1
        Host: do.cert.europa.eu
        Accept: application/json

    **Example response**:

    .. sourcecode:: http

        HTTP/1.0 200 OK
        Content-Type: application/json

        {
          "name": "ShadowServerBot",
          "url": "http://sample.com/path.html",
          "id": 1
        }

    :param bot_id: Bot unique ID

    :reqheader Accept: Content type(s) accepted by the client
    :resheader Content-Type: this depends on `Accept` header or request

    :>json integer id: Bot unique ID
    :>json string name: Bot name

    :status 200: Bot found
    :status 404: Resource not found
    """
    a = Bot.query.get_or_404(bot_id)
    return a.serialize()


@api.route('/abusehelper', methods=['POST', 'PUT'])
@validate('abusehelper', 'add_bot')
@json_response
def add_bot():
    """Add new bot entry

    **Example request**:

    .. sourcecode:: http

        POST /api/1.0/abusehelper HTTP/1.1
        Host: do.cert.europa.eu
        Accept: application/json
        Content-Type: application/json

        {
          "name": "ShadowServerBot",
          "url": "http://sample.com/path.html"
        }

    **Example response**:

    .. sourcecode:: http

        HTTP/1.0 201 CREATED
        Content-Type: application/json

        {
          "bot": {
            "name": "ShadowServerBot",
            "url": "http://sample.com/path.html",
            "id": 1
          },
          "message": "Bot added"
        }

    :reqheader Accept: Content type(s) accepted by the client
    :resheader Content-Type: this depends on `Accept` header or request

    :<json string name: Bot name

    :>jsonobj integer id: Unique ID of new bot
    :>jsonobj string name: Bot name
    :>json string message: Status message

    :status 201: Bot successfully saved
    :status 400: Bad request
    """
    a = Bot.fromdict(request.json)
    db.session.add(a)
    db.session.commit()
    return {'bot': a.serialize(), 'message': 'Bot added'}, 201, \
           {'Location': url_for('api.get_bot', bot_id=a.id)}


@api.route('/abusehelper/<int:bot_id>', methods=['PUT'])
@validate('abusehelper', 'update_bot')
@json_response
def update_bot(bot_id):
    return NotImplemented


@api.route('/abusehelper/<int:bot_id>', methods=['DELETE'])
@json_response
def delete_bot(bot_id):
    """Delete bot

    **Example request**:

    .. sourcecode:: http

        DELETE /api/1.0/abusehelper/1 HTTP/1.1
        Host: do.cert.europa.eu
        Accept: application/json

    **Example response**:

    .. sourcecode:: http

        HTTP/1.0 200 OK
        Content-Type: application/json

        {
          "message": "Bot deleted"
        }

    :param bot_id: Bot unique ID.

    :reqheader Accept: Content type(s) accepted by the client
    :resheader Content-Type: this depends on `Accept` header or request

    :>json string message: Action status

    :status 200: Bot was deleted
    :status 404: Bot was not found
    """
    a = Bot.query.filter_by(id=bot_id).delete()
    if not a:
        return {'message': 'No such bot'}, 404
    db.session.commit()
    return {'message': 'Bot deleted'}


@api.route('/abusehelper', methods=['DELETE'])
@json_response
def delete_abusehelper():
    """Clear abusehelper table

    **Example request**:

    .. sourcecode:: http

        DELETE /api/1.0/abusehelper HTTP/1.1
        Host: do.cert.europa.eu
        Accept: application/json

    **Example response**:

    .. sourcecode:: http

        HTTP/1.0 200 OK
        Content-Type: application/json

        {
          "message": "Bots deleted"
        }

    :reqheader Accept: Content type(s) accepted by the client
    :resheader Content-Type: this depends on `Accept` header or request

    :>json string message: Action status

    :status 200: Bots were deleted
    """
    a = Bot.query.delete()
    db.session.commit()
    current_app.log.debug('Deleted {} abusehelper'.format(a))
    return {'message': 'Bots deleted'}
from django.contrib import admin from .models import Photos admin.site.register(Photos)
""" Tests for the BetterZoom Chaco tool """ import unittest import numpy from chaco.api import create_line_plot from chaco.tools.api import BetterZoom from enable.testing import EnableTestAssistant class TestBetterZoomTool(EnableTestAssistant, unittest.TestCase): """ Tests for the BetterZoom Chaco tool """ def setUp(self): values = numpy.arange(10) self.plot = create_line_plot((values, values)) self.plot.bounds = [100, 100] self.plot._window = self.create_mock_window() self.tool = BetterZoom(component=self.plot) self.plot.active_tool = self.tool self.plot.do_layout() def tearDown(self): del self.tool del self.plot def test_default_position(self): tool = self.tool # this doesn't throw an exception self.send_key(tool, '+') self.assertEqual(tool.position, (50, 50)) # expected behaviour for a normal zoom in operation self.assertNotEqual(tool._index_factor, 1.0) self.assertNotEqual(tool._value_factor, 1.0) self.assertEqual(len(tool._history), 2)
import pytest

from py4jdbc.dbapi2 import connect, Connection
from py4jdbc.resultset import ResultSet
from py4jdbc.exceptions.dbapi2 import Error


def test_connect(gateway):
    url = "jdbc:derby:memory:testdb;create=true"
    conn = connect(url, gateway=gateway)
    cur = conn.cursor()
    rs = cur.execute("select * from SYS.SYSTABLES")
    assert isinstance(rs, ResultSet)


def test_execute(derby):
    cur = derby.cursor()
    rs = cur.execute("select * from SYS.SYSTABLES")
    assert isinstance(rs, ResultSet)


def test_execute_with_params(derby):
    derby.autocommit = False
    cur = derby.cursor()
    cur.execute("create schema x_with_params")
    cur.execute("create table x_with_params.cowtest(a int, b char(1))")

    # Verify table is empty.
    rows = cur.execute("select * from x_with_params.cowtest as r").fetchall()
    assert len(rows) == 0

    # Insert one with parameter binding.
    sql = "insert into x_with_params.cowtest (a, b) values (?, ?)"
    cur.execute(sql, (12, "m"))

    # Verify there's 1 row.
    rows = cur.execute("select * from x_with_params.cowtest as r").fetchall()
    assert len(rows) == 1

    # Insert a bunch.
    params = list(enumerate("thecowsaremooing"))
    cur.executemany(sql, params)
    rows = cur.execute("select * from x_with_params.cowtest as r").fetchall()
    assert len(rows) == len("thecowsaremooing") + 1

    derby.rollback()
    derby.autocommit = True


def test_fetchone(derby):
    cur = derby.cursor()
    rs = cur.execute("select * from SYS.SYSTABLES")
    assert isinstance(rs.fetchone(), rs.Row)


def test_fetchmany(derby):
    '''Assert all rows of result set have the correct class.
    '''
    cur = derby.cursor()
    rs = cur.execute("select * from SYS.SYSTABLES")
    assert all({isinstance(row, rs.Row) for row in rs.fetchmany(5)})


def test_fetchManyCount(derby):
    derby.autocommit = False
    cur = derby.cursor()
    cur.execute("create schema x_with_params")
    cur.execute("create table x_with_params.cowtest(a int, b char(1))")
    sql = "insert into x_with_params.cowtest (a, b) values (?, ?)"
    params = list(enumerate("thecowsaremooing"))
    cur.executemany(sql, params)
    rs = cur.execute("select a from x_with_params.cowtest")
    ress = []
    while True:
        x = rs.fetchmany(3)
        ress.append(x)
        if len(x) < 3:
            break
    derby.rollback()
    derby.autocommit = True
    assert sum(map(len, ress)) == len("thecowsaremooing")


def test_fetchall(derby):
    '''Assert all rows of result set have the correct class.
    '''
    cur = derby.cursor()
    rs = cur.execute("select * from SYS.SYSTABLES")
    assert all({isinstance(row, rs.Row) for row in rs.fetchall()})


def test_Cursor__iter__(derby):
    cur = derby.cursor()
    rs = cur.execute("select * from SYS.SYSTABLES")
    assert all({isinstance(row, rs.Row) for row in rs})


def test_Cursor__iter__exhausted(derby):
    cur = derby.cursor()
    rs = cur.execute("select * from SYS.SYSTABLES")
    # Exhaust all rows.
    list(rs)
    assert rs.fetchone() is None


def test_close_and_execute(derby):
    cur = derby.cursor()
    cur.close()
    with pytest.raises(Error):
        cur.execute("select * from SYS.SYSTABLES")


def test_close_and_fetchone(derby):
    cur = derby.cursor()
    cur.execute("select * from SYS.SYSTABLES")
    cur.close()
    with pytest.raises(Error):
        cur.fetchone()


def test_close_twice(derby):
    cur = derby.cursor()
    cur.close()
    with pytest.raises(Error):
        cur.close()
"""Unit-tests for `tree.visitors` """ from py2c import tree from py2c.tree import visitors from py2c.tests import Test, data_driven_test from nose.tools import assert_equal class BasicNode(tree.Node): _fields = [] class BasicNodeReplacement(tree.Node): _fields = [] class BasicNodeWithListReplacement(tree.Node): _fields = [] class BasicNodeDeletable(tree.Node): _fields = [] class ParentNode(tree.Node): _fields = [ ('child', tree.Node, 'OPTIONAL'), ] class ParentNodeWithChildrenList(tree.Node): """Node with list of nodes as field """ _fields = [ ('child', tree.Node, 'ZERO_OR_MORE'), ] class VisitOrderCheckingVisitor(visitors.RecursiveNodeVisitor): def __init__(self): super().__init__() self.visited = [] def generic_visit(self, node): self.visited.append(node.__class__.__name__) super().generic_visit(node) def visit_BasicNodeReplacement(self, node): self.visited.append("visited Copy!") class AccessPathCheckingVisitor(visitors.RecursiveNodeVisitor): def __init__(self): super().__init__() self.recorded_access_path = None def visit_BasicNode(self, node): self.recorded_access_path = self.access_path[:] class EmptyTransformer(visitors.RecursiveNodeTransformer): pass class VisitOrderCheckingTransformer(visitors.RecursiveNodeTransformer): def __init__(self): super().__init__() self.visited = [] def generic_visit(self, node): self.visited.append(node.__class__.__name__) return super().generic_visit(node) def visit_BasicNodeReplacement(self, node): self.visited.append("visited Copy!") return node class AccessPathCheckingTransformer(visitors.RecursiveNodeTransformer): def __init__(self): super().__init__() self.recorded_access_path = None def visit_BasicNode(self, node): self.recorded_access_path = self.access_path[:] return node class TransformationCheckingTransformer(visitors.RecursiveNodeTransformer): def visit_BasicNode(self, node): return BasicNodeReplacement() def visit_BasicNodeDeletable(self, node): return None # Delete this node def visit_BasicNodeReplacement(self, node): return self.NONE_DEPUTY # Replace this node with None def visit_BasicNodeWithListReplacement(self, node): return [BasicNode(), BasicNodeReplacement()] class TestRecursiveASTVisitor(Test): """py2c.tree.visitors.RecursiveNodeVisitor """ context = globals() @data_driven_test("visitors-visitor_order.yaml", prefix="visit order of ") def test_visit_order(self, node, order): to_visit = self.load(node) # The main stuff visitor = VisitOrderCheckingVisitor() retval = visitor.visit(to_visit) assert_equal(retval, None) assert_equal(visitor.visited, order) @data_driven_test("visitors-access_path.yaml", prefix="access path on visit of ") def test_access_path(self, node, access): to_visit = self.load(node) access_path = self.load(access) # The main stuff visitor = AccessPathCheckingVisitor() retval = visitor.visit(to_visit) assert_equal(retval, None) assert_equal(visitor.recorded_access_path, access_path) class TestRecursiveASTTransformer(Test): """py2c.tree.visitors.RecursiveNodeTransformer """ context = globals() @data_driven_test("visitors-visitor_order.yaml", prefix="empty transformer does not transform ") def test_empty_transformer(self, node, order): to_visit = self.load(node) # The main stuff visitor = EmptyTransformer() retval = visitor.visit(to_visit) assert_equal(to_visit, retval) @data_driven_test("visitors-visitor_order.yaml", prefix="visit order of ") def test_visit_order(self, node, order): to_visit = self.load(node) # The main stuff visitor = VisitOrderCheckingTransformer() retval = visitor.visit(to_visit) 
assert_equal(to_visit, retval) assert_equal(visitor.visited, order) @data_driven_test("visitors-access_path.yaml", prefix="access path on visit of ") def test_access_path(self, node, access): to_visit = self.load(node) access_path = self.load(access) # The main stuff visitor = AccessPathCheckingTransformer() retval = visitor.visit(to_visit) assert_equal(retval, to_visit) assert_equal(visitor.recorded_access_path, access_path) @data_driven_test("visitors-transform.yaml", prefix="transformation of ") def test_transformation(self, node, expected): to_visit = self.load(node) expected_node = self.load(expected) # The main stuff visitor = TransformationCheckingTransformer() retval = visitor.visit(to_visit) assert_equal(retval, expected_node) if __name__ == '__main__': from py2c.tests import runmodule runmodule()
from importlib import import_module from inspect import getdoc def attribs(name): mod = import_module(name) print name print 'Has __all__?', hasattr(mod, '__all__') print 'Has __doc__?', hasattr(mod, '__doc__') print 'doc: ', getdoc(mod) if __name__=='__main__': attribs('cairo') attribs('zope') attribs('A.B.C') import hacked class Object(object): pass opt = Object() opt.ignore_errors = False a, d = hacked.get_all_attr_has_docstr('/home/ali/ws-pydev/apidocfilter/A/B', '/home/ali/ws-pydev/apidocfilter/A/B/C', opt) print(a) print(d)
class Requirement(object):
    """
    Requirements are the basis for Dominion. They define what needs to exist
    on a host/role, or perhaps what *mustn't* exist. Requirements are defined
    on Roles.

    This is the base class for all requirements.
    """
    creation_counter = 0

    def __init__(self, required=True, ensure=None, depends=None, post=None):
        self.required = required
        self.ensure = ensure or "exists"
        self.depends = depends or ()
        if self.ensure == "removed":
            self.required = False
        self.post = post or ()

        # Increase the creation counter, and save our local copy.
        self.creation_counter = Requirement.creation_counter
        Requirement.creation_counter += 1

    def __call__(self):
        self.apply()

    def apply(self):
        if self.ensure == "exists" or self.required:
            if hasattr(self, 'install'):
                return self.install()
        if self.ensure == "removed":
            if hasattr(self, 'uninstall'):
                return self.uninstall()
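# A minimal illustrative subclass (hypothetical; ``PackageRequirement`` and
# its install/uninstall bodies are not part of this module, just a sketch of
# how the hooks checked in ``apply()`` are meant to be supplied):
class PackageRequirement(Requirement):
    def __init__(self, name, **kwargs):
        self.name = name
        super(PackageRequirement, self).__init__(**kwargs)

    def install(self):
        # Called when ensure == "exists" (or the requirement is required).
        print("ensuring package %s exists" % self.name)

    def uninstall(self):
        # Called when ensure == "removed".
        print("removing package %s" % self.name)


# Usage sketch:
#
#     PackageRequirement("nginx")()                        # installs
#     PackageRequirement("apache2", ensure="removed")()    # uninstalls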
import py try: from pypy.rpython.test.test_llinterp import interpret except ImportError: py.test.skip('Needs PyPy to be on the PYTHONPATH') from rply import ParserGenerator, Token from rply.errors import ParserGeneratorWarning from .base import BaseTests from .utils import FakeLexer, BoxInt, ParserState class TestTranslation(BaseTests): def run(self, func, args): return interpret(func, args) def test_basic(self): pg = ParserGenerator(["NUMBER", "PLUS"]) @pg.production("main : expr") def main(p): return p[0] @pg.production("expr : expr PLUS expr") def expr_op(p): return BoxInt(p[0].getint() + p[2].getint()) @pg.production("expr : NUMBER") def expr_num(p): return BoxInt(int(p[0].getstr())) with self.assert_warns(ParserGeneratorWarning, "1 shift/reduce conflict"): parser = pg.build() def f(n): return parser.parse(FakeLexer([ Token("NUMBER", str(n)), Token("PLUS", "+"), Token("NUMBER", str(n)) ])).getint() assert self.run(f, [12]) == 24 def test_state(self): pg = ParserGenerator(["NUMBER", "PLUS"], precedence=[ ("left", ["PLUS"]), ]) @pg.production("main : expression") def main(state, p): state.count += 1 return p[0] @pg.production("expression : expression PLUS expression") def expression_plus(state, p): state.count += 1 return BoxInt(p[0].getint() + p[2].getint()) @pg.production("expression : NUMBER") def expression_number(state, p): state.count += 1 return BoxInt(int(p[0].getstr())) parser = pg.build() def f(): state = ParserState() return parser.parse(FakeLexer([ Token("NUMBER", "10"), Token("PLUS", "+"), Token("NUMBER", "12"), Token("PLUS", "+"), Token("NUMBER", "-2"), ]), state=state).getint() + state.count assert self.run(f, []) == 26
import sys, os extensions = [] templates_path = ['_templates'] source_suffix = '.rst' master_doc = 'index' project = u'zambiaureport' copyright = u'2014, Andre Lesa' version = '0.1' release = '0.1' exclude_patterns = ['_build'] pygments_style = 'sphinx' html_theme = 'default' html_static_path = ['_static'] htmlhelp_basename = 'zambiaureportdoc' latex_elements = { } latex_documents = [ ('index', 'zambiaureport.tex', u'zambiaureport Documentation', u'Andre Lesa', 'manual'), ] man_pages = [ ('index', 'zambiaureport', u'zambiaureport Documentation', [u'Andre Lesa'], 1) ] texinfo_documents = [ ('index', 'zambiaureport', u'zambiaureport Documentation', u'Andre Lesa', 'zambiaureport', 'Zambia U-Report reference implementation.','Miscellaneous'), ]
'''

'''
import logging # isort:skip
log = logging.getLogger(__name__)

from .notebook import run_notebook_hook
from .state import curstate

__all__ = (
    'output_file',
    'output_notebook',
    'reset_output',
)

def output_file(filename, title="Bokeh Plot", mode=None, root_dir=None):
    '''Configure the default output state to generate output saved
    to a file when :func:`show` is called.

    Does not change the current ``Document`` from ``curdoc()``. File and
    notebook output may be active at the same time, so e.g., this does not
    clear the effects of ``output_notebook()``.

    Args:
        filename (str) : a filename for saving the HTML document

        title (str, optional) : a title for the HTML document (default: "Bokeh Plot")

        mode (str, optional) : how to include BokehJS (default: ``'cdn'``)

            One of: ``'inline'``, ``'cdn'``, ``'relative(-dev)'`` or
            ``'absolute(-dev)'``. See :class:`bokeh.resources.Resources`
            for more details.

        root_dir (str, optional) : root directory to use for 'absolute' resources.
            (default: None)

            This value is ignored for other resource types, e.g. ``INLINE`` or
            ``CDN``.

    Returns:
        None

    .. note::
        Generally, this should be called at the beginning of an interactive
        session or the top of a script.

    .. warning::
        This output file will be overwritten on every save, e.g., each time
        show() or save() is invoked.

    '''
    curstate().output_file(
        filename,
        title=title,
        mode=mode,
        root_dir=root_dir
    )

def output_notebook(resources=None, verbose=False, hide_banner=False,
                    load_timeout=5000, notebook_type='jupyter'):
    ''' Configure the default output state to generate output in notebook cells
    when :func:`show` is called. Note that :func:`show` may be called multiple
    times in a single cell to display multiple objects in the output cell. The
    objects will be displayed in order.

    Args:
        resources (Resource, optional) :
            How and where to load BokehJS from (default: CDN)

        verbose (bool, optional) :
            whether to display detailed BokehJS banner (default: False)

        hide_banner (bool, optional):
            whether to hide the Bokeh banner (default: False)

        load_timeout (int, optional) :
            Timeout in milliseconds when plots assume load timed out (default: 5000)

        notebook_type (string, optional):
            Notebook type (default: jupyter)

    Returns:
        None

    .. note::
        Generally, this should be called at the beginning of an interactive
        session or the top of a script.

    '''
    # verify notebook_type first in curstate().output_notebook
    curstate().output_notebook(notebook_type)
    run_notebook_hook(notebook_type, 'load', resources, verbose, hide_banner, load_timeout)

def reset_output(state=None):
    ''' Clear the default state of all output modes.

    Returns:
        None

    '''
    curstate().reset()
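# A minimal usage sketch for the output functions above (a sketch only,
# assuming a standard Bokeh install where `figure` and `show` come from
# bokeh.plotting; the filename and data are made up for illustration):
from bokeh.plotting import figure, show

output_file("lines.html", title="Demo", mode="cdn")  # route show() to a file
p = figure(title="example")
p.line([1, 2, 3], [4, 6, 5])
show(p)         # saves lines.html and opens it in a browser
reset_output()  # clear the file-output state again afterwards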
from django import template
from django.utils.safestring import mark_safe
from mezzanine.conf import settings
from mezzanine_developer_extension.utils import refactor_html

register = template.Library()

_prefix = "mezzanine_developer_extension.styles"

try:
    if settings.TERMINAL_STYLE not in \
            ["%s.macos" % _prefix,
             "%s.ubuntu" % _prefix,
             "%s.windows" % _prefix]:
        # If the user has specified a wrong terminal styling format, we
        # raise an exception warning about this.
        msg = "Wrong terminal style format. Check the value of TERMINAL_STYLE"\
            " in your settings.py file."
        raise Exception(msg)
except AttributeError:
    msg = "You have not specified a terminal output format. You have to"\
        " define the attribute TERMINAL_STYLE in your settings.py"
    raise Exception(msg)

@register.filter(name='safe_developer')
def safe_developer(content, style="macos"):
    """
    Renders content without cleaning the original.
    Replaces the terminal divs with a more complex HTML layout.
    """
    new_content = refactor_html(content, style)
    return mark_safe(new_content)
from datetime import datetime from pymongo.connection import Connection from django.db import models from eventtracker.conf import settings def get_mongo_collection(): "Open a connection to MongoDB and return the collection to use." if settings.RIGHT_MONGODB_HOST: connection = Connection.paired( left=(settings.MONGODB_HOST, settings.MONGODB_PORT), right=(settings.RIGHT_MONGODB_HOST, settings.RIGHT_MONGODB_PORT) ) else: connection = Connection(host=settings.MONGODB_HOST, port=settings.MONGODB_PORT) return connection[settings.MONGODB_DB][settings.MONGODB_COLLECTION] def save_event(collection, event, timestamp, params): "Save the event in MongoDB collection" collection.insert({ 'event': event, 'timestamp': datetime.fromtimestamp(timestamp), 'params': params }) class Event(models.Model): "Dummy model for development." timestamp = models.DateTimeField(auto_now_add=True) event = models.SlugField() params = models.TextField()
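# A minimal sketch of how the helpers above fit together (assumes the
# eventtracker MongoDB settings are configured and the server is reachable;
# the event name and params are made up for illustration):
import time

collection = get_mongo_collection()
save_event(collection, 'user_signed_up', time.time(), {'plan': 'free'})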
import sys
sys.path.insert(0, "../src")

def urlopen(*args, **kwargs):
    # Only parse one arg: the url
    return Urls[args[0]]

from io import StringIO
from time import time
import re

from nose.tools import with_setup

class MockUrlContent(StringIO):
    def __init__(self, content):
        super(MockUrlContent, self).__init__(content)
        self.headers = {
            'last-modified': time()
        }

    def close(self):
        pass

scheme_re = re.compile(r'file:(/+)?')

class MockUrlCache(dict):
    def __setitem__(self, name, content):
        super(MockUrlCache, self).__setitem__(name, MockUrlContent(content))

    def __getitem__(self, name):
        if name in self:
            return super(MockUrlCache, self).__getitem__(name)
        # Strip off 'file:[///]' from url
        elif name.startswith('file:'):
            try:
                name = scheme_re.sub('', name)
                return super(MockUrlCache, self).__getitem__(name)
            except KeyError:
                # Fall through
                pass
        # urlopen raises ValueError if unable to load content (not KeyError)
        raise ValueError("{0}: Cannot find file content".format(name))

Urls = MockUrlCache()

def clear_configs():
    pass

@with_setup(clear_configs)
def testImportContent():
    "Can import content from a file"
    from xmlconfig import getConfig
    Urls.clear()
    Urls["file:file.txt"] = "Content embedded in a file"
    Urls["config.xml"] = \
    u"""<?xml version="1.0" encoding="utf-8"?>
    <config>
        <constants>
            <string key="import" src="file:file.txt"/>
        </constants>
    </config>
    """
    conf = getConfig()
    conf.load("config.xml")
    assert conf.get("import") == "Content embedded in a file"

@with_setup(clear_configs)
def testImportConfig():
    "Can import another config file"
    from xmlconfig import getConfig
    Urls.clear()
    Urls["config2.xml"] = \
    """<?xml version="1.0"?>
    <config>
        <constants>
            <string key="key22">This was imported from config2.xml</string>
        </constants>
    </config>
    """
    Urls["config.xml"] = \
    u"""<?xml version="1.0" encoding="utf-8"?>
    <config>
        <constants namespace="import" src="file:config2.xml"/>
        <constants>
            <string key="imported">%(import:key22)</string>
        </constants>
    </config>
    """
    conf = getConfig()
    conf.load("config.xml")
    assert conf.get("imported") == "This was imported from config2.xml"

@with_setup(clear_configs)
def testCircularImport():
    "Properly detect circular importing"
    from xmlconfig import getConfig
    Urls.clear()
    Urls["config2.xml"] = \
    """<?xml version="1.0"?>
    <config>
        <constants namespace="circular" src="file:config.xml"/>
        <constants>
            <string key="key22">This was imported from config2.xml</string>
            <string key="foreign">
                Namespace changed in %(circular:key4.import)
            </string>
        </constants>
    </config>
    """
    Urls["config.xml"] = \
    """<?xml version="1.0"?>
    <config>
        <constants namespace="import" src="file:config2.xml"/>
        <constants>
            <section key="key4">
                <string key="key5">value2</string>
                <string key="import">%(import:key22)</string>
            </section>
        </constants>
    </config>
    """
    conf = getConfig()
    conf.load("config.xml")
    assert conf.get("import:foreign") == \
        "Namespace changed in This was imported from config2.xml"

@with_setup(clear_configs)
def testRelativeImport():
    """Transfer the leading absolute or relative path to the location of
    imported documents"""
    from xmlconfig import getConfig
    Urls["../config/config2.xml"] = \
    """<?xml version="1.0"?>
    <config>
        <constants>
            <string key="key22">This was imported from config2.xml</string>
        </constants>
    </config>
    """
    Urls["../config/config.xml"] = \
    """<?xml version="1.0" encoding="utf-8"?>
    <config>
        <constants namespace="import" src="file:config2.xml"/>
        <constants>
            <string key="imported">%(import:key22)</string>
        </constants>
    </config>
    """
    conf = getConfig()
    conf.load("../config/config.xml")
    assert conf.get("imported") == "This was imported from config2.xml"
import sys, os cwd = os.getcwd() parent = os.path.dirname(cwd) sys.path.append(parent) import organigrammi extensions = ['sphinx.ext.autodoc', 'sphinx.ext.viewcode'] templates_path = ['_templates'] source_suffix = '.rst' master_doc = 'index' project = u'openpa-organigrammi' copyright = u'2014, Simone Dalla' version = organigrammi.__version__ release = organigrammi.__version__ exclude_patterns = ['_build'] pygments_style = 'sphinx' html_theme = 'default' html_static_path = ['_static'] htmlhelp_basename = 'openpa-organigrammidoc' latex_elements = { } latex_documents = [ ('index', 'openpa-organigrammi.tex', u'openpa-organigrammi Documentation', u'Simone Dalla', 'manual'), ] man_pages = [ ('index', 'openpa-organigrammi', u'openpa-organigrammi Documentation', [u'Simone Dalla'], 1) ] texinfo_documents = [ ('index', 'openpa-organigrammi', u'openpa-organigrammi Documentation', u'Simone Dalla', 'openpa-organigrammi', 'One line description of project.', 'Miscellaneous'), ]
"""Git implementation of _version.py.""" import errno import os import re import subprocess import sys def get_keywords(): """Get the keywords needed to look up the version information.""" # these strings will be replaced by git during git-archive. # setup.py/versioneer.py will grep for the variable names, so they must # each be defined on a line of their own. _version.py will just call # get_keywords(). git_refnames = "$Format:%d$" git_full = "$Format:%H$" git_date = "$Format:%ci$" keywords = {"refnames": git_refnames, "full": git_full, "date": git_date} return keywords class VersioneerConfig: """Container for Versioneer configuration parameters.""" def get_config(): """Create, populate and return the VersioneerConfig() object.""" # these strings are filled in when 'setup.py versioneer' creates # _version.py cfg = VersioneerConfig() cfg.VCS = "git" cfg.style = "pep440" cfg.tag_prefix = "v" cfg.parentdir_prefix = "" cfg.versionfile_source = "jxl2txt/_version.py" cfg.verbose = False return cfg class NotThisMethod(Exception): """Exception raised if a method is not valid for the current scenario.""" LONG_VERSION_PY = {} HANDLERS = {} def register_vcs_handler(vcs, method): # decorator """Create decorator to mark a method as the handler of a VCS.""" def decorate(f): """Store f in HANDLERS[vcs][method].""" if vcs not in HANDLERS: HANDLERS[vcs] = {} HANDLERS[vcs][method] = f return f return decorate def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, env=None): """Call the given command(s).""" assert isinstance(commands, list) p = None for c in commands: try: dispcmd = str([c] + args) # remember shell=False, so use git.cmd on windows, not just git p = subprocess.Popen([c] + args, cwd=cwd, env=env, stdout=subprocess.PIPE, stderr=(subprocess.PIPE if hide_stderr else None)) break except EnvironmentError: e = sys.exc_info()[1] if e.errno == errno.ENOENT: continue if verbose: print("unable to run %s" % dispcmd) print(e) return None, None else: if verbose: print("unable to find command, tried %s" % (commands,)) return None, None stdout = p.communicate()[0].strip().decode() if p.returncode != 0: if verbose: print("unable to run %s (error)" % dispcmd) print("stdout was %s" % stdout) return None, p.returncode return stdout, p.returncode def versions_from_parentdir(parentdir_prefix, root, verbose): """Try to determine the version from the parent directory name. Source tarballs conventionally unpack into a directory that includes both the project name and a version string. We will also support searching up two directory levels for an appropriately named parent directory """ rootdirs = [] for i in range(3): dirname = os.path.basename(root) if dirname.startswith(parentdir_prefix): return {"version": dirname[len(parentdir_prefix):], "full-revisionid": None, "dirty": False, "error": None, "date": None} else: rootdirs.append(root) root = os.path.dirname(root) # up a level if verbose: print("Tried directories %s but none started with prefix %s" % (str(rootdirs), parentdir_prefix)) raise NotThisMethod("rootdir doesn't start with parentdir_prefix") @register_vcs_handler("git", "get_keywords") def git_get_keywords(versionfile_abs): """Extract version information from the given file.""" # the code embedded in _version.py can just fetch the value of these # keywords. When used from setup.py, we don't want to import _version.py, # so we do it with a regexp instead. This function is not used from # _version.py. 
keywords = {} try: f = open(versionfile_abs, "r") for line in f.readlines(): if line.strip().startswith("git_refnames ="): mo = re.search(r'=\s*"(.*)"', line) if mo: keywords["refnames"] = mo.group(1) if line.strip().startswith("git_full ="): mo = re.search(r'=\s*"(.*)"', line) if mo: keywords["full"] = mo.group(1) if line.strip().startswith("git_date ="): mo = re.search(r'=\s*"(.*)"', line) if mo: keywords["date"] = mo.group(1) f.close() except EnvironmentError: pass return keywords @register_vcs_handler("git", "keywords") def git_versions_from_keywords(keywords, tag_prefix, verbose): """Get version information from git keywords.""" if not keywords: raise NotThisMethod("no keywords at all, weird") date = keywords.get("date") if date is not None: # Use only the last line. Previous lines may contain GPG signature # information. date = date.splitlines()[-1] # git-2.2.0 added "%cI", which expands to an ISO-8601 -compliant # datestamp. However we prefer "%ci" (which expands to an "ISO-8601 # -like" string, which we must then edit to make compliant), because # it's been around since git-1.5.3, and it's too difficult to # discover which version we're using, or to work around using an # older one. date = date.strip().replace(" ", "T", 1).replace(" ", "", 1) refnames = keywords["refnames"].strip() if refnames.startswith("$Format"): if verbose: print("keywords are unexpanded, not using") raise NotThisMethod("unexpanded keywords, not a git-archive tarball") refs = set([r.strip() for r in refnames.strip("()").split(",")]) # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of # just "foo-1.0". If we see a "tag: " prefix, prefer those. TAG = "tag: " tags = set([r[len(TAG):] for r in refs if r.startswith(TAG)]) if not tags: # Either we're using git < 1.8.3, or there really are no tags. We use # a heuristic: assume all version tags have a digit. The old git %d # expansion behaves like git log --decorate=short and strips out the # refs/heads/ and refs/tags/ prefixes that would let us distinguish # between branches and tags. By ignoring refnames without digits, we # filter out many common branch names like "release" and # "stabilization", as well as "HEAD" and "master". tags = set([r for r in refs if re.search(r'\d', r)]) if verbose: print("discarding '%s', no digits" % ",".join(refs - tags)) if verbose: print("likely tags: %s" % ",".join(sorted(tags))) for ref in sorted(tags): # sorting will prefer e.g. "2.0" over "2.0rc1" if ref.startswith(tag_prefix): r = ref[len(tag_prefix):] if verbose: print("picking %s" % r) return {"version": r, "full-revisionid": keywords["full"].strip(), "dirty": False, "error": None, "date": date} # no suitable tags, so version is "0+unknown", but full hex is still there if verbose: print("no suitable tags, using unknown + full revision id") return {"version": "0+unknown", "full-revisionid": keywords["full"].strip(), "dirty": False, "error": "no suitable tags", "date": None} @register_vcs_handler("git", "pieces_from_vcs") def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): """Get version from 'git describe' in the root of the source tree. This only gets called if the git-archive 'subst' keywords were *not* expanded, and _version.py hasn't already been rewritten with a short version string, meaning we're inside a checked out source tree. 
""" GITS = ["git"] if sys.platform == "win32": GITS = ["git.cmd", "git.exe"] out, rc = run_command(GITS, ["rev-parse", "--git-dir"], cwd=root, hide_stderr=True) if rc != 0: if verbose: print("Directory %s not under git control" % root) raise NotThisMethod("'git rev-parse --git-dir' returned error") # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty] # if there isn't one, this yields HEX[-dirty] (no NUM) describe_out, rc = run_command(GITS, ["describe", "--tags", "--dirty", "--always", "--long", "--match", "%s*" % tag_prefix], cwd=root) # --long was added in git-1.5.5 if describe_out is None: raise NotThisMethod("'git describe' failed") describe_out = describe_out.strip() full_out, rc = run_command(GITS, ["rev-parse", "HEAD"], cwd=root) if full_out is None: raise NotThisMethod("'git rev-parse' failed") full_out = full_out.strip() pieces = {} pieces["long"] = full_out pieces["short"] = full_out[:7] # maybe improved later pieces["error"] = None # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty] # TAG might have hyphens. git_describe = describe_out # look for -dirty suffix dirty = git_describe.endswith("-dirty") pieces["dirty"] = dirty if dirty: git_describe = git_describe[:git_describe.rindex("-dirty")] # now we have TAG-NUM-gHEX or HEX if "-" in git_describe: # TAG-NUM-gHEX mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe) if not mo: # unparseable. Maybe git-describe is misbehaving? pieces["error"] = ("unable to parse git-describe output: '%s'" % describe_out) return pieces # tag full_tag = mo.group(1) if not full_tag.startswith(tag_prefix): if verbose: fmt = "tag '%s' doesn't start with prefix '%s'" print(fmt % (full_tag, tag_prefix)) pieces["error"] = ("tag '%s' doesn't start with prefix '%s'" % (full_tag, tag_prefix)) return pieces pieces["closest-tag"] = full_tag[len(tag_prefix):] # distance: number of commits since tag pieces["distance"] = int(mo.group(2)) # commit: short hex revision ID pieces["short"] = mo.group(3) else: # HEX: no tags pieces["closest-tag"] = None count_out, rc = run_command(GITS, ["rev-list", "HEAD", "--count"], cwd=root) pieces["distance"] = int(count_out) # total number of commits # commit date: see ISO-8601 comment in git_versions_from_keywords() date = run_command(GITS, ["show", "-s", "--format=%ci", "HEAD"], cwd=root)[0].strip() # Use only the last line. Previous lines may contain GPG signature # information. date = date.splitlines()[-1] pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1) return pieces def plus_or_dot(pieces): """Return a + if we don't already have one, else return a .""" if "+" in pieces.get("closest-tag", ""): return "." return "+" def render_pep440(pieces): """Build up version string, with post-release "local version identifier". Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty Exceptions: 1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty] """ if pieces["closest-tag"]: rendered = pieces["closest-tag"] if pieces["distance"] or pieces["dirty"]: rendered += plus_or_dot(pieces) rendered += "%d.g%s" % (pieces["distance"], pieces["short"]) if pieces["dirty"]: rendered += ".dirty" else: # exception #1 rendered = "0+untagged.%d.g%s" % (pieces["distance"], pieces["short"]) if pieces["dirty"]: rendered += ".dirty" return rendered def render_pep440_pre(pieces): """TAG[.post0.devDISTANCE] -- No -dirty. Exceptions: 1: no tags. 
0.post0.devDISTANCE """ if pieces["closest-tag"]: rendered = pieces["closest-tag"] if pieces["distance"]: rendered += ".post0.dev%d" % pieces["distance"] else: # exception #1 rendered = "0.post0.dev%d" % pieces["distance"] return rendered def render_pep440_post(pieces): """TAG[.postDISTANCE[.dev0]+gHEX] . The ".dev0" means dirty. Note that .dev0 sorts backwards (a dirty tree will appear "older" than the corresponding clean one), but you shouldn't be releasing software with -dirty anyways. Exceptions: 1: no tags. 0.postDISTANCE[.dev0] """ if pieces["closest-tag"]: rendered = pieces["closest-tag"] if pieces["distance"] or pieces["dirty"]: rendered += ".post%d" % pieces["distance"] if pieces["dirty"]: rendered += ".dev0" rendered += plus_or_dot(pieces) rendered += "g%s" % pieces["short"] else: # exception #1 rendered = "0.post%d" % pieces["distance"] if pieces["dirty"]: rendered += ".dev0" rendered += "+g%s" % pieces["short"] return rendered def render_pep440_old(pieces): """TAG[.postDISTANCE[.dev0]] . The ".dev0" means dirty. Exceptions: 1: no tags. 0.postDISTANCE[.dev0] """ if pieces["closest-tag"]: rendered = pieces["closest-tag"] if pieces["distance"] or pieces["dirty"]: rendered += ".post%d" % pieces["distance"] if pieces["dirty"]: rendered += ".dev0" else: # exception #1 rendered = "0.post%d" % pieces["distance"] if pieces["dirty"]: rendered += ".dev0" return rendered def render_git_describe(pieces): """TAG[-DISTANCE-gHEX][-dirty]. Like 'git describe --tags --dirty --always'. Exceptions: 1: no tags. HEX[-dirty] (note: no 'g' prefix) """ if pieces["closest-tag"]: rendered = pieces["closest-tag"] if pieces["distance"]: rendered += "-%d-g%s" % (pieces["distance"], pieces["short"]) else: # exception #1 rendered = pieces["short"] if pieces["dirty"]: rendered += "-dirty" return rendered def render_git_describe_long(pieces): """TAG-DISTANCE-gHEX[-dirty]. Like 'git describe --tags --dirty --always -long'. The distance/hash is unconditional. Exceptions: 1: no tags. HEX[-dirty] (note: no 'g' prefix) """ if pieces["closest-tag"]: rendered = pieces["closest-tag"] rendered += "-%d-g%s" % (pieces["distance"], pieces["short"]) else: # exception #1 rendered = pieces["short"] if pieces["dirty"]: rendered += "-dirty" return rendered def render(pieces, style): """Render the given version pieces into the requested style.""" if pieces["error"]: return {"version": "unknown", "full-revisionid": pieces.get("long"), "dirty": None, "error": pieces["error"], "date": None} if not style or style == "default": style = "pep440" # the default if style == "pep440": rendered = render_pep440(pieces) elif style == "pep440-pre": rendered = render_pep440_pre(pieces) elif style == "pep440-post": rendered = render_pep440_post(pieces) elif style == "pep440-old": rendered = render_pep440_old(pieces) elif style == "git-describe": rendered = render_git_describe(pieces) elif style == "git-describe-long": rendered = render_git_describe_long(pieces) else: raise ValueError("unknown style '%s'" % style) return {"version": rendered, "full-revisionid": pieces["long"], "dirty": pieces["dirty"], "error": None, "date": pieces.get("date")} def get_versions(): """Get version information or return default if unable to do so.""" # I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have # __file__, we can work backwards from there to the root. Some # py2exe/bbfreeze/non-CPython implementations don't do __file__, in which # case we can only use expanded keywords. 
cfg = get_config() verbose = cfg.verbose try: return git_versions_from_keywords(get_keywords(), cfg.tag_prefix, verbose) except NotThisMethod: pass try: root = os.path.realpath(__file__) # versionfile_source is the relative path from the top of the source # tree (where the .git directory might live) to this file. Invert # this to find the root from __file__. for i in cfg.versionfile_source.split('/'): root = os.path.dirname(root) except NameError: return {"version": "0+unknown", "full-revisionid": None, "dirty": None, "error": "unable to find root of source tree", "date": None} try: pieces = git_pieces_from_vcs(cfg.tag_prefix, root, verbose) return render(pieces, cfg.style) except NotThisMethod: pass try: if cfg.parentdir_prefix: return versions_from_parentdir(cfg.parentdir_prefix, root, verbose) except NotThisMethod: pass return {"version": "0+unknown", "full-revisionid": None, "dirty": None, "error": "unable to compute version", "date": None}
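# A small illustration (not part of Versioneer itself) of how render()
# turns a `pieces` dict, normally produced by git_pieces_from_vcs(), into
# a version string. The field values below are made up:
if __name__ == '__main__':
    example_pieces = {"closest-tag": "1.2", "distance": 3, "short": "abc1234",
                      "long": "abc1234" + "0" * 33, "dirty": True,
                      "error": None, "date": "2018-01-01T12:00:00+0000"}
    # pep440 style, TAG[+DISTANCE.gHEX[.dirty]]: prints "1.2+3.gabc1234.dirty"
    print(render(example_pieces, "pep440")["version"])
    # git-describe style, TAG[-DISTANCE-gHEX][-dirty]: prints "1.2-3-gabc1234-dirty"
    print(render(example_pieces, "git-describe")["version"])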
from . import Cl, conformalize layout_orig, blades_orig = Cl(3) layout, blades, stuff = conformalize(layout_orig) locals().update(blades) locals().update(stuff) layout.__name__ = 'layout' layout.__module__ = __name__
""" Unit tests to ensure that we can call reset_traits/delete on a property trait (regression tests for Github issue #67). """ from traits import _py2to3 from traits.api import Any, HasTraits, Int, Property, TraitError from traits.testing.unittest_tools import unittest class E(HasTraits): a = Property(Any) b = Property(Int) class TestPropertyDelete(unittest.TestCase): def test_property_delete(self): e = E() with self.assertRaises(TraitError): del e.a with self.assertRaises(TraitError): del e.b def test_property_reset_traits(self): e = E() unresetable = e.reset_traits() _py2to3.assertCountEqual(self, unresetable, ['a', 'b'])
from django import forms from ncdjango.interfaces.arcgis.form_fields import SrField class PointForm(forms.Form): x = forms.FloatField() y = forms.FloatField() projection = SrField()
from .fetchers import NUPermissionsFetcher from .fetchers import NUMetadatasFetcher from .fetchers import NUGlobalMetadatasFetcher from bambou import NURESTObject class NUAvatar(NURESTObject): """ Represents a Avatar in the VSD Notes: Avatar """ __rest_name__ = "avatar" __resource_name__ = "avatars" ## Constants CONST_ENTITY_SCOPE_GLOBAL = "GLOBAL" CONST_ENTITY_SCOPE_ENTERPRISE = "ENTERPRISE" def __init__(self, **kwargs): """ Initializes a Avatar instance Notes: You can specify all parameters while calling this methods. A special argument named `data` will enable you to load the object from a Python dictionary Examples: >>> avatar = NUAvatar(id=u'xxxx-xxx-xxx-xxx', name=u'Avatar') >>> avatar = NUAvatar(data=my_dict) """ super(NUAvatar, self).__init__() # Read/Write Attributes self._last_updated_by = None self._last_updated_date = None self._embedded_metadata = None self._entity_scope = None self._creation_date = None self._owner = None self._external_id = None self._type = None self.expose_attribute(local_name="last_updated_by", remote_name="lastUpdatedBy", attribute_type=str, is_required=False, is_unique=False) self.expose_attribute(local_name="last_updated_date", remote_name="lastUpdatedDate", attribute_type=str, is_required=False, is_unique=False) self.expose_attribute(local_name="embedded_metadata", remote_name="embeddedMetadata", attribute_type=list, is_required=False, is_unique=False) self.expose_attribute(local_name="entity_scope", remote_name="entityScope", attribute_type=str, is_required=False, is_unique=False, choices=[u'ENTERPRISE', u'GLOBAL']) self.expose_attribute(local_name="creation_date", remote_name="creationDate", attribute_type=str, is_required=False, is_unique=False) self.expose_attribute(local_name="owner", remote_name="owner", attribute_type=str, is_required=False, is_unique=False) self.expose_attribute(local_name="external_id", remote_name="externalID", attribute_type=str, is_required=False, is_unique=True) self.expose_attribute(local_name="type", remote_name="type", attribute_type=str, is_required=False, is_unique=False) # Fetchers self.permissions = NUPermissionsFetcher.fetcher_with_object(parent_object=self, relationship="child") self.metadatas = NUMetadatasFetcher.fetcher_with_object(parent_object=self, relationship="child") self.global_metadatas = NUGlobalMetadatasFetcher.fetcher_with_object(parent_object=self, relationship="child") self._compute_args(**kwargs) # Properties @property def last_updated_by(self): """ Get last_updated_by value. Notes: ID of the user who last updated the object. This attribute is named `lastUpdatedBy` in VSD API. """ return self._last_updated_by @last_updated_by.setter def last_updated_by(self, value): """ Set last_updated_by value. Notes: ID of the user who last updated the object. This attribute is named `lastUpdatedBy` in VSD API. """ self._last_updated_by = value @property def last_updated_date(self): """ Get last_updated_date value. Notes: Time stamp when this object was last updated. This attribute is named `lastUpdatedDate` in VSD API. """ return self._last_updated_date @last_updated_date.setter def last_updated_date(self, value): """ Set last_updated_date value. Notes: Time stamp when this object was last updated. This attribute is named `lastUpdatedDate` in VSD API. """ self._last_updated_date = value @property def embedded_metadata(self): """ Get embedded_metadata value. Notes: Metadata objects associated with this entity. 
This will contain a list of Metadata objects if the API request is made using the special flag to enable the embedded Metadata feature. Only a maximum of Metadata objects is returned based on the value set in the system configuration. This attribute is named `embeddedMetadata` in VSD API. """ return self._embedded_metadata @embedded_metadata.setter def embedded_metadata(self, value): """ Set embedded_metadata value. Notes: Metadata objects associated with this entity. This will contain a list of Metadata objects if the API request is made using the special flag to enable the embedded Metadata feature. Only a maximum of Metadata objects is returned based on the value set in the system configuration. This attribute is named `embeddedMetadata` in VSD API. """ self._embedded_metadata = value @property def entity_scope(self): """ Get entity_scope value. Notes: Specify if scope of entity is Data center or Enterprise level This attribute is named `entityScope` in VSD API. """ return self._entity_scope @entity_scope.setter def entity_scope(self, value): """ Set entity_scope value. Notes: Specify if scope of entity is Data center or Enterprise level This attribute is named `entityScope` in VSD API. """ self._entity_scope = value @property def creation_date(self): """ Get creation_date value. Notes: Time stamp when this object was created. This attribute is named `creationDate` in VSD API. """ return self._creation_date @creation_date.setter def creation_date(self, value): """ Set creation_date value. Notes: Time stamp when this object was created. This attribute is named `creationDate` in VSD API. """ self._creation_date = value @property def owner(self): """ Get owner value. Notes: Identifies the user that has created this object. """ return self._owner @owner.setter def owner(self, value): """ Set owner value. Notes: Identifies the user that has created this object. """ self._owner = value @property def external_id(self): """ Get external_id value. Notes: External object ID. Used for integration with third party systems This attribute is named `externalID` in VSD API. """ return self._external_id @external_id.setter def external_id(self, value): """ Set external_id value. Notes: External object ID. Used for integration with third party systems This attribute is named `externalID` in VSD API. """ self._external_id = value @property def type(self): """ Get type value. Notes: The image type """ return self._type @type.setter def type(self, value): """ Set type value. Notes: The image type """ self._type = value
""" Display currently playing song from Google Play Music Desktop Player. Configuration parameters: cache_timeout: how often we refresh this module in seconds (default 5) format: specify the items and ordering of the data in the status bar. These area 1:1 match to gpmdp-remote's options (default is '♫ {info}'). Format of status string placeholders: See `gpmdp-remote help`. Simply surround the items you want displayed (i.e. `album`) with curly braces (i.e. `{album}`) and place as-desired in the format string. {info} Print info about now playing song {title} Print current song title {artist} Print current song artist {album} Print current song album {album_art} Print current song album art URL {time_current} Print current song time in milliseconds {time_total} Print total song time in milliseconds {status} Print whether GPMDP is paused or playing {current} Print now playing song in "artist - song" format {help} Print this help message Requires: gpmdp: http://www.googleplaymusicdesktopplayer.com/ gpmdp-remote: https://github.com/iandrewt/gpmdp-remote @author Aaron Fields https://twitter.com/spirotot @license BSD """ from time import time from subprocess import check_output class Py3status: """ """ # available configuration parameters cache_timeout = 5 format = u'♫ {info}' @staticmethod def _run_cmd(cmd): return check_output(['gpmdp-remote', cmd]).decode('utf-8').strip() def gpmdp(self, i3s_output_list, i3s_config): if self._run_cmd('status') == 'Paused': result = '' else: cmds = ['info', 'title', 'artist', 'album', 'status', 'current', 'time_total', 'time_current', 'album_art'] data = {} for cmd in cmds: if '{%s}' % cmd in self.format: data[cmd] = self._run_cmd(cmd) result = self.format.format(**data) response = { 'cached_until': time() + self.cache_timeout, 'full_text': result } return response if __name__ == "__main__": """ Run module in test mode. """ from py3status.module_test import module_test module_test(Py3status)
import os os.environ['OS_AUTH_URL'] = "https://keystone.rc.nectar.org.au:5000/v2.0/" os.environ['OS_TENANT_ID'] = "123456789012345678901234567890" os.environ['OS_TENANT_NAME'] = "tenant_name" os.environ['OS_USERNAME'] = "joe.bloggs@uni.edu.au" os.environ['OS_PASSWORD'] = "????????????????????"
from __future__ import absolute_import from .local import Local # noqa from .production import Production # noqa from .celery import app as celery_app
import matplotlib.pyplot as plt
import numpy as np
import scalpplot
from scalpplot import plot_scalp
from positions import POS_10_5
from scipy import signal

def plot_timeseries(frames, time=None, offset=None, color='k', linestyle='-'):
    frames = np.asarray(frames)
    if offset is None:
        offset = np.max(np.std(frames, axis=0)) * 3
    if time is None:
        time = np.arange(frames.shape[0])
    plt.plot(time, frames - np.mean(frames, axis=0) +
             np.arange(frames.shape[1]) * offset, color=color, ls=linestyle)

def plot_scalpgrid(scalps, sensors, locs=POS_10_5, width=None,
                   clim=None, cmap=None, titles=None):
    '''
    Plots a grid with scalpplots. Scalps contains the different scalps in the
    rows, sensors contains the names for the columns of scalps, locs is a dict
    that maps the sensor-names to locations.

    Width determines the width of the grid that contains the plots. Cmap
    selects a colormap, for example plt.cm.RdBu_r is very useful for AUC-ROC
    plots. Clim is a list containing the minimum and maximum value mapped to
    a color.

    Titles is an optional list with titles for each subplot.

    Returns a list with subplots for further manipulation.
    '''
    scalps = np.asarray(scalps)
    assert scalps.ndim == 2
    nscalps = scalps.shape[0]
    subplots = []

    if not width:
        width = int(min(8, np.ceil(np.sqrt(nscalps))))
    height = int(np.ceil(nscalps/float(width)))

    if not clim:
        clim = [np.min(scalps), np.max(scalps)]

    plt.clf()
    for i in range(nscalps):
        subplots.append(plt.subplot(height, width, i + 1))
        plot_scalp(scalps[i], sensors, locs, clim=clim, cmap=cmap)
        if titles:
            plt.title(titles[i])

    # plot colorbar next to last scalp
    bb = plt.gca().get_position()
    plt.colorbar(cax=plt.axes([bb.xmax + bb.width/10, bb.ymin, bb.width/10,
                               bb.height]),
                 ticks=np.linspace(clim[0], clim[1], 5).round(2))

    return subplots
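# A minimal sketch of how plot_scalpgrid might be called (illustrative
# values; assumes the scalpplot/positions modules above are importable and
# that these standard 10-5 sensor names exist in POS_10_5):
if __name__ == '__main__':
    sensors = ['Fz', 'Cz', 'Pz', 'Oz']
    scalps = np.random.randn(6, len(sensors))  # 6 scalp maps, one per row
    plot_scalpgrid(scalps, sensors, width=3, cmap=plt.cm.RdBu_r,
                   titles=['map %d' % i for i in range(6)])
    plt.show()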
from __future__ import unicode_literals from django.db import models, migrations class Migration(migrations.Migration): dependencies = [ ('setlist', '0012_remove_show_leg'), ] operations = [ migrations.CreateModel( name='Show2', fields=[ ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), ('venue', models.ForeignKey(to='setlist.Venue', to_field='id')), ('tour', models.ForeignKey(to='setlist.Tour', to_field='id')), ('date', models.DateField(db_index=True)), ('setlist', models.TextField(default=b'', blank=True)), ('notes', models.TextField(default=b'', blank=True)), ('source', models.TextField(default=b'', blank=True)), ], options={ }, bases=(models.Model,), ), ]
import sys from os.path import * import os from pyflann import * from copy import copy from numpy import * from numpy.random import * import unittest class Test_PyFLANN_nn(unittest.TestCase): def setUp(self): self.nn = FLANN(log_level="warning") ################################################################################ # The typical def test_nn_2d_10pt(self): self.__nd_random_test_autotune(2, 2) def test_nn_autotune_2d_1000pt(self): self.__nd_random_test_autotune(2, 1000) def test_nn_autotune_100d_1000pt(self): self.__nd_random_test_autotune(100, 1000) def test_nn_autotune_500d_100pt(self): self.__nd_random_test_autotune(500, 100) # # ########################################################################################## # # Stress it should handle # def test_nn_stress_1d_1pt_kmeans_autotune(self): self.__nd_random_test_autotune(1, 1) def __ensure_list(self,arg): if type(arg)!=list: return [arg] else: return arg def __nd_random_test_autotune(self, dim, N, num_neighbors = 1, **kwargs): """ Make a set of random points, then pass the same ones to the query points. Each point should be closest to itself. """ seed(0) x = rand(N, dim) xq = rand(N, dim) perm = permutation(N) # compute ground truth nearest neighbors gt_idx, gt_dist = self.nn.nn(x,xq, algorithm='linear', num_neighbors=num_neighbors) for tp in [0.70, 0.80, 0.90]: nidx,ndist = self.nn.nn(x, xq, algorithm='autotuned', sample_fraction=1.0, num_neighbors = num_neighbors, target_precision = tp, checks=-2, **kwargs) correctness = 0.0 for i in xrange(N): l1 = self.__ensure_list(nidx[i]) l2 = self.__ensure_list(gt_idx[i]) correctness += float(len(set(l1).intersection(l2)))/num_neighbors correctness /= N self.assert_(correctness >= tp*0.9, 'failed #1: targ_prec=%f, N=%d,correctness=%f' % (tp, N, correctness)) if __name__ == '__main__': unittest.main()
from collections import OrderedDict
import locale
from optparse import make_option
from verify.management.commands import VerifyBaseCommand
from verify.models import *
from verify.politici_models import *
from django.db.models import Q, Count

__author__ = 'guglielmo'

class Command(VerifyBaseCommand):
    """
    Report of overall gender statistics, at the national level, for all
    organs of all institutions.

    May be limited to one or more institutions, by passing a list of
    institution_id values.
    """
    args = '<institution_id institution_id ...>'
    help = "Report overall gender statistics for all organs of all institutions " \
           "(optionally limited to the given institution_ids)."
    option_list = VerifyBaseCommand.option_list

    def execute_verification(self, *args, **options):
        self.csv_headers = ["ISTITUZIONE", "INCARICO", "N_DONNE", "N_UOMINI",
                            "N_TOTALI", "PERC_DONNE", "PERC_UOMINI"]

        institutions = OpInstitution.objects.using('politici').all()
        if args:
            institutions = institutions.filter(id__in=args)
            self.logger.info(
                "Verification {0} launched with institutions limited to {1}".format(
                    self.__class__.__module__,
                    ",".join(str(i) for i in institutions.values_list('id', flat=True))
                )
            )
        else:
            self.logger.info(
                "Verification {0} launched for all institutions".format(
                    self.__class__.__module__
                )
            )

        self.ok_locs = []
        self.ko_locs = []
        for institution in institutions:
            charge_types_ids = OpInstitutionCharge.objects.using('politici').\
                filter(date_end__isnull=True, content__deleted_at__isnull=True).\
                filter(institution=institution).\
                values_list('charge_type', flat=True).\
                distinct()
            charge_types = OpChargeType.objects.using('politici').\
                filter(id__in=charge_types_ids)

            for charge_type in charge_types:
                self.logger.info(
                    "Counting {0} in {1}".format(
                        charge_type.name, institution.name
                    )
                )
                qs = OpInstitutionCharge.objects.using('politici').\
                    filter(date_end__isnull=True, content__deleted_at__isnull=True).\
                    filter(institution=institution, charge_type=charge_type)
                n_tot = qs.count()
                if not n_tot:
                    # avoid a division by zero for charge types with no holders
                    continue
                n_fem = qs.filter(politician__sex__iexact='f').count()
                n_mal = n_tot - n_fem
                merged = [institution.name, charge_type.name, n_fem, n_mal, n_tot,]
                merged.append(locale.format("%.2f", 100. * n_fem / float(n_tot)))
                merged.append(locale.format("%.2f", 100. * n_mal / float(n_tot)))
                self.ko_locs.append(merged)

        outcome = Verification.OUTCOME.failed
        self.logger.info(
            "Report with {0} rows generated.".format(
                len(self.ko_locs)
            )
        )

        return outcome
from distutils.core import setup setup(name='django-modeltranslation', version='0.4.0-alpha1', description='Translates Django models using a registration approach.', long_description='The modeltranslation application can be used to ' 'translate dynamic content of existing models to an ' 'arbitrary number of languages without having to ' 'change the original model classes. It uses a ' 'registration approach (comparable to Django\'s admin ' 'app) to be able to add translations to existing or ' 'new projects and is fully integrated into the Django ' 'admin backend.', author='Peter Eschler', author_email='p.eschler@nmy.de', maintainer='Dirk Eschler', maintainer_email='d.eschler@nmy.de', url='http://code.google.com/p/django-modeltranslation/', packages=['modeltranslation', 'modeltranslation.management', 'modeltranslation.management.commands'], package_data={'modeltranslation': ['static/modeltranslation/css/*.css', 'static/modeltranslation/js/*.js']}, include_package_data = True, requires=['django(>=1.0)'], download_url='http://django-modeltranslation.googlecode.com/files/django-modeltranslation-0.4.0-alpha1.tar.gz', classifiers=['Framework :: Django', 'Intended Audience :: Developers', 'License :: OSI Approved :: BSD License'], license='New BSD')
"""Test for Credential cache library.""" __copyright__ = 'Copyright (C) 2009, Purdue University' __license__ = 'BSD' __version__ = '#TRUNK#' import unittest import os import roster_core from roster_server import credentials CONFIG_FILE = 'test_data/roster.conf' # Example in test_data SCHEMA_FILE = '../roster-core/data/database_schema.sql' DATA_FILE = 'test_data/test_data.sql' class TestCredentialsLibrary(unittest.TestCase): def setUp(self): self.config_instance = roster_core.Config(file_name=CONFIG_FILE) self.cred_instance = credentials.CredCache(self.config_instance, u'sharrell') db_instance = self.config_instance.GetDb() db_instance.CreateRosterDatabase() data = open(DATA_FILE, 'r').read() db_instance.StartTransaction() db_instance.cursor.execute(data) db_instance.EndTransaction() db_instance.close() self.core_instance = roster_core.Core(u'sharrell', self.config_instance) def is_valid_uuid (self, uuid): """ TAKEN FROM THE BLUEZ MODULE is_valid_uuid (uuid) -> bool returns True if uuid is a valid 128-bit UUID. valid UUIDs are always strings taking one of the following forms: XXXX XXXXXXXX XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX where each X is a hexadecimal digit (case insensitive) """ try: if len (uuid) == 4: if int (uuid, 16) < 0: return False elif len (uuid) == 8: if int (uuid, 16) < 0: return False elif len (uuid) == 36: pieces = uuid.split ("-") if len (pieces) != 5 or \ len (pieces[0]) != 8 or \ len (pieces[1]) != 4 or \ len (pieces[2]) != 4 or \ len (pieces[3]) != 4 or \ len (pieces[4]) != 12: return False [ int (p, 16) for p in pieces ] else: return False except ValueError: return False except TypeError: return False return True def testCredentials(self): self.assertTrue(self.cred_instance.Authenticate(u'sharrell', 'test')) cred_string = self.cred_instance.GetCredentials(u'sharrell', 'test', self.core_instance) self.assertEqual(self.cred_instance.CheckCredential(cred_string, u'sharrell', self.core_instance), u'') self.assertEqual(self.cred_instance.CheckCredential(u'test', u'sharrell', self.core_instance), None) if( __name__ == '__main__' ): unittest.main()
import sys import hyperdex.client from hyperdex.client import LessEqual, GreaterEqual, Range, Regex, LengthEquals, LengthLessEqual, LengthGreaterEqual c = hyperdex.client.Client(sys.argv[1], int(sys.argv[2])) def to_objectset(xs): return set([frozenset(x.items()) for x in xs]) assert c.put('kv', 'k', {}) == True assert c.get('kv', 'k') == {'v': {}} assert c.put('kv', 'k', {'v': {1: 3.14, 2: 0.25, 3: 1.0}}) == True assert c.get('kv', 'k') == {'v': {1: 3.14, 2: 0.25, 3: 1.0}} assert c.put('kv', 'k', {'v': {}}) == True assert c.get('kv', 'k') == {'v': {}}
""" Room Typeclasses for the TutorialWorld. This defines special types of Rooms available in the tutorial. To keep everything in one place we define them together with the custom commands needed to control them. Those commands could also have been in a separate module (e.g. if they could have been re-used elsewhere.) """ from __future__ import print_function import random from evennia import TICKER_HANDLER from evennia import CmdSet, Command, DefaultRoom from evennia import utils, create_object, search_object from evennia import syscmdkeys, default_cmds from evennia.contrib.tutorial_world.objects import LightSource from django.conf import settings _SEARCH_AT_RESULT = utils.object_from_module(settings.SEARCH_AT_RESULT) class CmdTutorial(Command): """ Get help during the tutorial Usage: tutorial [obj] This command allows you to get behind-the-scenes info about an object or the current location. """ key = "tutorial" aliases = ["tut"] locks = "cmd:all()" help_category = "TutorialWorld" def func(self): """ All we do is to scan the current location for an Attribute called `tutorial_info` and display that. """ caller = self.caller if not self.args: target = self.obj # this is the room the command is defined on else: target = caller.search(self.args.strip()) if not target: return helptext = target.db.tutorial_info if helptext: caller.msg("|G%s|n" % helptext) else: caller.msg("|RSorry, there is no tutorial help available here.|n") class CmdTutorialSetDetail(default_cmds.MuxCommand): """ sets a detail on a room Usage: @detail <key> = <description> @detail <key>;<alias>;... = description Example: @detail walls = The walls are covered in ... @detail castle;ruin;tower = The distant ruin ... This sets a "detail" on the object this command is defined on (TutorialRoom for this tutorial). This detail can be accessed with the TutorialRoomLook command sitting on TutorialRoom objects (details are set as a simple dictionary on the room). This is a Builder command. We custom parse the key for the ;-separator in order to create multiple aliases to the detail all at once. """ key = "@detail" locks = "cmd:perm(Builder)" help_category = "TutorialWorld" def func(self): """ All this does is to check if the object has the set_detail method and uses it. """ if not self.args or not self.rhs: self.caller.msg("Usage: @detail key = description") return if not hasattr(self.obj, "set_detail"): self.caller.msg("Details cannot be set on %s." % self.obj) return for key in self.lhs.split(";"): # loop over all aliases, if any (if not, this will just be # the one key to loop over) self.obj.set_detail(key, self.rhs) self.caller.msg("Detail set: '%s': '%s'" % (self.lhs, self.rhs)) class CmdTutorialLook(default_cmds.CmdLook): """ looks at the room and on details Usage: look <obj> look <room detail> look *<account> Observes your location, details at your location or objects in your vicinity. Tutorial: This is a child of the default Look command, that also allows us to look at "details" in the room. These details are things to examine and offers some extra description without actually having to be actual database objects. It uses the return_detail() hook on TutorialRooms for this. """ # we don't need to specify key/locks etc, this is already # set by the parent. help_category = "TutorialWorld" def func(self): """ Handle the looking. This is a copy of the default look code except for adding in the details. """ caller = self.caller args = self.args if args: # we use quiet=True to turn off automatic error reporting. 
            # This tells search that we want to handle error messages
            # ourselves. This also means the search function will always
            # return a list (with 0, 1 or more elements) rather than
            # result/None.
            looking_at_obj = caller.search(args,
                                           # note: excludes room/room aliases
                                           candidates=caller.location.contents + caller.contents,
                                           use_nicks=True,
                                           quiet=True)
            if len(looking_at_obj) != 1:
                # no target found or more than one target found (multimatch)
                # look for a detail that may match
                detail = self.obj.return_detail(args)
                if detail:
                    self.caller.msg(detail)
                    return
                else:
                    # no detail found, delegate our result to the normal
                    # error message handler.
                    _SEARCH_AT_RESULT(None, caller, args, looking_at_obj)
                    return
            else:
                # we found a match, extract it from the list and carry on
                # normally with the look handling.
                looking_at_obj = looking_at_obj[0]
        else:
            looking_at_obj = caller.location
            if not looking_at_obj:
                caller.msg("You have no location to look at!")
                return

        if not hasattr(looking_at_obj, 'return_appearance'):
            # this is likely due to us having an account instead
            looking_at_obj = looking_at_obj.character
        if not looking_at_obj.access(caller, "view"):
            caller.msg("Could not find '%s'." % args)
            return
        # get object's appearance
        caller.msg(looking_at_obj.return_appearance(caller))
        # the object's at_desc() method.
        looking_at_obj.at_desc(looker=caller)
        return


class TutorialRoomCmdSet(CmdSet):
    """
    Implements the simple tutorial cmdset. This will overload the look
    command in the default CharacterCmdSet since it has a higher priority
    (CharacterCmdSet has prio 0)
    """
    key = "tutorial_cmdset"
    priority = 1

    def at_cmdset_creation(self):
        """add the tutorial-room commands"""
        self.add(CmdTutorial())
        self.add(CmdTutorialSetDetail())
        self.add(CmdTutorialLook())


class TutorialRoom(DefaultRoom):
    """
    This is the base room type for all rooms in the tutorial world.
    It defines a cmdset on itself for reading tutorial info about the location.
    """
    def at_object_creation(self):
        """Called when room is first created"""
        self.db.tutorial_info = "This is a tutorial room. It allows you to use the 'tutorial' command."
        self.cmdset.add_default(TutorialRoomCmdSet)

    def at_object_receive(self, new_arrival, source_location):
        """
        When an object enters a tutorial room we tell other objects in
        the room about it by trying to call a hook on them. The Mob object
        uses this to cheaply get notified of enemies without having
        to constantly scan for them.

        Args:
            new_arrival (Object): the object that just entered this room.
            source_location (Object): the previous location of new_arrival.

        """
        if new_arrival.has_account and not new_arrival.is_superuser:
            # this is a character
            for obj in self.contents_get(exclude=new_arrival):
                if hasattr(obj, "at_new_arrival"):
                    obj.at_new_arrival(new_arrival)

    def return_detail(self, detailkey):
        """
        This looks for an Attribute "details" and possibly
        returns the value of it.

        Args:
            detailkey (str): The detail being looked at. This is
                case-insensitive.

        """
        details = self.db.details
        if details:
            return details.get(detailkey.lower(), None)

    def set_detail(self, detailkey, description):
        """
        This sets a new detail, using an Attribute "details".

        Args:
            detailkey (str): The detail identifier to add (for
                aliases you need to add multiple keys to the
                same description). Case-insensitive.
            description (str): The text to return when looking
                at the given detailkey.
""" if self.db.details: self.db.details[detailkey.lower()] = description else: self.db.details = {detailkey.lower(): description} WEATHER_STRINGS = ( "The rain coming down from the iron-grey sky intensifies.", "A gust of wind throws the rain right in your face. Despite your cloak you shiver.", "The rainfall eases a bit and the sky momentarily brightens.", "For a moment it looks like the rain is slowing, then it begins anew with renewed force.", "The rain pummels you with large, heavy drops. You hear the rumble of thunder in the distance.", "The wind is picking up, howling around you, throwing water droplets in your face. It's cold.", "Bright fingers of lightning flash over the sky, moments later followed by a deafening rumble.", "It rains so hard you can hardly see your hand in front of you. You'll soon be drenched to the bone.", "Lightning strikes in several thundering bolts, striking the trees in the forest to your west.", "You hear the distant howl of what sounds like some sort of dog or wolf.", "Large clouds rush across the sky, throwing their load of rain over the world.") class WeatherRoom(TutorialRoom): """ This should probably better be called a rainy room... This sets up an outdoor room typeclass. At irregular intervals, the effects of weather will show in the room. Outdoor rooms should inherit from this. """ def at_object_creation(self): """ Called when object is first created. We set up a ticker to update this room regularly. Note that we could in principle also use a Script to manage the ticking of the room; the TickerHandler works fine for simple things like this though. """ super(WeatherRoom, self).at_object_creation() # subscribe ourselves to a ticker to repeatedly call the hook # "update_weather" on this object. The interval is randomized # so as to not have all weather rooms update at the same time. self.db.interval = random.randint(50, 70) TICKER_HANDLER.add(interval=self.db.interval, callback=self.update_weather, idstring="tutorial") # this is parsed by the 'tutorial' command on TutorialRooms. self.db.tutorial_info = \ "This room has a Script running that has it echo a weather-related message at irregular intervals." def update_weather(self, *args, **kwargs): """ Called by the tickerhandler at regular intervals. Even so, we only update 20% of the time, picking a random weather message when we do. The tickerhandler requires that this hook accepts any arguments and keyword arguments (hence the *args, **kwargs even though we don't actually use them in this example) """ if random.random() < 0.2: # only update 20 % of the time self.msg_contents("|w%s|n" % random.choice(WEATHER_STRINGS)) SUPERUSER_WARNING = "\nWARNING: You are playing as a superuser ({name}). Use the {quell} command to\n" \ "play without superuser privileges (many functions and puzzles ignore the \n" \ "presence of a superuser, making this mode useful for exploring things behind \n" \ "the scenes later).\n" \ class IntroRoom(TutorialRoom): """ Intro room properties to customize: char_health - integer > 0 (default 20) """ def at_object_creation(self): """ Called when the room is first created. """ super(IntroRoom, self).at_object_creation() self.db.tutorial_info = "The first room of the tutorial. " \ "This assigns the health Attribute to "\ "the account." 
    def at_object_receive(self, character, source_location):
        """
        Assign properties on characters
        """

        # setup character for the tutorial
        health = self.db.char_health or 20

        if character.has_account:
            character.db.health = health
            character.db.health_max = health

        if character.is_superuser:
            string = "-" * 78 + SUPERUSER_WARNING + "-" * 78
            character.msg("|r%s|n" % string.format(name=character.key, quell="|w@quell|r"))


class CmdEast(Command):
    """
    Go eastwards across the bridge.

    Tutorial info:
        This command relies on the caller having two Attributes
        (assigned by the room when entering):
            - east_exit: a unique name or dbref to the room to go to
              when exiting east.
            - west_exit: a unique name or dbref to the room to go to
              when exiting west.
        The room must also have the following Attributes
            - tutorial_bridge_position: the current position on
              the bridge, 0 - 4.

    """
    key = "east"
    aliases = ["e"]
    locks = "cmd:all()"
    help_category = "TutorialWorld"

    def func(self):
        """move one step eastwards"""
        caller = self.caller

        bridge_step = min(5, caller.db.tutorial_bridge_position + 1)

        if bridge_step > 4:
            # we have reached the far east end of the bridge.
            # Move to the east room.
            eexit = search_object(self.obj.db.east_exit)
            if eexit:
                caller.move_to(eexit[0])
            else:
                caller.msg("No east exit was found for this room. Contact an admin.")
            return
        caller.db.tutorial_bridge_position = bridge_step
        # since we are really in one room, we have to notify others
        # in the room when we move.
        caller.location.msg_contents("%s steps eastwards across the bridge." % caller.name, exclude=caller)
        caller.execute_cmd("look")


class CmdWest(Command):
    """
    Go westwards across the bridge.

    Tutorial info:
        This command relies on the caller having two Attributes
        (assigned by the room when entering):
            - east_exit: a unique name or dbref to the room to go to
              when exiting east.
            - west_exit: a unique name or dbref to the room to go to
              when exiting west.
        The room must also have the following property:
            - tutorial_bridge_position: the current position on
              the bridge, 0 - 4.

    """
    key = "west"
    aliases = ["w"]
    locks = "cmd:all()"
    help_category = "TutorialWorld"

    def func(self):
        """move one step westwards"""
        caller = self.caller

        bridge_step = max(-1, caller.db.tutorial_bridge_position - 1)

        if bridge_step < 0:
            # we have reached the far west end of the bridge.
            # Move to the west room.
            wexit = search_object(self.obj.db.west_exit)
            if wexit:
                caller.move_to(wexit[0])
            else:
                caller.msg("No west exit was found for this room. Contact an admin.")
            return
        caller.db.tutorial_bridge_position = bridge_step
        # since we are really in one room, we have to notify others
        # in the room when we move.
        caller.location.msg_contents("%s steps westwards across the bridge." % caller.name, exclude=caller)
        caller.execute_cmd("look")


BRIDGE_POS_MESSAGES = ("You are standing |wvery close to the bridge's western foundation|n."
                       " If you go west you will be back on solid ground ...",
                       "The bridge slopes precariously where it extends eastwards"
                       " towards the lowest point - the center point of the hanging bridge.",
                       "You are |whalfway|n out on the unstable bridge.",
                       "The bridge slopes precariously where it extends westwards"
                       " towards the lowest point - the center point of the hanging bridge.",
                       "You are standing |wvery close to the bridge's eastern foundation|n."
" If you go east you will be back on solid ground ...") BRIDGE_MOODS = ("The bridge sways in the wind.", "The hanging bridge creaks dangerously.", "You clasp the ropes firmly as the bridge sways and creaks under you.", "From the castle you hear a distant howling sound, like that of a large dog or other beast.", "The bridge creaks under your feet. Those planks does not seem very sturdy.", "Far below you the ocean roars and throws its waves against the cliff," " as if trying its best to reach you.", "Parts of the bridge come loose behind you, falling into the chasm far below!", "A gust of wind causes the bridge to sway precariously.", "Under your feet a plank comes loose, tumbling down. For a moment you dangle over the abyss ...", "The section of rope you hold onto crumble in your hands," " parts of it breaking apart. You sway trying to regain balance.") FALL_MESSAGE = "Suddenly the plank you stand on gives way under your feet! You fall!" \ "\nYou try to grab hold of an adjoining plank, but all you manage to do is to " \ "divert your fall westwards, towards the cliff face. This is going to hurt ... " \ "\n ... The world goes dark ...\n\n" class CmdLookBridge(Command): """ looks around at the bridge. Tutorial info: This command assumes that the room has an Attribute "fall_exit", a unique name or dbref to the place they end upp if they fall off the bridge. """ key = 'look' aliases = ["l"] locks = "cmd:all()" help_category = "TutorialWorld" def func(self): """Looking around, including a chance to fall.""" caller = self.caller bridge_position = self.caller.db.tutorial_bridge_position # this command is defined on the room, so we get it through self.obj location = self.obj # randomize the look-echo message = "|c%s|n\n%s\n%s" % (location.key, BRIDGE_POS_MESSAGES[bridge_position], random.choice(BRIDGE_MOODS)) chars = [obj for obj in self.obj.contents_get(exclude=caller) if obj.has_account] if chars: # we create the You see: message manually here message += "\n You see: %s" % ", ".join("|c%s|n" % char.key for char in chars) self.caller.msg(message) # there is a chance that we fall if we are on the western or central # part of the bridge. if bridge_position < 3 and random.random() < 0.05 and not self.caller.is_superuser: # we fall 5% of time. fall_exit = search_object(self.obj.db.fall_exit) if fall_exit: self.caller.msg("|r%s|n" % FALL_MESSAGE) self.caller.move_to(fall_exit[0], quiet=True) # inform others on the bridge self.obj.msg_contents("A plank gives way under %s's feet and " "they fall from the bridge!" % self.caller.key) class CmdBridgeHelp(Command): """ Overwritten help command while on the bridge. """ key = "help" aliases = ["h", "?"] locks = "cmd:all()" help_category = "Tutorial world" def func(self): """Implements the command.""" string = "You are trying hard not to fall off the bridge ..." \ "\n\nWhat you can do is trying to cross the bridge |weast|n" \ " or try to get back to the mainland |wwest|n)." self.caller.msg(string) class BridgeCmdSet(CmdSet): """This groups the bridge commands. We will store it on the room.""" key = "Bridge commands" priority = 1 # this gives it precedence over the normal look/help commands. 
    def at_cmdset_creation(self):
        """Called at first cmdset creation"""
        self.add(CmdTutorial())
        self.add(CmdEast())
        self.add(CmdWest())
        self.add(CmdLookBridge())
        self.add(CmdBridgeHelp())


BRIDGE_WEATHER = (
    "The rain intensifies, making the planks of the bridge even more slippery.",
    "A gust of wind throws the rain right in your face.",
    "The rainfall eases a bit and the sky momentarily brightens.",
    "The bridge shakes under the rumble of a nearby lightning strike.",
    "The rain pummels you with large, heavy drops. You hear the distinct howl of a large hound in the distance.",
    "The wind is picking up, howling around you and causing the bridge to sway from side to side.",
    "Some sort of large bird sweeps by overhead, giving off an eerie screech. Soon it has disappeared in the gloom.",
    "The bridge sways from side to side in the wind.",
    "Below you a particularly large wave crashes into the rocks.",
    "From the ruin you hear a distant, otherworldly howl. Or maybe it was just the wind.")


class BridgeRoom(WeatherRoom):
    """
    The bridge room implements an unsafe bridge. It also puts the player
    into a state where they get new commands for trying to cross the bridge.

    We want this to result in the account getting a special set of
    commands related to crossing the bridge. The result is that it
    will take several steps to cross it, despite the bridge being
    represented by only a single room.

    We divide the bridge into steps:

        self.db.west_exit     -  -  |  -  -     self.db.east_exit
                              0  1  2  3  4

    The position is handled by a variable stored on the character when
    entering; giving the special move commands will increase/decrease
    the counter until the bridge is crossed.

    We also have self.db.fall_exit, which points to a gathering
    location to end up in if we happen to fall off the bridge (used by
    the CmdLookBridge command).
    """
    def at_object_creation(self):
        """Sets up the room."""
        # this will start the weather room's ticker and tell
        # it to call update_weather regularly.
        super(BridgeRoom, self).at_object_creation()

        # this identifies the exits from the room (should be the command
        # needed to leave through that exit). These are defaults, but you
        # could of course also change them after the room has been created.
        self.db.west_exit = "cliff"
        self.db.east_exit = "gate"
        self.db.fall_exit = "cliffledge"

        # add the cmdset on the room.
        self.cmdset.add_default(BridgeCmdSet)

        # since the default Character's at_look() will access the room's
        # return_description (this skips the cmdset) when
        # first entering it, we need to explicitly turn off the room
        # as a normal view target - once inside, our own look will
        # handle all return messages.
        self.locks.add("view:false()")

    def update_weather(self, *args, **kwargs):
        """
        This is called at irregular intervals and makes the passage
        over the bridge a little more interesting.
        """
        if random.random() < 0.8:
            # send a message most of the time
            self.msg_contents("|w%s|n" % random.choice(BRIDGE_WEATHER))

    def at_object_receive(self, character, source_location):
        """
        This hook is called by the engine whenever the player is moved
        into this room.
        """
        if character.has_account:
            # we only run this if the entered object is indeed a player object.
            # check that our east/west exits are correctly defined.
            wexit = search_object(self.db.west_exit)
            eexit = search_object(self.db.east_exit)
            fexit = search_object(self.db.fall_exit)
            if not (wexit and eexit and fexit):
                character.msg("The bridge's exits are not properly configured. "
                              "Contact an admin. 
Forcing west-end placement.") character.db.tutorial_bridge_position = 0 return if source_location == eexit[0]: # we assume we enter from the same room we will exit to character.db.tutorial_bridge_position = 4 else: # if not from the east, then from the west! character.db.tutorial_bridge_position = 0 character.execute_cmd("look") def at_object_leave(self, character, target_location): """ This is triggered when the player leaves the bridge room. """ if character.has_account: # clean up the position attribute del character.db.tutorial_bridge_position DARK_MESSAGES = ("It is pitch black. You are likely to be eaten by a grue.", "It's pitch black. You fumble around but cannot find anything.", "You don't see a thing. You feel around, managing to bump your fingers hard against something. Ouch!", "You don't see a thing! Blindly grasping the air around you, you find nothing.", "It's totally dark here. You almost stumble over some un-evenness in the ground.", "You are completely blind. For a moment you think you hear someone breathing nearby ... " "\n ... surely you must be mistaken.", "Blind, you think you find some sort of object on the ground, but it turns out to be just a stone.", "Blind, you bump into a wall. The wall seems to be covered with some sort of vegetation," " but its too damp to burn.", "You can't see anything, but the air is damp. It feels like you are far underground.") ALREADY_LIGHTSOURCE = "You don't want to stumble around in blindness anymore. You already " \ "found what you need. Let's get light already!" FOUND_LIGHTSOURCE = "Your fingers bump against a splinter of wood in a corner." \ " It smells of resin and seems dry enough to burn! " \ "You pick it up, holding it firmly. Now you just need to" \ " |wlight|n it using the flint and steel you carry with you." class CmdLookDark(Command): """ Look around in darkness Usage: look Look around in the darkness, trying to find something. """ key = "look" aliases = ["l", 'feel', 'search', 'feel around', 'fiddle'] locks = "cmd:all()" help_category = "TutorialWorld" def func(self): """ Implement the command. This works both as a look and a search command; there is a random chance of eventually finding a light source. """ caller = self.caller if random.random() < 0.8: # we don't find anything caller.msg(random.choice(DARK_MESSAGES)) else: # we could have found something! if any(obj for obj in caller.contents if utils.inherits_from(obj, LightSource)): # we already carry a LightSource object. caller.msg(ALREADY_LIGHTSOURCE) else: # don't have a light source, create a new one. create_object(LightSource, key="splinter", location=caller) caller.msg(FOUND_LIGHTSOURCE) class CmdDarkHelp(Command): """ Help command for the dark state. """ key = "help" locks = "cmd:all()" help_category = "TutorialWorld" def func(self): """ Replace the the help command with a not-so-useful help """ string = "Can't help you until you find some light! Try looking/feeling around for something to burn. " \ "You shouldn't give up even if you don't find anything right away." self.caller.msg(string) class CmdDarkNoMatch(Command): """ This is a system command. Commands with special keys are used to override special sitations in the game. The CMD_NOMATCH is used when the given command is not found in the current command set (it replaces Evennia's default behavior or offering command suggestions) """ key = syscmdkeys.CMD_NOMATCH locks = "cmd:all()" def func(self): """Implements the command.""" self.caller.msg("Until you find some light, there's not much you can do. 
Try feeling around.") class DarkCmdSet(CmdSet): """ Groups the commands of the dark room together. We also import the default say command here so that players can still talk in the darkness. We give the cmdset the mergetype "Replace" to make sure it completely replaces whichever command set it is merged onto (usually the default cmdset) """ key = "darkroom_cmdset" mergetype = "Replace" priority = 2 def at_cmdset_creation(self): """populate the cmdset.""" self.add(CmdTutorial()) self.add(CmdLookDark()) self.add(CmdDarkHelp()) self.add(CmdDarkNoMatch()) self.add(default_cmds.CmdSay) class DarkRoom(TutorialRoom): """ A dark room. This tries to start the DarkState script on all objects entering. The script is responsible for making sure it is valid (that is, that there is no light source shining in the room). The is_lit Attribute is used to define if the room is currently lit or not, so as to properly echo state changes. Since this room (in the tutorial) is meant as a sort of catch-all, we also make sure to heal characters ending up here, since they may have been beaten up by the ghostly apparition at this point. """ def at_object_creation(self): """ Called when object is first created. """ super(DarkRoom, self).at_object_creation() self.db.tutorial_info = "This is a room with custom command sets on itself." # the room starts dark. self.db.is_lit = False self.cmdset.add(DarkCmdSet, permanent=True) def at_init(self): """ Called when room is first recached (such as after a reload) """ self.check_light_state() def _carries_light(self, obj): """ Checks if the given object carries anything that gives light. Note that we do NOT look for a specific LightSource typeclass, but for the Attribute is_giving_light - this makes it easy to later add other types of light-giving items. We also accept if there is a light-giving object in the room overall (like if a splinter was dropped in the room) """ return obj.is_superuser or obj.db.is_giving_light or any(o for o in obj.contents if o.db.is_giving_light) def _heal(self, character): """ Heal a character. """ health = character.db.health_max or 20 character.db.health = health def check_light_state(self, exclude=None): """ This method checks if there are any light sources in the room. If there isn't it makes sure to add the dark cmdset to all characters in the room. It is called whenever characters enter the room and also by the Light sources when they turn on. Args: exclude (Object): An object to not include in the light check. """ if any(self._carries_light(obj) for obj in self.contents if obj != exclude): self.locks.add("view:all()") self.cmdset.remove(DarkCmdSet) self.db.is_lit = True for char in (obj for obj in self.contents if obj.has_account): # this won't do anything if it is already removed char.msg("The room is lit up.") else: # noone is carrying light - darken the room self.db.is_lit = False self.locks.add("view:false()") self.cmdset.add(DarkCmdSet, permanent=True) for char in (obj for obj in self.contents if obj.has_account): if char.is_superuser: char.msg("You are Superuser, so you are not affected by the dark state.") else: # put players in darkness char.msg("The room is completely dark.") def at_object_receive(self, obj, source_location): """ Called when an object enters the room. 
""" if obj.has_account: # a puppeted object, that is, a Character self._heal(obj) # in case the new guy carries light with them self.check_light_state() def at_object_leave(self, obj, target_location): """ In case people leave with the light, we make sure to clear the DarkCmdSet if necessary. This also works if they are teleported away. """ # since this hook is called while the object is still in the room, # we exclude it from the light check, to ignore any light sources # it may be carrying. self.check_light_state(exclude=obj) class TeleportRoom(TutorialRoom): """ Teleporter - puzzle room. Important attributes (set at creation): puzzle_key - which attr to look for on character puzzle_value - what char.db.puzzle_key must be set to success_teleport_to - where to teleport in case if success success_teleport_msg - message to echo while teleporting to success failure_teleport_to - where to teleport to in case of failure failure_teleport_msg - message to echo while teleporting to failure """ def at_object_creation(self): """Called at first creation""" super(TeleportRoom, self).at_object_creation() # what character.db.puzzle_clue must be set to, to avoid teleportation. self.db.puzzle_value = 1 # target of successful teleportation. Can be a dbref or a # unique room name. self.db.success_teleport_msg = "You are successful!" self.db.success_teleport_to = "treasure room" # the target of the failure teleportation. self.db.failure_teleport_msg = "You fail!" self.db.failure_teleport_to = "dark cell" def at_object_receive(self, character, source_location): """ This hook is called by the engine whenever the player is moved into this room. """ if not character.has_account: # only act on player characters. return # determine if the puzzle is a success or not is_success = str(character.db.puzzle_clue) == str(self.db.puzzle_value) teleport_to = self.db.success_teleport_to if is_success else self.db.failure_teleport_to # note that this returns a list results = search_object(teleport_to) if not results or len(results) > 1: # we cannot move anywhere since no valid target was found. character.msg("no valid teleport target for %s was found." % teleport_to) return if character.is_superuser: # superusers don't get teleported character.msg("Superuser block: You would have been teleported to %s." % results[0]) return # perform the teleport if is_success: character.msg(self.db.success_teleport_msg) else: character.msg(self.db.failure_teleport_msg) # teleport quietly to the new place character.move_to(results[0], quiet=True, move_hooks=False) # we have to call this manually since we turn off move_hooks # - this is necessary to make the target dark room aware of an # already carried light. results[0].at_object_receive(character, self) class OutroRoom(TutorialRoom): """ Outro room. Called when exiting the tutorial, cleans the character of tutorial-related attributes. """ def at_object_creation(self): """ Called when the room is first created. """ super(OutroRoom, self).at_object_creation() self.db.tutorial_info = "The last room of the tutorial. " \ "This cleans up all temporary Attributes " \ "the tutorial may have assigned to the "\ "character." def at_object_receive(self, character, source_location): """ Do cleanup. 
""" if character.has_account: del character.db.health_max del character.db.health del character.db.last_climbed del character.db.puzzle_clue del character.db.combat_parry_mode del character.db.tutorial_bridge_position for obj in character.contents: if obj.typeclass_path.startswith("evennia.contrib.tutorial_world"): obj.delete() character.tags.clear(category="tutorial_world")
""" Vision-specific analysis functions. $Id: featureresponses.py 7714 2008-01-24 16:42:21Z antolikjan $ """ __version__='$Revision: 7714 $' from math import fmod,floor,pi,sin,cos,sqrt import numpy from numpy.oldnumeric import Float from numpy import zeros, array, size, empty, object_ try: import pylab except ImportError: print "Warning: Could not import matplotlib; pylab plots will not work." import param import topo from topo.base.cf import CFSheet from topo.base.sheetview import SheetView from topo.misc.filepath import normalize_path from topo.misc.numbergenerator import UniformRandom from topo.plotting.plotgroup import create_plotgroup, plotgroups from topo.command.analysis import measure_sine_pref max_value = 0 global_index = () def _complexity_rec(x,y,index,depth,fm): """ Recurrent helper function for complexity() """ global max_value global global_index if depth<size(fm.features): for i in range(size(fm.features[depth].values)): _complexity_rec(x,y,index + (i,),depth+1,fm) else: if max_value < fm.full_matrix[index][x][y]: global_index = index max_value = fm.full_matrix[index][x][y] def complexity(full_matrix): global global_index global max_value """This function expects as an input a object of type FullMatrix which contains responses of all neurons in a sheet to stimuly with different varying parameter values. One of these parameters (features) has to be phase. In such case it computes the classic modulation ratio (see Hawken et al. for definition) for each neuron and returns them as a matrix. """ rows,cols = full_matrix.matrix_shape complexity = zeros(full_matrix.matrix_shape) complex_matrix = zeros(full_matrix.matrix_shape,object_) fftmeasure = zeros(full_matrix.matrix_shape,Float) i = 0 for f in full_matrix.features: if f.name == "phase": phase_index = i break i=i+1 sum = 0.0 res = 0.0 average = 0.0 for x in range(rows): for y in range(cols): complex_matrix[x,y] = []# max_value=-0.01 global_index = () _complexity_rec(x,y,(),0,full_matrix) #compute the sum of the responses over phases given the found index of highest response iindex = array(global_index) sum = 0.0 for i in range(size(full_matrix.features[phase_index].values)): iindex[phase_index] = i sum = sum + full_matrix.full_matrix[tuple(iindex.tolist())][x][y] #average average = sum / float(size(full_matrix.features[phase_index].values)) res = 0.0 #compute the sum of absolute values of the responses minus average for i in range(size(full_matrix.features[phase_index].values)): iindex[phase_index] = i res = res + abs(full_matrix.full_matrix[tuple(iindex.tolist())][x][y] - average) complex_matrix[x,y] = complex_matrix[x,y] + [full_matrix.full_matrix[tuple(iindex.tolist())][x][y]] #this is taking away the DC component #complex_matrix[x,y] -= numpy.min(complex_matrix[x,y]) if x==15 and y==15: pylab.figure() pylab.plot(complex_matrix[x,y]) if x==26 and y==26: pylab.figure() pylab.plot(complex_matrix[x,y]) #complexity[x,y] = res / (2*sum) fft = numpy.fft.fft(complex_matrix[x,y]+complex_matrix[x,y]+complex_matrix[x,y]+complex_matrix[x,y],2048) first_har = 2048/len(complex_matrix[0,0]) if abs(fft[0]) != 0: fftmeasure[x,y] = 2 *abs(fft[first_har]) /abs(fft[0]) else: fftmeasure[x,y] = 0 return fftmeasure def compute_ACDC_orientation_tuning_curves(full_matrix,curve_label,sheet): """ This function allows and alternative computation of orientation tuning curve where for each given orientation the response is computed as a maximum of AC or DC component across the phases instead of the maximum used as a standard in Topographica""" # this 
method assumes that only single frequency has been used i = 0 for f in full_matrix.features: if f.name == "phase": phase_index = i if f.name == "orientation": orientation_index = i if f.name == "frequency": frequency_index = i i=i+1 print sheet.curve_dict if not sheet.curve_dict.has_key("orientationACDC"): sheet.curve_dict["orientationACDC"]={} sheet.curve_dict["orientationACDC"][curve_label]={} rows,cols = full_matrix.matrix_shape for o in xrange(size(full_matrix.features[orientation_index].values)): s_w = zeros(full_matrix.matrix_shape) for x in range(rows): for y in range(cols): or_response=[] for p in xrange(size(full_matrix.features[phase_index].values)): index = [0,0,0] index[phase_index] = p index[orientation_index] = o index[frequency_index] = 0 or_response.append(full_matrix.full_matrix[tuple(index)][x][y]) fft = numpy.fft.fft(or_response+or_response+or_response+or_response,2048) first_har = 2048/len(or_response) s_w[x][y] = numpy.maximum(2 *abs(fft[first_har]),abs(fft[0])) s = SheetView((s_w,sheet.bounds), sheet.name , sheet.precedence, topo.sim.time(),sheet.row_precedence) sheet.curve_dict["orientationACDC"][curve_label].update({full_matrix.features[orientation_index].values[o]:s}) def phase_preference_scatter_plot(sheet_name,diameter=0.39): r = UniformRandom(seed=1023) preference_map = topo.sim[sheet_name].sheet_views['PhasePreference'] offset_magnitude = 0.03 datax = [] datay = [] (v,bb) = preference_map.view() for z in zeros(66): x = (r() - 0.5)*2*diameter y = (r() - 0.5)*2*diameter rand = r() xoff = sin(rand*2*pi)*offset_magnitude yoff = cos(rand*2*pi)*offset_magnitude xx = max(min(x+xoff,diameter),-diameter) yy = max(min(y+yoff,diameter),-diameter) x = max(min(x,diameter),-diameter) y = max(min(y,diameter),-diameter) [xc1,yc1] = topo.sim[sheet_name].sheet2matrixidx(xx,yy) [xc2,yc2] = topo.sim[sheet_name].sheet2matrixidx(x,y) if((xc1==xc2) & (yc1==yc2)): continue datax = datax + [v[xc1,yc1]] datay = datay + [v[xc2,yc2]] for i in range(0,len(datax)): datax[i] = datax[i] * 360 datay[i] = datay[i] * 360 if(datay[i] > datax[i] + 180): datay[i]= datay[i]- 360 if((datax[i] > 180) & (datay[i]> 180)): datax[i] = datax[i] - 360; datay[i] = datay[i] - 360 if((datax[i] > 180) & (datay[i] < (datax[i]-180))): datax[i] = datax[i] - 360; #datay[i] = datay[i] - 360 f = pylab.figure() ax = f.add_subplot(111, aspect='equal') pylab.plot(datax,datay,'ro') pylab.plot([0,360],[-180,180]) pylab.plot([-180,180],[0,360]) pylab.plot([-180,-180],[360,360]) ax.axis([-180,360,-180,360]) pylab.xticks([-180,0,180,360], [-180,0,180,360]) pylab.yticks([-180,0,180,360], [-180,0,180,360]) pylab.grid() pylab.savefig(normalize_path(str(topo.sim.timestr()) + sheet_name + "_scatter.png")) def analyze_complexity(full_matrix,simple_sheet_name,complex_sheet_name,filename=None): """ Compute modulation ratio for each neuron, to distinguish complex from simple cells. Uses full_matrix data obtained from measure_or_pref(). If there is a sheet named as specified in simple_sheet_name, also plots its phase preference as a scatter plot. """ import topo measured_sheets = [s for s in topo.sim.objects(CFSheet).values() if hasattr(s,'measure_maps') and s.measure_maps] for sheet in measured_sheets: # Divide by two to get into 0-1 scale - that means simple/complex boundry is now at 0.5 complx = array(complexity(full_matrix[sheet]))/2.0 # Should this be renamed to ModulationRatio? 
sheet.sheet_views['ComplexSelectivity']=SheetView((complx,sheet.bounds), sheet.name , sheet.precedence, topo.sim.time(),sheet.row_precedence) import topo.command.pylabplots topo.command.pylabplots.plot_modulation_ratio(full_matrix,simple_sheet_name=simple_sheet_name,complex_sheet_name=complex_sheet_name,filename=filename) # Avoid error if no simple sheet exists try: phase_preference_scatter_plot(simple_sheet_name,diameter=0.24999) except AttributeError: print "Skipping phase preference scatter plot; could not analyze region %s." \ % simple_sheet_name class measure_and_analyze_complexity(measure_sine_pref): """Macro for measuring orientation preference and then analyzing its complexity.""" def __call__(self,**params): fm = super(measure_and_analyze_complexity,self).__call__(**params) #from topo.command.analysis import measure_or_pref #fm = measure_or_pref() analyze_complexity(fm,simple_sheet_name="V1Simple",complex_sheet_name="V1Complex",filename="ModulationRatio") pg= create_plotgroup(name='Orientation Preference and Complexity',category="Preference Maps", doc='Measure preference for sine grating orientation.', pre_plot_hooks=[measure_and_analyze_complexity.instance()]) pg.add_plot('Orientation Preference',[('Hue','OrientationPreference')]) pg.add_plot('Orientation Preference&Selectivity',[('Hue','OrientationPreference'), ('Confidence','OrientationSelectivity')]) pg.add_plot('Orientation Selectivity',[('Strength','OrientationSelectivity')]) pg.add_plot('Modulation Ratio',[('Strength','ComplexSelectivity')]) pg.add_plot('Phase Preference',[('Hue','PhasePreference')]) pg.add_static_image('Color Key','command/or_key_white_vert_small.png')
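# A self-contained sketch of the modulation-ratio measure that complexity()
# above computes per neuron: the response across stimulus phases is
# Fourier-transformed and the ratio 2*|F1|/|F0| is taken (F1 = first
# harmonic of the phase tuning curve, F0 = its DC component).  The two
# synthetic tuning curves below are illustrative only.
import numpy

def modulation_ratio(phase_responses):
    # tile the curve four times and zero-pad to 2048 samples, as above
    curve = list(phase_responses) * 4
    fft = numpy.fft.fft(curve, 2048)
    first_har = 2048 // len(phase_responses)
    return 2 * abs(fft[first_har]) / abs(fft[0]) if abs(fft[0]) else 0.0

phases = numpy.linspace(0, 2 * numpy.pi, 8, endpoint=False)
print(modulation_ratio(1.0 + numpy.cos(phases)))  # 1.0: fully phase-modulated ("simple")
print(modulation_ratio(numpy.ones(8)))            # 0.0: phase-invariant ("complex")
# analyze_complexity() above divides this value by two, which puts the
# simple/complex boundary at 0.5.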
import logging from django.contrib import messages from django.contrib.auth.decorators import user_passes_test from django.urls import reverse from django.http import HttpResponseRedirect from django.shortcuts import render from dojo.utils import add_breadcrumb from dojo.forms import ToolTypeForm from dojo.models import Tool_Type logger = logging.getLogger(__name__) @user_passes_test(lambda u: u.is_staff) def new_tool_type(request): if request.method == 'POST': tform = ToolTypeForm(request.POST, instance=Tool_Type()) if tform.is_valid(): tform.save() messages.add_message(request, messages.SUCCESS, 'Tool Type Configuration Successfully Created.', extra_tags='alert-success') return HttpResponseRedirect(reverse('tool_type', )) else: tform = ToolTypeForm() add_breadcrumb(title="New Tool Type Configuration", top_level=False, request=request) return render(request, 'dojo/new_tool_type.html', {'tform': tform}) @user_passes_test(lambda u: u.is_staff) def edit_tool_type(request, ttid): tool_type = Tool_Type.objects.get(pk=ttid) if request.method == 'POST': tform = ToolTypeForm(request.POST, instance=tool_type) if tform.is_valid(): tform.save() messages.add_message(request, messages.SUCCESS, 'Tool Type Configuration Successfully Updated.', extra_tags='alert-success') return HttpResponseRedirect(reverse('tool_type', )) else: tform = ToolTypeForm(instance=tool_type) add_breadcrumb(title="Edit Tool Type Configuration", top_level=False, request=request) return render(request, 'dojo/edit_tool_type.html', { 'tform': tform, }) @user_passes_test(lambda u: u.is_staff) def tool_type(request): confs = Tool_Type.objects.all().order_by('name') add_breadcrumb(title="Tool Type List", top_level=not len(request.GET), request=request) return render(request, 'dojo/tool_type.html', {'confs': confs, })
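# A hedged sketch of the urlconf these views expect.  The regex paths and
# the 'add_tool_type'/'edit_tool_type' names are assumptions; only the view
# callables and the 'tool_type' name used by reverse() above come from the
# code itself.
from django.conf.urls import url

from dojo.tool_type import views  # hypothetical import path for this module

urlpatterns = [
    url(r'^tool_type$', views.tool_type, name='tool_type'),
    url(r'^tool_type/add$', views.new_tool_type, name='add_tool_type'),
    url(r'^tool_type/(?P<ttid>\d+)/edit$', views.edit_tool_type,
        name='edit_tool_type'),
]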
"""Fichier contenant le paramètre 'voir' de la commande 'chemin'.""" from primaires.format.fonctions import oui_ou_non from primaires.interpreteur.masque.parametre import Parametre from primaires.pnj.chemin import FLAGS class PrmVoir(Parametre): """Commande 'chemin voir'. """ def __init__(self): """Constructeur du paramètre""" Parametre.__init__(self, "voir", "view") self.schema = "<cle>" self.aide_courte = "affiche le détail d'un chemin" self.aide_longue = \ "Cette commande permet d'obtenir plus d'informations sur " \ "un chemin (ses flags actifs, ses salles et sorties...)." def ajouter(self): """Méthode appelée lors de l'ajout de la commande à l'interpréteur""" cle = self.noeud.get_masque("cle") cle.proprietes["regex"] = r"'[a-z0-9_:]{3,}'" def interpreter(self, personnage, dic_masques): """Interprétation du paramètre""" cle = dic_masques["cle"].cle if cle not in importeur.pnj.chemins: personnage << "|err|Ce chemin n'existe pas.|ff|" return chemin = importeur.pnj.chemins[cle] msg = "Détail sur le chemin {} :".format(chemin.cle) msg += "\n Flags :" for nom_flag in FLAGS.keys(): msg += "\n {}".format(nom_flag.capitalize()) msg += " : " + oui_ou_non(chemin.a_flag(nom_flag)) msg += "\n Salles du chemin :" if len(chemin.salles) == 0: msg += "\n Aucune" else: for salle, direction in chemin.salles.items(): msg += "\n " + salle.ident.ljust(20) + " " msg += direction.ljust(10) if salle in chemin.salles_retour and \ chemin.salles_retour[salle]: msg += " (retour " + chemin.salles_retour[salle] + ")" personnage << msg
""" This package contains the qibuild actions. """ from __future__ import absolute_import from __future__ import unicode_literals from __future__ import print_function
import sys import re import datetime import types import inspect import collections import json from required_config import RequiredConfig from namespace import Namespace from .datetime_util import datetime_from_ISO_string as datetime_converter from .datetime_util import date_from_ISO_string as date_converter import datetime_util def option_value_str(an_option): """return an instance of Option's value as a string. The option instance doesn't actually have to be from the Option class. All it requires is that the passed option instance has a ``value`` attribute. """ if an_option.value is None: return '' try: converter = to_string_converters[type(an_option.value)] s = converter(an_option.value) except KeyError: if not isinstance(an_option.value, basestring): s = unicode(an_option.value) else: s = an_option.value if an_option.from_string_converter in converters_requiring_quotes: s = "'''%s'''" % s return s def str_dict_keys(a_dict): """return a modified dict where all the keys that are anything but str get converted to str. E.g. >>> result = str_dict_keys({u'name': u'Peter', u'age': 99, 1: 2}) >>> # can't compare whole dicts in doctests >>> result['name'] u'Peter' >>> result['age'] 99 >>> result[1] 2 The reason for this is that in Python <= 2.6.4 doing ``MyClass(**{u'name': u'Peter'})`` would raise a TypeError Note that only unicode types are converted to str types. The reason for that is you might have a class that looks like this:: class Option(object): def __init__(self, foo=None, bar=None, **kwargs): ... And it's being used like this:: Option(**{u'foo':1, u'bar':2, 3:4}) Then you don't want to change that {3:4} part which becomes part of `**kwargs` inside the __init__ method. Using integers as parameter keys is a silly example but the point is that due to the python 2.6.4 bug only unicode keys are converted to str. """ new_dict = {} for key in a_dict: if isinstance(key, unicode): new_dict[str(key)] = a_dict[key] else: new_dict[key] = a_dict[key] return new_dict def io_converter(input_str): """ a conversion function for to select stdout, stderr or open a file for writing""" if type(input_str) is str: input_str_lower = input_str.lower() if input_str_lower == 'stdout': return sys.stdout if input_str_lower == 'stderr': return sys.stderr return open(input_str, "w") return input_str def timedelta_converter(input_str): """a conversion function for time deltas""" if isinstance(input_str, basestring): days, hours, minutes, seconds = 0, 0, 0, 0 details = input_str.split(':') if len(details) >= 4: days = int(details[-4]) if len(details) >= 3: hours = int(details[-3]) if len(details) >= 2: minutes = int(details[-2]) if len(details) >= 1: seconds = int(details[-1]) return datetime.timedelta(days=days, hours=hours, minutes=minutes, seconds=seconds) raise ValueError(input_str) def boolean_converter(input_str): """ a conversion function for boolean """ return input_str.lower() in ("true", "t", "1", "y", "yes") import __builtin__ _all_named_builtins = dir(__builtin__) def class_converter(input_str): """ a conversion that will import a module and class name """ if not input_str: return None if '.' 
 not in input_str and input_str in _all_named_builtins:
        return eval(input_str)
    parts = [x.strip() for x in input_str.split('.') if x.strip()]
    try:
        # first try as a complete module
        package = __import__(input_str)
    except ImportError:
        # it must be a class from a module
        if len(parts) == 1:
            # since it has only one part, it must be a class from __main__
            parts = ('__main__', input_str)
        package = __import__('.'.join(parts[:-1]), globals(), locals(), [])
    obj = package
    for name in parts[1:]:
        obj = getattr(obj, name)
    return obj


def classes_in_namespaces_converter(template_for_namespace="cls%d",
                                    name_of_class_option='cls',
                                    instantiate_classes=False):
    """take a comma delimited list of class names, convert each class name
    into an actual class as an option within a numbered namespace.  This
    function creates a closure over a new function.  That new function, in
    turn, creates a class derived from RequiredConfig.  The inner function,
    'class_list_converter', populates the InnerClassList with a Namespace for
    each of the classes in the class list.  In addition, it puts each class
    itself into the subordinate Namespace.  The requirement discovery
    mechanism of configman then reads the InnerClassList's required config,
    pulling in the namespaces and associated classes within.

    For example, if we have a class list like this: "Alpha, Beta", then this
    converter will add the following Namespaces and options to the
    configuration:

        "cls0" - the subordinate Namespace for Alpha
        "cls0.cls" - the option containing the class Alpha itself
        "cls1" - the subordinate Namespace for Beta
        "cls1.cls" - the option containing the class Beta itself

    Optionally, the 'class_list_converter' inner function can imbue the
    InnerClassList's subordinate namespaces with aggregates that will
    instantiate classes from the class list.  This is a convenience to the
    programmer, who would otherwise have to know ahead of time what the
    namespace names were so that the classes could be instantiated within
    the context of the correct namespace.  Remember the user could
    completely change the list of classes at run time, so prediction could
    be difficult.

        "cls0" - the subordinate Namespace for Alpha
        "cls0.cls" - the option containing the class Alpha itself
        "cls0.cls_instance" - an instance of the class Alpha
        "cls1" - the subordinate Namespace for Beta
        "cls1.cls" - the option containing the class Beta itself
        "cls1.cls_instance" - an instance of the class Beta

    parameters:
        template_for_namespace - a template for the names of the namespaces
                                 that will contain the classes and their
                                 associated required config options.  The
                                 namespaces will be numbered sequentially.
                                 By default, they will be "cls0", "cls1",
                                 etc.
        name_of_class_option - the name to be used for the class option
                               within the nested namespace.  By default, it
                               will choose: "cls0.cls", "cls1.cls", etc.
        instantiate_classes - a boolean to determine if there should be an
                              aggregator added to each namespace that
                              instantiates each class.  If True, then each
                              Namespace will contain elements for the class,
                              as well as an aggregator that will instantiate
                              the class.
    """
    #--------------------------------------------------------------------------
    def class_list_converter(class_list_str):
        """This function becomes the actual converter used by configman to
        take a string and convert it into the nested sequence of Namespaces,
        one for each class in the list.
It does this by creating a proxy class stuffed with its own 'required_config' that's dynamically generated.""" if isinstance(class_list_str, basestring): class_list = [x.strip() for x in class_list_str.split(',')] else: raise TypeError('must be derivative of a basestring') #====================================================================== class InnerClassList(RequiredConfig): """This nested class is a proxy list for the classes. It collects all the config requirements for the listed classes and places them each into their own Namespace. """ # we're dynamically creating a class here. The following block of # code is actually adding class level attributes to this new class required_config = Namespace() # 1st requirement for configman subordinate_namespace_names = [] # to help the programmer know # what Namespaces we added namespace_template = template_for_namespace # save the template # for future reference class_option_name = name_of_class_option # save the class's option # name for the future # for each class in the class list for namespace_index, a_class in enumerate(class_list): # figure out the Namespace name namespace_name = template_for_namespace % namespace_index subordinate_namespace_names.append(namespace_name) # create the new Namespace required_config[namespace_name] = Namespace() # add the option for the class itself required_config[namespace_name].add_option( name_of_class_option, #doc=a_class.__doc__ # not helpful if too verbose default=a_class, from_string_converter=class_converter ) if instantiate_classes: # add an aggregator to instantiate the class required_config[namespace_name].add_aggregation( "%s_instance" % name_of_class_option, lambda c, lc, a: lc[name_of_class_option](lc)) @classmethod def to_str(cls): """this method takes this inner class object and turns it back into the original string of classnames. 
                This is used primarily for the output of the 'help' option"""
                return ', '.join(
                    py_obj_to_str(v[name_of_class_option].value)
                    for v in cls.get_required_config().values()
                    if isinstance(v, Namespace))

        return InnerClassList  # result of class_list_converter
    return class_list_converter  # result of classes_in_namespaces_converter


def regex_converter(input_str):
    return re.compile(input_str)

compiled_regexp_type = type(re.compile(r'x'))

from_string_converters = {
    int: int,
    float: float,
    str: str,
    unicode: unicode,
    bool: boolean_converter,
    dict: json.loads,
    datetime.datetime: datetime_converter,
    datetime.date: date_converter,
    datetime.timedelta: timedelta_converter,
    type: class_converter,
    types.FunctionType: class_converter,
    compiled_regexp_type: regex_converter,
}


def py_obj_to_str(a_thing):
    if a_thing is None:
        return ''
    if inspect.ismodule(a_thing):
        return a_thing.__name__
    if a_thing.__module__ == '__builtin__':
        return a_thing.__name__
    if a_thing.__module__ == "__main__":
        return a_thing.__name__
    if hasattr(a_thing, 'to_str'):
        return a_thing.to_str()
    return "%s.%s" % (a_thing.__module__, a_thing.__name__)


def list_to_str(a_list):
    return ', '.join(to_string_converters[type(x)](x) for x in a_list)


to_string_converters = {
    int: str,
    float: str,
    str: str,
    unicode: unicode,
    list: list_to_str,
    tuple: list_to_str,
    bool: lambda x: 'True' if x else 'False',
    dict: json.dumps,
    datetime.datetime: datetime_util.datetime_to_ISO_string,
    datetime.date: datetime_util.date_to_ISO_string,
    datetime.timedelta: datetime_util.timedelta_to_str,
    type: py_obj_to_str,
    types.ModuleType: py_obj_to_str,
    types.FunctionType: py_obj_to_str,
    compiled_regexp_type: lambda x: x.pattern,
}

converters_requiring_quotes = [eval, regex_converter]
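# A quick demonstration of the D:H:M:S parsing implemented by
# timedelta_converter above and of the spellings boolean_converter accepts.
# Assumes this file is importable as `converters`; the module name is a
# guess.
import datetime
from converters import boolean_converter, timedelta_converter

assert timedelta_converter('1:02:03:04') == datetime.timedelta(
    days=1, hours=2, minutes=3, seconds=4)
assert timedelta_converter('30') == datetime.timedelta(seconds=30)
assert boolean_converter('Yes') is True
assert boolean_converter('0') is False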
from supybot.test import * class MyChannelLoggerTestCase(PluginTestCase): plugins = ('MyChannelLogger',)
from __future__ import print_function

import sys

def inv(s):
    if s[0] == '-':
        return s[1:]
    elif s[0] == '+':
        return '-' + s[1:]
    else:  # plain number
        return '-' + s

if len(sys.argv) != 1:
    print('Usage:', sys.argv[0])
    sys.exit(1)

for line in sys.stdin:
    linesplit = line.strip().split()
    if len(linesplit) == 3:
        assert(linesplit[0] == 'p')
        print('p ' + inv(linesplit[2]) + ' ' + linesplit[1])
    elif len(linesplit) == 5:
        assert(linesplit[0] == 's')
        print('s ' + \
              inv(linesplit[2]) + ' ' + linesplit[1] + ' ' + \
              inv(linesplit[4]) + ' ' + linesplit[3])
    elif len(linesplit) == 0:
        print()
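# The same transformation as the filter above, demonstrated on in-memory
# lines instead of stdin: a three-field 'p a b' line becomes 'p -b a' and a
# five-field 's a b c d' line becomes 's -b a -d c' (each pair is swapped
# and the value moved to the front is negated).  The sample lines are made
# up for illustration.
def _inv(s):
    if s[0] == '-':
        return s[1:]
    elif s[0] == '+':
        return '-' + s[1:]
    return '-' + s

for demo_line in ['p 3 5', 's 1 2 3 4']:
    f = demo_line.split()
    if len(f) == 3:
        print('p ' + _inv(f[2]) + ' ' + f[1])    # -> p -5 3
    else:
        print('s ' + _inv(f[2]) + ' ' + f[1] +
              ' ' + _inv(f[4]) + ' ' + f[3])     # -> s -2 1 -4 3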
def test_default(cookies): """ Checks if default configuration is working """ result = cookies.bake() assert result.exit_code == 0 assert result.project.isdir() assert result.exception is None
import argparse
import os
import sys

import clusto
from clusto import script_helper


class Console(script_helper.Script):
    '''
    Use clusto's hardware port mappings to console to a remote server
    using the serial console.
    '''

    def __init__(self):
        script_helper.Script.__init__(self)

    def _add_arguments(self, parser):
        user = os.environ.get('USER')
        parser.add_argument('--user', '-u', default=user,
            help='SSH User (you can also set this in clusto.conf '
            'in console.user: --user > clusto.conf:console.user > "%s")' % user)
        parser.add_argument('server', nargs=1,
            help='Object to console to (IP or name)')

    def add_subparser(self, subparsers):
        parser = self._setup_subparser(subparsers)
        self._add_arguments(parser)

    def run(self, args):
        try:
            server = clusto.get(args.server[0])
            if not server:
                raise LookupError('Object "%s" does not exist' % args.server[0])
        except Exception as e:
            self.debug(e)
            self.error('No object like "%s" was found' % args.server[0])
            return 1
        server = server[0]
        if not hasattr(server, 'console'):
            self.error('The object %s lacks a console method' % server.name)
            return 2
        user = os.environ.get('USER')
        if args.user:
            self.debug('Grabbing user from parameter')
            user = args.user
        else:
            self.debug('Grabbing user from config file or default')
            user = self.get_conf('console.user', user)
        self.debug('User is "%s"' % user)
        return(server.console(ssh_user=user))


def main():
    console, args = script_helper.init_arguments(Console)
    return(console.run(args))

if __name__ == '__main__':
    sys.exit(main())
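# The user-resolution precedence documented in the --user help string above
# (--user beats clusto.conf's console.user, which beats $USER), sketched
# without clusto.  resolve_user is a hypothetical helper, not part of the
# script above.
import os

def resolve_user(cli_user, conf_user):
    return cli_user or conf_user or os.environ.get('USER')

print(resolve_user(None, 'admin'))     # 'admin' comes from clusto.conf
print(resolve_user('alice', 'admin'))  # 'alice' from --user wins
print(resolve_user(None, None))        # falls back to $USER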
from unittest import TestCase from django.core.management import call_command class SendAiPicsStatsTestCase(TestCase): def test_run_command(self): call_command('send_ai_pics_stats')
from m5.objects import * from arm_generic import * import switcheroo root = LinuxArmFSSwitcheroo( mem_class=DDR3_1600_x64, cpu_classes=(AtomicSimpleCPU, AtomicSimpleCPU) ).create_root() run_test = switcheroo.run_test
""" Display a fortune-telling, swimming fish. Wanda has no use what-so-ever. It only takes up disk space and compilation time, and if loaded, it also takes up precious bar space, memory, and cpu cycles. Anybody found using it should be promptly sent for a psychiatric evaluation. Configuration parameters: cache_timeout: refresh interval for this module (default 0) format: display format for this module (default '{nomotion}[{fortune} ]{wanda}{motion}') fortune_timeout: refresh interval for fortune (default 60) Format placeholders: {fortune} one of many aphorisms or vague prophecies {wanda} name of one of the most commonly kept freshwater aquarium fish {motion} biologically propelled motion through a liquid medium {nomotion} opposite behavior of motion to prevent modules from shifting Optional: fortune-mod: the fortune cookie program from bsd games Examples: ``` wanda_the_fish { format = '[\?if=fortune {nomotion}][{fortune} ]' format += '{wanda}[\?if=fortune {motion}]' } wanda_the_fish { format = '[{fortune} ]{wanda}' cache_timeout = -1 } wanda_the_fish { format = '[{fortune} ]{nomotion}{wanda}{motion}' } wanda_the_fish { cache_timeout = 2 } ``` @author lasers SAMPLE OUTPUT [ {'full_text': 'innovate, v.: To annoy people.'}, {'full_text': ' <', 'color': '#ffa500'}, {'full_text': '\xba', 'color': '#add8e6'}, {'full_text': ',', 'color': '#ff8c00'}, {'full_text': '))', 'color': '#ffa500'}, {'full_text': '))>< ', 'color': '#ff8c00'}, ] idle [ {'full_text': ' <', 'color': '#ffa500'}, {'full_text': '\xba', 'color': '#add8e6'}, {'full_text': ',', 'color': '#ff8c00'}, {'full_text': '))', 'color': '#ffa500'}, {'full_text': '))>3', 'color': '#ff8c00'}, ] py3status [ {'full_text': 'py3status is so cool!'}, {'full_text': ' <', 'color': '#ffa500'}, {'full_text': '\xba', 'color': '#add8e6'}, {'full_text': ',', 'color': '#ff8c00'}, {'full_text': '))', 'color': '#ffa500'}, {'full_text': '))>< ', 'color': '#ff8c00'}, ] """ from time import time class Py3status: """ """ # available configuration parameters cache_timeout = 0 format = "{nomotion}[{fortune} ]{wanda}{motion}" fortune_timeout = 60 def post_config_hook(self): body = ( "[\?color=orange&show <" "[\?color=lightblue&show º]" "[\?color=darkorange&show ,]))" "[\?color=darkorange&show ))>%s]]" ) wanda = [body % fin for fin in ("<", ">", "<", "3")] self.wanda = [self.py3.safe_format(x) for x in wanda] self.wanda_length = len(self.wanda) self.index = 0 self.fortune_command = ["fortune", "-as"] self.fortune = self.py3.storage_get("fortune") or None self.toggled = self.py3.storage_get("toggled") or False self.motions = {"motion": " ", "nomotion": ""} # deal with {new,old} timeout between storage fortune_timeout = self.py3.storage_get("fortune_timeout") timeout = None if self.fortune_timeout != fortune_timeout: timeout = time() + self.fortune_timeout self.time = ( timeout or self.py3.storage_get("time") or (time() + self.fortune_timeout) ) def _set_fortune(self, state=None, new=False): if not self.fortune_command: return if new: try: fortune_data = self.py3.command_output(self.fortune_command) except self.py3.CommandError: self.fortune = "" self.fortune_command = None else: self.fortune = " ".join(fortune_data.split()) self.time = time() + self.fortune_timeout elif state is None: if self.toggled and time() >= self.time: self._set_fortune(new=True) else: self.toggled = state if state: self._set_fortune(new=True) else: self.fortune = None def _set_motion(self): for k in self.motions: self.motions[k] = "" if self.motions[k] else " " def _set_wanda(self): self.index 
+= 1 if self.index >= self.wanda_length: self.index = 0 def wanda_the_fish(self): self._set_fortune() self._set_motion() self._set_wanda() return { "cached_until": self.py3.time_in(self.cache_timeout), "full_text": self.py3.safe_format( self.format, { "fortune": self.fortune, "motion": self.motions["motion"], "nomotion": self.motions["nomotion"], "wanda": self.wanda[self.index], }, ), } def kill(self): self.py3.storage_set("toggled", self.toggled) self.py3.storage_set("fortune", self.fortune) self.py3.storage_set("fortune_timeout", self.fortune_timeout) self.py3.storage_set("time", self.time) def on_click(self, event): if not self.fortune_command: return self._set_fortune(not self.toggled) if __name__ == "__main__": """ Run module in test mode. """ from py3status.module_test import module_test module_test(Py3status)
from __future__ import unicode_literals from django.db import migrations class Migration(migrations.Migration): dependencies = [("elections", "0049_move_status")] operations = [ migrations.RemoveField(model_name="election", name="rejection_reason"), migrations.RemoveField(model_name="election", name="suggested_status"), migrations.RemoveField(model_name="election", name="suggestion_reason"), ]
from __future__ import print_function

import re
import logging

logging.basicConfig(level=logging.INFO)


class Executor(object):
    def __init__(self, op_map):
        processed = {}
        for pattern, f in op_map.iteritems():
            s = self._build_pattern_groups(pattern.lower())
            processed[re.compile(s)] = f
        self.operations = processed

    def execute(self, context, op):
        s = "%04x" % op
        for pattern, f in self.operations.iteritems():
            m = pattern.match(s)
            if m:
                return f(context, *[int(v, base=16) for v in m.groups()])
        assert False, s

    def _build_pattern_groups(self, pattern):
        s = pattern.replace('?', '.')
        for id in ['x', 'y', 'z']:
            m = re.search('%s+' % id, s)
            if m:
                s = s[:m.start()] + ('(.{%s})' % (m.end() - m.start())) + s[m.end():]
        return '^' + s + '$'


def set_mem_v0_vx(context, x):
    # FX55 stores V0..VX inclusive, mirroring fill_v0_vx below
    for i in range(x + 1):
        context.memory.write_byte(context.index_reg + i, context.v[i])
    context.pc += 2

def fill_v0_vx(context, x):
    for i in range(x + 1):
        context.v[i] = context.memory.get_byte(context.index_reg + i)
    context.pc += 2

def set_bcd_vx(context, x):
    val = int(context.v[x])
    context.memory.write_byte(context.index_reg, val // 100)
    context.memory.write_byte(context.index_reg + 1, val % 100 // 10)
    context.memory.write_byte(context.index_reg + 2, val % 100 % 10)
    context.pc += 2

def set_i_font(context, x):
    context.index_reg = context.memory.get_font_address(context.v[x])
    context.pc += 2

def add_reg_ind(context, x):
    context.index_reg += context.v[x]
    context.pc += 2

def set_delay_timer(context, x):
    context.delay_timer = context.v[x]
    context.pc += 2

def set_sound_timer(context, x):
    context.sound_timer = context.v[x]
    context.pc += 2

def set_vx_key_pressed(context, x):
    context.v[x] = context.keypad.wait_for_keypress()
    context.pc += 2

def set_vx_delay_timer(context, x):
    context.v[x] = context.delay_timer
    context.pc += 2

def skip_key_vx(context, x, result=True):
    if context.keypad.is_keypressed(context.v[x]) == result:
        context.pc += 2
    context.pc += 2

def draw_sprite(context, x, y, n):
    sprite = []
    for cb in range(n):
        sprite.append(context.memory.get_byte(context.index_reg + cb))
    collision = context.screen.draw(context.v[x], context.v[y], sprite)
    context.v[15] = collision
    context.pc += 2

def jump_nnn_v0(context, nnn):
    context.pc = context.v[0] + nnn

def set_vx_rand(context, x, nn):
    import random
    context.v[x] = random.randint(0, 0xFF) & nn
    context.pc += 2

def jump_noteq(context, x, y):
    if context.v[x] != context.v[y]:
        context.pc += 2
    context.pc += 2

def shift_vy_left(context, x, y):
    context.v[15] = context.v[y] >> 7  # V[F] gets the MSB before the shift
    context.v[x] = (context.v[y] << 1) % 256
    context.pc += 2

def shift_right(context, x, y):
    context.v[15] = context.v[y] & 0x1
    context.v[x] = context.v[y] >> 1
    context.pc += 2

def sub_vx_vy_vf(context, x, y):
    logging.info('Setting V[X] = V[Y] - V[X], V[F] = 1 if V[Y] > V[X]')
    context.v[15] = 1 if context.v[y] > context.v[x] else 0
    # 8XY7 is the reversed subtraction: V[X] = V[Y] - V[X], masked to a byte
    context.v[x] = (context.v[y] - context.v[x]) % 256
    context.pc += 2

def add_vx_vy(context, x, y):
    logging.info('Setting V[X] = V[X] + V[Y]')
    val = context.v[x] + context.v[y]
    context.v[15] = 1 if val > 255 else 0
    context.v[x] = val % 256
    context.pc += 2

def sub_vx_vy(context, x, y):
    logging.info('Setting V[X] = V[X] - V[Y]')
    val = context.v[x] - context.v[y]
    context.v[15] = 1 if val < 0 else 0
    context.v[x] = val % 256
    context.pc += 2

def set_vx_or_vy(context, x, y):
    logging.info('Setting V[X] = V[X] | V[Y]')
    context.v[x] = context.v[x] | context.v[y]
    context.pc += 2

def set_vx_xor_vy(context, x, y):
    logging.info('Setting V[X] = V[X] ^ V[Y]')
    context.v[x] = context.v[x] ^ context.v[y]
    context.pc += 2

def set_vx_and_vy(context, x, y):
    logging.info('Setting V[X] = V[X] & V[Y]')
    context.v[x] = context.v[x] & context.v[y]
    context.pc += 2

def set_vx_vy(context, x, y):
    logging.info('Setting V[X] = V[Y]')
    context.v[x] = context.v[y]
    context.pc += 2

def add_reg(context, x, nnn):
    logging.info('Adding NNN to V[X]')
    context.v[x] = (context.v[x] + nnn) % 256
    context.pc += 2

def set_i(context, nnn):
    logging.info('Setting NNN to index_reg')
    context.index_reg = nnn
    context.pc += 2

def pop_stack(context):
    logging.info('Returning from a subroutine')
    context.pc = context.stack.pop()

def call_rca1082(context, address):
    #TODO
    print("operation not implemented yet:", address)
    context.pc += 2  # skip the unimplemented opcode; opcodes are 2 bytes

def clear(context):
    logging.info('Clearing screen')
    context.screen.clear()
    context.pc += 2

def jump(context, address):
    logging.info('Jump at 0x%2x address' % address)
    context.pc = address

def call(context, address):
    logging.info('Calling subroutine at 0x%2x address' % address)
    context.pc += 2
    context.stack.append(context.pc)
    context.pc = address

def skip_equal(context, x, nnn, ifeq=True):
    logging.info('Skip if V[X] == NNN is %s' % ifeq)
    if (context.v[x] == nnn) == ifeq:
        context.pc += 2
    context.pc += 2

def skip_eq_reg(context, x, y):
    logging.info('Skip if V[X] == V[Y]')
    if context.v[x] == context.v[y]:
        context.pc += 2
    context.pc += 2

def set_reg(context, x, nnn):
    logging.info('Set NNN to cpu reg V[x]')
    context.v[x] = nnn
    context.pc += 2

op_map = {
    '0?E0': clear,
    '0?EE': pop_stack,
    '0XXX': call_rca1082,
    '1XXX': jump,
    '2XXX': call,
    '3XYY': skip_equal,
    '4XYY': lambda context, x, nn: skip_equal(context, x, nn, ifeq=False),
    '5XY0': skip_eq_reg,
    '6XYY': set_reg,
    '7XYY': add_reg,
    '8XY0': set_vx_vy,
    '8XY1': set_vx_or_vy,
    '8XY2': set_vx_and_vy,
    '8XY3': set_vx_xor_vy,
    '8XY4': add_vx_vy,
    '8XY5': sub_vx_vy,
    '8XY6': shift_right,
    '8XY7': sub_vx_vy_vf,
    '8XYE': shift_vy_left,
    '9XY0': jump_noteq,
    'AXXX': set_i,
    'BXXX': jump_nnn_v0,
    'CXYY': set_vx_rand,
    'DXYZ': draw_sprite,
    # EX9E skips when the key in VX is pressed, EXA1 when it is not;
    # the lambda must also pass `context` through.
    'EX9E': skip_key_vx,
    'EXA1': lambda context, x: skip_key_vx(context, x, result=False),
    'FX07': set_vx_delay_timer,
    'FX0A': set_vx_key_pressed,
    'FX15': set_delay_timer,
    'FX18': set_sound_timer,
    'FX1E': add_reg_ind,
    'FX29': set_i_font,
    'FX33': set_bcd_vx,
    'FX55': set_mem_v0_vx,
    'FX65': fill_v0_vx
}
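# A stand-alone illustration of the opcode-pattern trick used by
# Executor._build_pattern_groups above: '?' becomes a wildcard and each
# contiguous run of 'x', 'y' or 'z' in a pattern such as 'DXYZ' becomes a
# regex capture group, so a single regex both matches an opcode and
# extracts its hex-encoded arguments.
import re

def build_pattern(pattern):
    s = pattern.lower().replace('?', '.')
    for ident in ['x', 'y', 'z']:
        m = re.search('%s+' % ident, s)
        if m:
            s = s[:m.start()] + ('(.{%s})' % (m.end() - m.start())) + s[m.end():]
    return '^' + s + '$'

print(build_pattern('DXYZ'))   # ^d(.{1})(.{1})(.{1})$
match = re.match(build_pattern('DXYZ'), '%04x' % 0xD125)
print([int(v, 16) for v in match.groups()])   # [1, 2, 5]: V1, V2, height 5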
""" Created on Thu Jan 03 10:16:39 2013 @author: Grahesh """ import pandas from qstkutil import DataAccess as da import numpy as np import math import copy import qstkutil.qsdateutil as du import datetime as dt import qstkutil.DataAccess as da import qstkutil.tsutil as tsu import qstkstudy.EventProfiler as ep """ Accepts a list of symbols along with start and end date Returns the Event Matrix which is a pandas Datamatrix Event matrix has the following structure : |IBM |GOOG|XOM |MSFT| GS | JP | (d1)|nan |nan | 1 |nan |nan | 1 | (d2)|nan | 1 |nan |nan |nan |nan | (d3)| 1 |nan | 1 |nan | 1 |nan | (d4)|nan | 1 |nan | 1 |nan |nan | ................................... ................................... Also, d1 = start date nan = no information about any event. 1 = status bit(positively confirms the event occurence) """ storename = "NSEData" # get data from our daily prices source closefield = "close" volumefield = "volume" window = 10 def getHalfYearEndDates(timestamps): newTS=[] tempYear=timestamps[0].year flag=1 for x in range(0, len(timestamps)-1): if(timestamps[x].year==tempYear): if(timestamps[x].month==4 and flag==1): newTS.append(timestamps[x-1]) flag=0 if(timestamps[x].month==10): newTS.append(timestamps[x-1]) tempYear=timestamps[x].year+1 flag=1 return newTS def findEvents(symbols, startday,endday, marketSymbol,verbose=False): # Reading the Data for the list of Symbols. timeofday=dt.timedelta(hours=16) timestamps = du.getNSEdays(startday,endday,timeofday) endOfHalfYear=getHalfYearEndDates(timestamps) dataobj = da.DataAccess('NSEData') if verbose: print __name__ + " reading data" # Reading the Data close = dataobj.get_data(timestamps, symbols, closefield) # Completing the Data - Removing the NaN values from the Matrix close = (close.fillna(method='ffill')).fillna(method='backfill') # Calculating Daily Returns for the Market tsu.returnize0(close.values) # Calculating the Returns of the Stock Relative to the Market # So if a Stock went up 5% and the Market rised 3%. The the return relative to market is 2% mktneutDM = close - close[marketSymbol] np_eventmat = copy.deepcopy(mktneutDM) for sym in symbols: for time in timestamps: np_eventmat[sym][time]=np.NAN if verbose: print __name__ + " finding events" # Generating the Event Matrix # Event described is : Analyzing half year events for given stocks. for symbol in symbols: for i in endOfHalfYear: np_eventmat[symbol][i] = 1.0 #overwriting by the bit, marking the event return np_eventmat symbols = np.loadtxt('NSE500port.csv',dtype='S13',comments='#', skiprows=1) startday = dt.datetime(2011,1,1) endday = dt.datetime(2012,1,1) eventMatrix = findEvents(symbols,startday,endday,marketSymbol='NSE500',verbose=True) eventMatrix.to_csv('eventmatrix.csv', sep=',') eventProfiler = ep.EventProfiler(eventMatrix,startday,endday,lookback_days=20,lookforward_days=20,verbose=True) eventProfiler.study(filename="HalfYearEventStudy.jpg",plotErrorBars=True,plotMarketNeutral=True,plotEvents=False,marketSymbol='NSE500')
""" A chain with four possible intermediates with different notBefore and notAfter dates, for testing path bulding prioritization. """ import sys sys.path += ['../..'] import gencerts DATE_A = '150101120000Z' DATE_B = '150102120000Z' DATE_C = '180101120000Z' DATE_D = '180102120000Z' root = gencerts.create_self_signed_root_certificate('Root') root.set_validity_range(DATE_A, DATE_D) int_ac = gencerts.create_intermediate_certificate('Intermediate', root) int_ac.set_validity_range(DATE_A, DATE_C) int_ad = gencerts.create_intermediate_certificate('Intermediate', root) int_ad.set_validity_range(DATE_A, DATE_D) int_ad.set_key(int_ac.get_key()) int_bc = gencerts.create_intermediate_certificate('Intermediate', root) int_bc.set_validity_range(DATE_B, DATE_C) int_bc.set_key(int_ac.get_key()) int_bd = gencerts.create_intermediate_certificate('Intermediate', root) int_bd.set_validity_range(DATE_B, DATE_D) int_bd.set_key(int_ac.get_key()) target = gencerts.create_end_entity_certificate('Target', int_ac) target.set_validity_range(DATE_A, DATE_D) gencerts.write_chain('The root', [root], out_pem='root.pem') gencerts.write_chain('Intermediate with validity range A..C', [int_ac], out_pem='int_ac.pem') gencerts.write_chain('Intermediate with validity range A..D', [int_ad], out_pem='int_ad.pem') gencerts.write_chain('Intermediate with validity range B..C', [int_bc], out_pem='int_bc.pem') gencerts.write_chain('Intermediate with validity range B..D', [int_bd], out_pem='int_bd.pem') gencerts.write_chain('The target', [target], out_pem='target.pem')
from simulation.aivika.modeler.model import * from simulation.aivika.modeler.port import * from simulation.aivika.modeler.stream import * from simulation.aivika.modeler.data_type import * from simulation.aivika.modeler.pdf import * def uniform_random_stream(transact_type, min_delay, max_delay): """Return a new stream of transacts with random delays distributed uniformly.""" expect_transact_type(transact_type) model = transact_type.get_model() code = 'return $ mapStream (\\a -> ' + transact_type.coerce_arrival('a') + ') $ ' code += 'randomUniformStream ' + str(min_delay) + ' ' + str(max_delay) y = StreamPort(model, transact_type.get_data_type()) y.bind_to_input() y.write(code) return y def uniform_int_random_stream(transact_type, min_delay, max_delay): """Return a new stream of transacts with integer random delays distributed uniformly.""" expect_transact_type(transact_type) model = transact_type.get_model() code = 'return $ mapStream (\\a -> ' + transact_type.coerce_arrival('a') + ') $ ' code += 'randomUniformIntStream ' + str(min_delay) + ' ' + str(max_delay) y = StreamPort(model, transact_type.get_data_type()) y.bind_to_input() y.write(code) return y def triangular_random_stream(transact_type, min_delay, median_delay, max_delay): """Return a new stream of transacts with random delays having the triangular distribution.""" expect_transact_type(transact_type) model = transact_type.get_model() code = 'return $ mapStream (\\a -> ' + transact_type.coerce_arrival('a') + ') $ ' code += 'randomTriangularStream ' + str(min_delay) + ' ' + str(median_delay) + ' ' + str(max_delay) y = StreamPort(model, transact_type.get_data_type()) y.bind_to_input() y.write(code) return y def normal_random_stream(transact_type, mean_delay, delay_deviation): """Return a new stream of transacts with random delays having the normal distribution.""" expect_transact_type(transact_type) model = transact_type.get_model() code = 'return $ mapStream (\\a -> ' + transact_type.coerce_arrival('a') + ') $ ' code += 'randomNormalStream ' + str(mean_delay) + ' ' + str(delay_deviation) y = StreamPort(model, transact_type.get_data_type()) y.bind_to_input() y.write(code) return y def lognormal_random_stream(transact_type, normal_mean_delay, normal_delay_deviation): """Return a new stream of transacts with random delays having the lognormal distribution. The numerical parameters are related to the normal distribution that this distribution is derived from. 
""" expect_transact_type(transact_type) model = transact_type.get_model() code = 'return $ mapStream (\\a -> ' + transact_type.coerce_arrival('a') + ') $ ' code += 'randomLogNormalStream ' + str(normal_mean_delay) + ' ' + str(normal_delay_deviation) y = StreamPort(model, transact_type.get_data_type()) y.bind_to_input() y.write(code) return y def exponential_random_stream(transact_type, mean_delay): """Return a new stream of transacts with random delays having the exponential distribution with the specified mean (a reciprocal of the rate).""" expect_transact_type(transact_type) model = transact_type.get_model() code = 'return $ mapStream (\\a -> ' + transact_type.coerce_arrival('a') + ') $ ' code += 'randomExponentialStream ' + str(mean_delay) y = StreamPort(model, transact_type.get_data_type()) y.bind_to_input() y.write(code) return y def erlang_random_stream(transact_type, scale, shape): """Return a new stream of transacts with random delays having the Erlang distribution with the specified scale (a reciprocal of the rate) and shape parameters.""" expect_transact_type(transact_type) model = transact_type.get_model() code = 'return $ mapStream (\\a -> ' + transact_type.coerce_arrival('a') + ') $ ' code += 'randomErlangStream ' + str(scale) + ' ' + str(shape) y = StreamPort(model, transact_type.get_data_type()) y.bind_to_input() y.write(code) return y def poisson_random_stream(transact_type, mean_delay): """Return a new stream of transacts with random delays having the Poisson distribution with the specified mean.""" expect_transact_type(transact_type) model = transact_type.get_model() code = 'return $ mapStream (\\a -> ' + transact_type.coerce_arrival('a') + ') $ ' code += 'randomPoissonStream ' + str(mean_delay) y = StreamPort(model, transact_type.get_data_type()) y.bind_to_input() y.write(code) return y def binomial_random_stream(transact_type, probability, trials): """Return a new stream of transacts with random delays having the binomial distribution with the specified probability and trials.""" expect_transact_type(transact_type) model = transact_type.get_model() code = 'return $ mapStream (\\a -> ' + transact_type.coerce_arrival('a') + ') $ ' code += 'randomBinomialStream ' + str(probability) + ' ' + str(trials) y = StreamPort(model, transact_type.get_data_type()) y.bind_to_input() y.write(code) return y def gamma_random_stream(transact_type, shape, scale): """Return a new stream of transacts with random delays having the Gamma distribution by the specified shape and scale.""" expect_transact_type(transact_type) model = transact_type.get_model() code = 'return $ mapStream (\\a -> ' + transact_type.coerce_arrival('a') + ') $ ' code += 'randomGammaStream ' + str(shape) + ' ' + str(scale) y = StreamPort(model, transact_type.get_data_type()) y.bind_to_input() y.write(code) return y def beta_random_stream(transact_type, alpha, beta): """Return a new stream of transacts with random delays having the Beta distribution by the specified shape parameters (alpha and beta).""" expect_transact_type(transact_type) model = transact_type.get_model() code = 'return $ mapStream (\\a -> ' + transact_type.coerce_arrival('a') + ') $ ' code += 'randomBetaStream ' + str(alpha) + ' ' + str(beta) y = StreamPort(model, transact_type.get_data_type()) y.bind_to_input() y.write(code) return y def weibull_random_stream(transact_type, shape, scale): """Return a new stream of transacts with random delays having the Weibull distribution by the specified shape and scale.""" expect_transact_type(transact_type) model = 
transact_type.get_model() code = 'return $ mapStream (\\a -> ' + transact_type.coerce_arrival('a') + ') $ ' code += 'randomWeibullStream ' + str(shape) + ' ' + str(scale) y = StreamPort(model, transact_type.get_data_type()) y.bind_to_input() y.write(code) return y def discrete_random_stream(transact_type, pdf): """Return a new stream of transacts with random delays having the discrete distribution by the specified probability density function.""" expect_transact_type(transact_type) model = transact_type.get_model() code = 'return $ mapStream (\\a -> ' + transact_type.coerce_arrival('a') + ') $ ' code += 'randomDiscreteStream ' + encode_pdf(pdf) y = StreamPort(model, transact_type.get_data_type()) y.bind_to_input() y.write(code) return y
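The constructors above all follow the same pattern: validate the transact type, emit the Haskell stream expression, and bind a new StreamPort, so they are interchangeable as arrival-stream definitions. A minimal sketch, assuming a model and transact type have already been created with the surrounding modeler API ('transact_type' below is a placeholder for that setup):

# 'transact_type' is assumed to come from the modeler API; only the
# stream constructors below are defined in this module.
arrivals_uniform = uniform_random_stream(transact_type, 3, 7)
arrivals_exponential = exponential_random_stream(transact_type, mean_delay=5)
arrivals_erlang = erlang_random_stream(transact_type, scale=0.5, shape=4)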
def voter_star_on_save_doc_template_values(url_root):
    """
    Show documentation about voterStarOnSave
    """
    required_query_parameter_list = [
        {
            'name': 'api_key',
            'value': 'string (from post, cookie, or get (in that order))',  # boolean, integer, long, string
            'description': 'The unique key provided to any organization using the WeVoteServer APIs',
        },
        {
            'name': 'voter_device_id',
            'value': 'string',  # boolean, integer, long, string
            'description': 'An 88 character unique identifier linked to a voter record on the server',
        },
        {
            'name': 'kind_of_ballot_item',
            'value': 'string',  # boolean, integer, long, string
            'description': 'What is the type of ballot item for which we are saving the \'on\' status? '
                           '(kind_of_ballot_item is either "OFFICE", "CANDIDATE", "POLITICIAN" or "MEASURE")',
        },
        {
            'name': 'ballot_item_id',
            'value': 'integer',  # boolean, integer, long, string
            'description': 'The unique internal identifier for this ballot_item '
                           '(either ballot_item_id OR ballot_item_we_vote_id required -- not both. '
                           'If it exists, ballot_item_id is used instead of ballot_item_we_vote_id)',
        },
        {
            'name': 'ballot_item_we_vote_id',
            'value': 'string',  # boolean, integer, long, string
            'description': 'The unique identifier for this ballot_item across all networks '
                           '(either ballot_item_id OR ballot_item_we_vote_id required -- not both. '
                           'NOTE: In the future we might support other identifiers used in the industry.)',
        },
    ]
    optional_query_parameter_list = [
    ]

    potential_status_codes_list = [
        {
            'code': 'VALID_VOTER_DEVICE_ID_MISSING',
            'description': 'Cannot proceed. A valid voter_device_id parameter was not included.',
        },
        {
            'code': 'VALID_VOTER_ID_MISSING',
            'description': 'Cannot proceed. Missing voter_id while trying to save.',
        },
        {
            'code': 'STAR_ON_OFFICE CREATE/UPDATE ITEM_STARRED',
            'description': '',
        },
        {
            'code': 'STAR_ON_CANDIDATE CREATE/UPDATE ITEM_STARRED',
            'description': '',
        },
        {
            'code': 'STAR_ON_MEASURE CREATE/UPDATE ITEM_STARRED',
            'description': '',
        },
    ]

    try_now_link_variables_dict = {
        'kind_of_ballot_item': 'CANDIDATE',
        'ballot_item_id': '5655',
    }

    api_response = '{\n' \
                   ' "status": string (description of what happened),\n' \
                   ' "success": boolean (did the save happen?),\n' \
                   ' "ballot_item_id": integer,\n' \
                   ' "ballot_item_we_vote_id": string,\n' \
                   ' "kind_of_ballot_item": string (CANDIDATE, MEASURE),\n' \
                   '}'

    template_values = {
        'api_name': 'voterStarOnSave',
        'api_slug': 'voterStarOnSave',
        'api_introduction':
            "Save or create private 'star on' state for the current voter for a measure, an office or candidate.",
        'try_now_link': 'apis_v1:voterStarOnSaveView',
        'try_now_link_variables_dict': try_now_link_variables_dict,
        'url_root': url_root,
        'get_or_post': 'GET',
        'required_query_parameter_list': required_query_parameter_list,
        'optional_query_parameter_list': optional_query_parameter_list,
        'api_response': api_response,
        'api_response_notes': "",
        'potential_status_codes_list': potential_status_codes_list,
    }
    return template_values
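A request assembled from the documented parameters might look like the following sketch; the host and credential values are placeholders:

# Illustrative GET request for voterStarOnSave; all identifiers are fake.
import urllib

params = urllib.urlencode({
    'api_key': 'YOUR_API_KEY',
    'voter_device_id': 'YOUR_88_CHARACTER_DEVICE_ID',
    'kind_of_ballot_item': 'CANDIDATE',
    'ballot_item_id': 5655,
})
url = 'https://example.com/apis/v1/voterStarOnSave/?' + params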
import numpy as np from nose.tools import (assert_true, assert_false, assert_equal, assert_almost_equal) from numpy.testing import (assert_array_equal, assert_array_almost_equal, assert_) from dipy.sims.voxel import (_check_directions, SingleTensor, MultiTensor, multi_tensor_odf, all_tensor_evecs, add_noise, single_tensor, sticks_and_ball, multi_tensor_dki, kurtosis_element, DKI_signal) from dipy.core.geometry import (vec2vec_rotmat, sphere2cart) from dipy.data import get_data, get_sphere from dipy.core.gradients import gradient_table from dipy.io.gradients import read_bvals_bvecs fimg, fbvals, fbvecs = get_data('small_64D') bvals, bvecs = read_bvals_bvecs(fbvals, fbvecs) gtab = gradient_table(bvals, bvecs) bvals_2s = np.concatenate((bvals, bvals * 2), axis=0) bvecs_2s = np.concatenate((bvecs, bvecs), axis=0) gtab_2s = gradient_table(bvals_2s, bvecs_2s) def diff2eigenvectors(dx, dy, dz): """ numerical derivatives 2 eigenvectors """ u = np.array([dx, dy, dz]) u = u / np.linalg.norm(u) R = vec2vec_rotmat(basis[:, 0], u) eig0 = u eig1 = np.dot(R, basis[:, 1]) eig2 = np.dot(R, basis[:, 2]) eigs = np.zeros((3, 3)) eigs[:, 0] = eig0 eigs[:, 1] = eig1 eigs[:, 2] = eig2 return eigs, R def test_check_directions(): # Testing spherical angles for two principal coordinate axis angles = [(0, 0)] # axis z sticks = _check_directions(angles) assert_array_almost_equal(sticks, [[0, 0, 1]]) angles = [(0, 90)] # axis z again (phi can be anything it theta is zero) sticks = _check_directions(angles) assert_array_almost_equal(sticks, [[0, 0, 1]]) angles = [(90, 0)] # axis x sticks = _check_directions(angles) assert_array_almost_equal(sticks, [[1, 0, 0]]) # Testing if directions are already given in cartesian coordinates angles = [(0, 0, 1)] sticks = _check_directions(angles) assert_array_almost_equal(sticks, [[0, 0, 1]]) # Testing more than one direction simultaneously angles = np.array([[90, 0], [30, 0]]) sticks = _check_directions(angles) ref_vec = [np.sin(np.pi*30/180), 0, np.cos(np.pi*30/180)] assert_array_almost_equal(sticks, [[1, 0, 0], ref_vec]) # Testing directions not aligned to planes x = 0, y = 0, or z = 0 the1 = 0 phi1 = 90 the2 = 30 phi2 = 45 angles = np.array([(the1, phi1), (the2, phi2)]) sticks = _check_directions(angles) ref_vec1 = (np.sin(np.pi*the1/180) * np.cos(np.pi*phi1/180), np.sin(np.pi*the1/180) * np.sin(np.pi*phi1/180), np.cos(np.pi*the1/180)) ref_vec2 = (np.sin(np.pi*the2/180) * np.cos(np.pi*phi2/180), np.sin(np.pi*the2/180) * np.sin(np.pi*phi2/180), np.cos(np.pi*the2/180)) assert_array_almost_equal(sticks, [ref_vec1, ref_vec2]) def test_sticks_and_ball(): d = 0.0015 S, sticks = sticks_and_ball(gtab, d=d, S0=1, angles=[(0, 0), ], fractions=[100], snr=None) assert_array_equal(sticks, [[0, 0, 1]]) S_st = SingleTensor(gtab, 1, evals=[d, 0, 0], evecs=[[0, 0, 0], [0, 0, 0], [1, 0, 0]]) assert_array_almost_equal(S, S_st) def test_single_tensor(): evals = np.array([1.4, .35, .35]) * 10 ** (-3) evecs = np.eye(3) S = SingleTensor(gtab, 100, evals, evecs, snr=None) assert_array_almost_equal(S[gtab.b0s_mask], 100) assert_(np.mean(S[~gtab.b0s_mask]) < 100) from dipy.reconst.dti import TensorModel m = TensorModel(gtab) t = m.fit(S) assert_array_almost_equal(t.fa, 0.707, decimal=3) def test_multi_tensor(): sphere = get_sphere('symmetric724') vertices = sphere.vertices mevals = np.array(([0.0015, 0.0003, 0.0003], [0.0015, 0.0003, 0.0003])) e0 = np.array([np.sqrt(2) / 2., np.sqrt(2) / 2., 0]) e1 = np.array([0, np.sqrt(2) / 2., np.sqrt(2) / 2.]) mevecs = [all_tensor_evecs(e0), all_tensor_evecs(e1)] # 
odf = multi_tensor_odf(vertices, [0.5, 0.5], mevals, mevecs) # assert_(odf.shape == (len(vertices),)) # assert_(np.all(odf <= 1) & np.all(odf >= 0)) fimg, fbvals, fbvecs = get_data('small_101D') bvals, bvecs = read_bvals_bvecs(fbvals, fbvecs) gtab = gradient_table(bvals, bvecs) s1 = single_tensor(gtab, 100, mevals[0], mevecs[0], snr=None) s2 = single_tensor(gtab, 100, mevals[1], mevecs[1], snr=None) Ssingle = 0.5*s1 + 0.5*s2 S, sticks = MultiTensor(gtab, mevals, S0=100, angles=[(90, 45), (45, 90)], fractions=[50, 50], snr=None) assert_array_almost_equal(S, Ssingle) def test_snr(): np.random.seed(1978) s = single_tensor(gtab) # For reasonably large SNR, var(signal) ~= sigma**2, where sigma = 1/SNR for snr in [5, 10, 20]: sigma = 1.0 / snr for j in range(1000): s_noise = add_noise(s, snr, 1, noise_type='rician') assert_array_almost_equal(np.var(s_noise - s), sigma ** 2, decimal=2) def test_all_tensor_evecs(): e0 = np.array([1/np.sqrt(2), 1/np.sqrt(2), 0]) desired = np.array([[1/np.sqrt(2), 1/np.sqrt(2), 0], [-1/np.sqrt(2), 1/np.sqrt(2), 0], [0, 0, 1]]).T assert_array_almost_equal(all_tensor_evecs(e0), desired) def test_kurtosis_elements(): """ Testing symmetry of the elements of the KT As a 4th-order tensor, KT has 81 elements. However, due to diffusion symmetry the KT is fully characterized by 15 independent elements. This test checks for this property. """ # two fibers not aligned to planes x = 0, y = 0, or z = 0 mevals = np.array([[0.00099, 0, 0], [0.00226, 0.00087, 0.00087], [0.00099, 0, 0], [0.00226, 0.00087, 0.00087]]) angles = [(80, 10), (80, 10), (20, 30), (20, 30)] fie = 0.49 # intra axonal water fraction frac = [fie * 50, (1-fie) * 50, fie * 50, (1-fie) * 50] sticks = _check_directions(angles) mD = np.zeros((len(frac), 3, 3)) for i in range(len(frac)): R = all_tensor_evecs(sticks[i]) mD[i] = np.dot(np.dot(R, np.diag(mevals[i])), R.T) # compute global DT D = np.zeros((3, 3)) for i in range(len(frac)): D = D + frac[i]*mD[i] # compute voxel's MD MD = (D[0][0] + D[1][1] + D[2][2]) / 3 # Reference dictionary with the 15 independent elements. # Note: The product of the indexes (i+1) * (j+1) * (k+1) * (l+1) of an # element equals the product for another element if and only if the two # elements are related by symmetry. Thus the index product is used as # the key of the reference dictionary. kt_ref = {1: kurtosis_element(mD, frac, 0, 0, 0, 0), 16: kurtosis_element(mD, frac, 1, 1, 1, 1), 81: kurtosis_element(mD, frac, 2, 2, 2, 2), 2: kurtosis_element(mD, frac, 0, 0, 0, 1), 3: kurtosis_element(mD, frac, 0, 0, 0, 2), 8: kurtosis_element(mD, frac, 0, 1, 1, 1), 24: kurtosis_element(mD, frac, 1, 1, 1, 2), 27: kurtosis_element(mD, frac, 0, 2, 2, 2), 54: kurtosis_element(mD, frac, 1, 2, 2, 2), 4: kurtosis_element(mD, frac, 0, 0, 1, 1), 9: kurtosis_element(mD, frac, 0, 0, 2, 2), 36: kurtosis_element(mD, frac, 1, 1, 2, 2), 6: kurtosis_element(mD, frac, 0, 0, 1, 2), 12: kurtosis_element(mD, frac, 0, 1, 1, 2), 18: kurtosis_element(mD, frac, 0, 1, 2, 2)} # Testing all 81 possible elements xyz = [0, 1, 2] for i in xyz: for j in xyz: for k in xyz: for l in xyz: key = (i+1) * (j+1) * (k+1) * (l+1) assert_almost_equal(kurtosis_element(mD, frac, i, k, j, l), kt_ref[key]) # Testing optional function inputs assert_almost_equal(kurtosis_element(mD, frac, i, k, j, l), kurtosis_element(mD, frac, i, k, j, l, D, MD)) def test_DKI_simulations_aligned_fibers(): """ Testing DKI simulations when aligning the same fiber to different axes. 
If biological parameters don't change, kt[0] of a fiber aligned to axis x has to be equal to kt[1] of a fiber aligned to the axis y and equal to kt[2] of a fiber aligned to axis z. The same is applicable for dt """ # Defining parameters based on Neto Henriques et al., 2015. NeuroImage 111 mevals = np.array([[0.00099, 0, 0], # Intra-cellular [0.00226, 0.00087, 0.00087]]) # Extra-cellular frac = [49, 51] # Compartment volume fraction # axis x angles = [(90, 0), (90, 0)] signal_fx, dt_fx, kt_fx = multi_tensor_dki(gtab_2s, mevals, angles=angles, fractions=frac) # axis y angles = [(90, 90), (90, 90)] signal_fy, dt_fy, kt_fy = multi_tensor_dki(gtab_2s, mevals, angles=angles, fractions=frac) # axis z angles = [(0, 0), (0, 0)] signal_fz, dt_fz, kt_fz = multi_tensor_dki(gtab_2s, mevals, angles=angles, fractions=frac) assert_array_equal([kt_fx[0], kt_fx[1], kt_fx[2]], [kt_fy[1], kt_fy[0], kt_fy[2]]) assert_array_equal([kt_fx[0], kt_fx[1], kt_fx[2]], [kt_fz[2], kt_fz[0], kt_fz[1]]) assert_array_equal([dt_fx[0], dt_fx[2], dt_fx[5]], [dt_fy[2], dt_fy[0], dt_fy[5]]) assert_array_equal([dt_fx[0], dt_fx[2], dt_fx[5]], [dt_fz[5], dt_fz[0], dt_fz[2]]) # testing S signal along axis x, y and z bvals = np.array([0, 0, 0, 1000, 1000, 1000, 2000, 2000, 2000]) bvecs = np.asarray([[1, 0, 0], [0, 1, 0], [0, 0, 1], [1, 0, 0], [0, 1, 0], [0, 0, 1], [1, 0, 0], [0, 1, 0], [0, 0, 1]]) gtab_axis = gradient_table(bvals, bvecs) # axis x S_fx = DKI_signal(gtab_axis, dt_fx, kt_fx, S0=100) assert_array_almost_equal(S_fx[0:3], [100, 100, 100]) # test S f0r b=0 # axis y S_fy = DKI_signal(gtab_axis, dt_fy, kt_fy, S0=100) assert_array_almost_equal(S_fy[0:3], [100, 100, 100]) # test S f0r b=0 # axis z S_fz = DKI_signal(gtab_axis, dt_fz, kt_fz, S0=100) assert_array_almost_equal(S_fz[0:3], [100, 100, 100]) # test S f0r b=0 # test S for b = 1000 assert_array_almost_equal([S_fx[3], S_fx[4], S_fx[5]], [S_fy[4], S_fy[3], S_fy[5]]) assert_array_almost_equal([S_fx[3], S_fx[4], S_fx[5]], [S_fz[5], S_fz[3], S_fz[4]]) # test S for b = 2000 assert_array_almost_equal([S_fx[6], S_fx[7], S_fx[8]], [S_fy[7], S_fy[6], S_fy[8]]) assert_array_almost_equal([S_fx[6], S_fx[7], S_fx[8]], [S_fz[8], S_fz[6], S_fz[7]]) def test_DKI_crossing_fibers_simulations(): """ Testing DKI simulations of a crossing fiber """ # two fiber not aligned to planes x = 0, y = 0, or z = 0 mevals = np.array([[0.00099, 0, 0], [0.00226, 0.00087, 0.00087], [0.00099, 0, 0], [0.00226, 0.00087, 0.00087]]) angles = [(80, 10), (80, 10), (20, 30), (20, 30)] fie = 0.49 frac = [fie*50, (1 - fie)*50, fie*50, (1 - fie)*50] signal, dt, kt = multi_tensor_dki(gtab_2s, mevals, angles=angles, fractions=frac, snr=None) # in this simulations dt and kt cannot have zero elements for i in range(len(dt)): assert dt[i] != 0 for i in range(len(kt)): assert kt[i] != 0 # test S, dt and kt relative to the expected values computed from another # DKI package - UDKI (Neto Henriques et al., 2015) dt_ref = [1.0576161e-3, 0.1292542e-3, 0.4786179e-3, 0.2667081e-3, 0.1136643e-3, 0.9888660e-3] kt_ref = [2.3529944, 0.8226448, 2.3011221, 0.2017312, -0.0437535, 0.0404011, 0.0355281, 0.2449859, 0.2157668, 0.3495910, 0.0413366, 0.3461519, -0.0537046, 0.0133414, -0.017441] assert_array_almost_equal(dt, dt_ref) assert_array_almost_equal(kt, kt_ref) assert_array_almost_equal(signal, DKI_signal(gtab_2s, dt_ref, kt_ref, S0=100, snr=None), decimal=5) if __name__ == "__main__": test_multi_tensor()
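Outside the test harness, the same helpers simulate a crossing-fiber signal directly; a minimal sketch reusing the module-level gtab_2s defined at the top of this file:

# Simulate a 50/50 two-fiber crossing at 60 degrees and inspect the result.
mevals = np.array([[0.0015, 0.0003, 0.0003],
                   [0.0015, 0.0003, 0.0003]])
signal, sticks = MultiTensor(gtab_2s, mevals, S0=100,
                             angles=[(0, 0), (60, 0)],
                             fractions=[50, 50], snr=None)
# 'signal' has one value per entry in gtab_2s; 'sticks' holds the two
# unit directions derived from the angles.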
'''
Tune the 3 most promising algorithms and compare them
'''
import os
import time
import pandas
import numpy
import matplotlib.pyplot as plt
from pandas.tools.plotting import scatter_matrix
from pandas import DataFrame
from sklearn.preprocessing import StandardScaler
from sklearn.preprocessing import MinMaxScaler
from sklearn import cross_validation
from sklearn.metrics import classification_report
from sklearn.metrics import confusion_matrix
from sklearn.metrics import accuracy_score
from sklearn.pipeline import Pipeline
from sklearn.grid_search import GridSearchCV
from sklearn.linear_model import LogisticRegression
from sklearn.tree import DecisionTreeClassifier
from sklearn.neighbors import KNeighborsClassifier
from sklearn.discriminant_analysis import LinearDiscriminantAnalysis
from sklearn.naive_bayes import GaussianNB
from sklearn.svm import SVC
from sklearn.ensemble import AdaBoostClassifier
from sklearn.ensemble import GradientBoostingClassifier
from sklearn.ensemble import RandomForestClassifier
from sklearn.ensemble import ExtraTreesClassifier
from sklearn.datasets import load_digits
from sklearn.model_selection import GridSearchCV
from sklearn.decomposition import PCA, NMF
from sklearn.feature_selection import SelectKBest, chi2

import lib.eda1 as eda1
import lib.eda3 as eda3

N_DIGITS = 3
NUM_FOLDS = 10
RAND_SEED = 7
SCORING = 'accuracy'
VALIDATION_SIZE = 0.20
N_JOBS = 6

start = time.clock()
imageidx = 1
createImages = True
results = []
names = []
params = []
bestResults = []


def tuneRF(X_train, Y_train, outputPath):
    global results, names, params, bestResults
    print 'tune RF (Random Forest Classifier)'
    pipeline = Pipeline([('PCA', PCA()),
                         ('MinMaxScaler', MinMaxScaler(feature_range=(0, 1))),
                         ('Scaler', StandardScaler())])
    scaler = pipeline.fit(X_train)
    rescaledX = scaler.transform(X_train)
    # tune parameters
    # http://scikit-learn.org/stable/modules/generated/sklearn.ensemble.RandomForestClassifier.html
    #n_estimators_values = [5, 10, 100, 1000, 3000]
    n_estimators_values = [1000]
    max_features_values = [0.1, 'auto', 'sqrt', 'log2', None]  # (float)0.1=>10%
    criterion_values = ['gini', 'entropy']
    param_grid = dict(n_estimators=n_estimators_values,
                      max_features=max_features_values,
                      criterion=criterion_values)
    model = RandomForestClassifier()
    kfold = cross_validation.KFold(n=len(X_train), n_folds=NUM_FOLDS, random_state=RAND_SEED)
    grid = GridSearchCV(n_jobs=N_JOBS, verbose=10, estimator=model,
                        param_grid=param_grid, scoring=SCORING, cv=kfold)
    grid_result = grid.fit(rescaledX, Y_train)
    print("Best: %f using %s" % (grid_result.best_score_, grid_result.best_params_))
    best_idx = grid_result.best_index_
    # TODO: check whether 'mean_test_score' is really what I want here
    cv_results = grid_result.cv_results_['mean_test_score']
    results.append(cv_results)
    grid_scores = sorted(grid_result.grid_scores_, key=lambda x: x[2].mean(), reverse=True)
    first = True
    for param, mean_score, scores in grid_scores:
        if first:
            bestResults.append({'name': 'RF', 'mean': scores.mean(),
                                'std': scores.std(), 'params': param})
            first = False
        print("%f (%f) with: %r" % (scores.mean(), scores.std(), param))


def tuneET(X_train, Y_train, outputPath):
    global results, names, params, bestResults
    print 'tune ET (Extra Trees Classifier)'
    pipeline = Pipeline([('PCA', PCA()),
                         ('MinMaxScaler', MinMaxScaler(feature_range=(0, 1))),
                         ('Scaler', StandardScaler())])
    scaler = pipeline.fit(X_train)
    rescaledX = scaler.transform(X_train)
    # tune parameters
    # http://scikit-learn.org/stable/modules/generated/sklearn.ensemble.ExtraTreesClassifier.html
    #n_estimators_values = [5, 10, 100, 1000, 3000]
    n_estimators_values = [1000]
    max_features_values = [0.1, 'auto', 'sqrt', 'log2', None]  # (float)0.1=>10%
    criterion_values = ['gini', 'entropy']
    param_grid = dict(n_estimators=n_estimators_values,
                      max_features=max_features_values,
                      criterion=criterion_values)
    model = ExtraTreesClassifier()
    kfold = cross_validation.KFold(n=len(X_train), n_folds=NUM_FOLDS, random_state=RAND_SEED)
    grid = GridSearchCV(n_jobs=N_JOBS, verbose=10, estimator=model,
                        param_grid=param_grid, scoring=SCORING, cv=kfold)
    grid_result = grid.fit(rescaledX, Y_train)
    print("Best: %f using %s" % (grid_result.best_score_, grid_result.best_params_))
    best_idx = grid_result.best_index_
    # TODO: check whether 'mean_test_score' is really what I want here
    cv_results = grid_result.cv_results_['mean_test_score']
    results.append(cv_results)
    grid_scores = sorted(grid_result.grid_scores_, key=lambda x: x[2].mean(), reverse=True)
    first = True
    for param, mean_score, scores in grid_scores:
        if first:
            bestResults.append({'name': 'ET', 'mean': scores.mean(),
                                'std': scores.std(), 'params': param})
            first = False
        print("%f (%f) with: %r" % (scores.mean(), scores.std(), param))


def tuneSVM(X_train, Y_train, outputPath):
    global results, names, params, bestResults
    print 'tune SVM (Support Vector Machines Classifier)'
    pipeline = Pipeline([('PCA', PCA()),
                         ('MinMaxScaler', MinMaxScaler(feature_range=(0, 1))),
                         ('Scaler', StandardScaler())])
    scaler = pipeline.fit(X_train)
    rescaledX = scaler.transform(X_train)
    #c_values = [0.1, 1.0, 100.0, 10000.0, 100000.0]
    c_values = [10000.0, 100000.0]
    kernel_values = ['linear', 'poly', 'rbf', 'sigmoid']
    param_grid = dict(C=c_values, kernel=kernel_values)
    model = SVC()
    kfold = cross_validation.KFold(n=len(X_train), n_folds=NUM_FOLDS, random_state=RAND_SEED)
    grid = GridSearchCV(n_jobs=N_JOBS, verbose=10, estimator=model,
                        param_grid=param_grid, scoring=SCORING, cv=kfold)
    grid_result = grid.fit(rescaledX, Y_train)
    print("Best: %f using %s" % (grid_result.best_score_, grid_result.best_params_))
    best_idx = grid_result.best_index_
    # TODO: check whether 'mean_test_score' is really what I want here
    cv_results = grid_result.cv_results_['mean_test_score']
    results.append(cv_results)
    grid_scores = sorted(grid_result.grid_scores_, key=lambda x: x[2].mean(), reverse=True)
    first = True
    for param, mean_score, scores in grid_scores:
        if first:
            bestResults.append({'name': 'SVM', 'mean': scores.mean(),
                                'std': scores.std(), 'params': param})
            first = False
        print("%f (%f) with: %r" % (scores.mean(), scores.std(), param))


def drawTunedAlgorithmsComparison(results, names, outputPath):
    global imageidx
    print '\n === Tuned Algorithms Comparison ===\n'
    #print bestResults
    for x in bestResults:
        print x
    # Compare Algorithms
    if (createImages):
        fig = plt.figure()
        fig.suptitle('Final Tuned-Algorithms Comparison')
        ax = fig.add_subplot(111)
        plt.boxplot(results)
        ax.set_xticklabels(names)
        #plt.show()
        plt.savefig(outputPath + str(imageidx).zfill(N_DIGITS) + '-Tuned-Algorithm-Comparison.png')
        imageidx += 1
    plt.close('all')


def set_createImages(value):
    global createImages
    createImages = value


def run(inputFilePath, outputPath, createImagesFlag, dropColumns):
    global start
    print '####################################################################'
    print '############### Running Exploratory Data Analysis #4 ###############'
    print '####################################################################'
    print ''
    set_createImages(createImagesFlag)
    start = time.clock()
    eda1.reset_imageidx()
    eda1.set_createImages(createImagesFlag)
    if not os.path.exists(outputPath):
        os.makedirs(outputPath)

    # Load dataset
    dataframe = eda1.loadDataframe(inputFilePath)

    # drop out 'not fair' features
    dataframe = eda1.dataCleansing(dataframe, dropColumns)

    # Split-out train/validation dataset
    X_train, X_validation, Y_train, Y_validation = eda1.splitoutValidationDataset(dataframe)

    '''
    # tune each algorithm
    try:
        tuneRF(X_train, Y_train, outputPath)
    except Exception as e:
        print "ERROR: couldn't tune RF"
        print "Message: %s" % str(e)

    try:
        tuneET(X_train, Y_train, outputPath)
    except Exception as e:
        print "ERROR: couldn't tune ET"
        print "Message: %s" % str(e)
    '''

    try:
        tuneSVM(X_train, Y_train, outputPath)
    except Exception as e:
        print "ERROR: couldn't tune SVM"
        print "Message: %s" % str(e)

    # print the results comparing the algorithms with the best tune for each one
    drawTunedAlgorithmsComparison(results, names, outputPath)

    print '\n<<< THE END - Running Exploratory Data Analysis #4 >>>'


#RF - Best: 0.853451 using {'max_features': 'log2', 'n_estimators': 1000, 'criterion': 'gini'}
#ET - Best: 0.855320 using {'max_features': None, 'n_estimators': 1000, 'criterion': 'gini'}
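A typical driver for this module looks like the sketch below; the module path, file names, and column list are hypothetical:

# Hypothetical usage, assuming this file is saved as lib/eda4.py.
import lib.eda4 as eda4

eda4.run(inputFilePath='data/dataset.csv',
         outputPath='output/eda4/',
         createImagesFlag=True,
         dropColumns=['id', 'timestamp'])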
import numpy as np from scipy.linalg import norm from .base import AppearanceLucasKanade class SimultaneousForwardAdditive(AppearanceLucasKanade): @property def algorithm(self): return 'Simultaneous-FA' def _fit(self, lk_fitting, max_iters=20, project=True): # Initial error > eps error = self.eps + 1 image = lk_fitting.image lk_fitting.weights = [] n_iters = 0 # Number of shape weights n_params = self.transform.n_parameters # Initial appearance weights if project: # Obtained weights by projection IWxp = image.warp_to(self.template.mask, self.transform, interpolator=self.interpolator) weights = self.appearance_model.project(IWxp) # Reset template self.template = self.appearance_model.instance(weights) else: # Set all weights to 0 (yielding the mean) weights = np.zeros(self.appearance_model.n_active_components) lk_fitting.weights.append(weights) # Compute appearance model Jacobian wrt weights appearance_jacobian = self.appearance_model._jacobian.T # Forward Additive Algorithm while n_iters < max_iters and error > self.eps: # Compute warped image with current weights IWxp = image.warp_to(self.template.mask, self.transform, interpolator=self.interpolator) # Compute warp Jacobian dW_dp = self.transform.jacobian( self.template.mask.true_indices) # Compute steepest descent images, VI_dW_dp J = self.residual.steepest_descent_images( image, dW_dp, forward=(self.template, self.transform, self.interpolator)) # Concatenate VI_dW_dp with appearance model Jacobian self._J = np.hstack((J, appearance_jacobian)) # Compute Hessian and inverse self._H = self.residual.calculate_hessian(self._J) # Compute steepest descent parameter updates sd_delta_p = self.residual.steepest_descent_update( self._J, self.template, IWxp) # Compute gradient descent parameter updates delta_p = np.real(self._calculate_delta_p(sd_delta_p)) # Update warp weights parameters = self.transform.as_vector() + delta_p[:n_params] self.transform.from_vector_inplace(parameters) lk_fitting.parameters.append(parameters) # Update appearance weights weights -= delta_p[n_params:] self.template = self.appearance_model.instance(weights) lk_fitting.weights.append(weights) # Test convergence error = np.abs(norm(delta_p)) n_iters += 1 lk_fitting.fitted = True return lk_fitting class SimultaneousForwardCompositional(AppearanceLucasKanade): @property def algorithm(self): return 'Simultaneous-FC' def _set_up(self): # Compute warp Jacobian self._dW_dp = self.transform.jacobian( self.template.mask.true_indices) def _fit(self, lk_fitting, max_iters=20, project=True): # Initial error > eps error = self.eps + 1 image = lk_fitting.image lk_fitting.weights = [] n_iters = 0 # Number of shape weights n_params = self.transform.n_parameters # Initial appearance weights if project: # Obtained weights by projection IWxp = image.warp_to(self.template.mask, self.transform, interpolator=self.interpolator) weights = self.appearance_model.project(IWxp) # Reset template self.template = self.appearance_model.instance(weights) else: # Set all weights to 0 (yielding the mean) weights = np.zeros(self.appearance_model.n_active_components) lk_fitting.weights.append(weights) # Compute appearance model Jacobian wrt weights appearance_jacobian = self.appearance_model._jacobian.T # Forward Additive Algorithm while n_iters < max_iters and error > self.eps: # Compute warped image with current weights IWxp = image.warp_to(self.template.mask, self.transform, interpolator=self.interpolator) # Compute steepest descent images, VI_dW_dp J = self.residual.steepest_descent_images(IWxp, 
self._dW_dp) # Concatenate VI_dW_dp with appearance model Jacobian self._J = np.hstack((J, appearance_jacobian)) # Compute Hessian and inverse self._H = self.residual.calculate_hessian(self._J) # Compute steepest descent parameter updates sd_delta_p = self.residual.steepest_descent_update( self._J, self.template, IWxp) # Compute gradient descent parameter updates delta_p = np.real(self._calculate_delta_p(sd_delta_p)) # Update warp weights self.transform.compose_after_from_vector_inplace(delta_p[:n_params]) lk_fitting.parameters.append(self.transform.as_vector()) # Update appearance weights weights -= delta_p[n_params:] self.template = self.appearance_model.instance(weights) lk_fitting.weights.append(weights) # Test convergence error = np.abs(norm(delta_p)) n_iters += 1 lk_fitting.fitted = True return lk_fitting class SimultaneousInverseCompositional(AppearanceLucasKanade): @property def algorithm(self): return 'Simultaneous-IA' def _set_up(self): # Compute the Jacobian of the warp self._dW_dp = self.transform.jacobian( self.appearance_model.mean.mask.true_indices) def _fit(self, lk_fitting, max_iters=20, project=True): # Initial error > eps error = self.eps + 1 image = lk_fitting.image lk_fitting.weights = [] n_iters = 0 # Number of shape weights n_params = self.transform.n_parameters # Initial appearance weights if project: # Obtained weights by projection IWxp = image.warp_to(self.template.mask, self.transform, interpolator=self.interpolator) weights = self.appearance_model.project(IWxp) # Reset template self.template = self.appearance_model.instance(weights) else: # Set all weights to 0 (yielding the mean) weights = np.zeros(self.appearance_model.n_active_components) lk_fitting.weights.append(weights) # Compute appearance model Jacobian wrt weights appearance_jacobian = -self.appearance_model._jacobian.T # Baker-Matthews, Inverse Compositional Algorithm while n_iters < max_iters and error > self.eps: # Compute warped image with current weights IWxp = image.warp_to(self.template.mask, self.transform, interpolator=self.interpolator) # Compute steepest descent images, VT_dW_dp J = self.residual.steepest_descent_images(self.template, self._dW_dp) # Concatenate VI_dW_dp with appearance model Jacobian self._J = np.hstack((J, appearance_jacobian)) # Compute Hessian and inverse self._H = self.residual.calculate_hessian(self._J) # Compute steepest descent parameter updates sd_delta_p = self.residual.steepest_descent_update( self._J, IWxp, self.template) # Compute gradient descent parameter updates delta_p = -np.real(self._calculate_delta_p(sd_delta_p)) # Update warp weights self.transform.compose_after_from_vector_inplace(delta_p[:n_params]) lk_fitting.parameters.append(self.transform.as_vector()) # Update appearance weights weights -= delta_p[n_params:] self.template = self.appearance_model.instance(weights) lk_fitting.weights.append(weights) # Test convergence error = np.abs(norm(delta_p)) n_iters += 1 lk_fitting.fitted = True return lk_fitting
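All three fitters above solve the same Gauss-Newton normal equations on each iteration; with the warp parameters p, appearance weights lambda, and residual r, the update computed by calculate_hessian and steepest_descent_update can be sketched as follows (the sign, and whether the warp update is added or composed, differ per variant, as the code shows):

\Delta \begin{bmatrix} p \\ \lambda \end{bmatrix} = H^{-1} J^\top r,
\qquad
J = \begin{bmatrix} J_{\mathrm{warp}} & J_{\mathrm{appearance}} \end{bmatrix},
\qquad
H = J^\top J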
"""Provides fakes for several of Telemetry's internal objects. These allow code like story_runner and Benchmark to be run and tested without compiling or starting a browser. Class names prepended with an underscore are intended to be implementation details, and should not be subclassed; however, some, like _FakeBrowser, have public APIs that may need to be called in tests. """ from telemetry.internal.backends.chrome_inspector import websocket from telemetry.internal.browser import browser_options from telemetry.internal.platform import system_info from telemetry.page import shared_page_state from telemetry.util import image_util from telemetry.testing.internal import fake_gpu_info from types import ModuleType class FakePlatform(object): def __init__(self): self._network_controller = None self._tracing_controller = None self._has_battor = False self._os_name = 'FakeOS' self._device_type_name = 'abc' self._is_svelte = False self._is_aosp = True @property def is_host_platform(self): raise NotImplementedError @property def network_controller(self): if self._network_controller is None: self._network_controller = _FakeNetworkController() return self._network_controller @property def tracing_controller(self): if self._tracing_controller is None: self._tracing_controller = _FakeTracingController() return self._tracing_controller def Initialize(self): pass def CanMonitorThermalThrottling(self): return False def IsThermallyThrottled(self): return False def HasBeenThermallyThrottled(self): return False def GetArchName(self): raise NotImplementedError def SetOSName(self, name): self._os_name = name def GetOSName(self): return self._os_name def GetOSVersionName(self): raise NotImplementedError def GetOSVersionDetailString(self): raise NotImplementedError def StopAllLocalServers(self): pass def WaitForBatteryTemperature(self, _): pass def HasBattOrConnected(self): return self._has_battor def SetBattOrDetected(self, b): assert isinstance(b, bool) self._has_battor = b # TODO(rnephew): Investigate moving from setters to @property. 
def SetDeviceTypeName(self, name): self._device_type_name = name def GetDeviceTypeName(self): return self._device_type_name def SetIsSvelte(self, b): assert isinstance(b, bool) self._is_svelte = b def IsSvelte(self): if self._os_name != 'android': raise NotImplementedError return self._is_svelte def SetIsAosp(self, b): assert isinstance(b, bool) self._is_aosp = b def IsAosp(self): return self._is_aosp and self._os_name == 'android' class FakeLinuxPlatform(FakePlatform): def __init__(self): super(FakeLinuxPlatform, self).__init__() self.screenshot_png_data = None self.http_server_directories = [] self.http_server = FakeHTTPServer() @property def is_host_platform(self): return True def GetDeviceTypeName(self): return 'Desktop' def GetArchName(self): return 'x86_64' def GetOSName(self): return 'linux' def GetOSVersionName(self): return 'trusty' def GetOSVersionDetailString(self): return '' def CanTakeScreenshot(self): return bool(self.screenshot_png_data) def TakeScreenshot(self, file_path): if not self.CanTakeScreenshot(): raise NotImplementedError img = image_util.FromBase64Png(self.screenshot_png_data) image_util.WritePngFile(img, file_path) return True def SetHTTPServerDirectories(self, paths): self.http_server_directories.append(paths) class FakeHTTPServer(object): def UrlOf(self, url): del url # unused return 'file:///foo' class FakePossibleBrowser(object): def __init__(self, execute_on_startup=None, execute_after_browser_creation=None): self._returned_browser = _FakeBrowser(FakeLinuxPlatform()) self.browser_type = 'linux' self.supports_tab_control = False self.is_remote = False self.execute_on_startup = execute_on_startup self.execute_after_browser_creation = execute_after_browser_creation @property def returned_browser(self): """The browser object that will be returned through later API calls.""" return self._returned_browser def Create(self, finder_options): if self.execute_on_startup is not None: self.execute_on_startup() del finder_options # unused if self.execute_after_browser_creation is not None: self.execute_after_browser_creation(self._returned_browser) return self.returned_browser @property def platform(self): """The platform object from the returned browser. To change this or set it up, change the returned browser's platform. """ return self.returned_browser.platform def IsRemote(self): return self.is_remote def SetCredentialsPath(self, _): pass class FakeSharedPageState(shared_page_state.SharedPageState): def __init__(self, test, finder_options, story_set): super(FakeSharedPageState, self).__init__(test, finder_options, story_set) def _GetPossibleBrowser(self, test, finder_options): p = FakePossibleBrowser() self.ConfigurePossibleBrowser(p) return p def ConfigurePossibleBrowser(self, possible_browser): """Override this to configure the PossibleBrowser. Can make changes to the browser's configuration here via e.g.: possible_browser.returned_browser.returned_system_info = ... """ pass def DidRunStory(self, results): # TODO(kbr): add a test which throws an exception from DidRunStory # to verify the fix from https://crrev.com/86984d5fc56ce00e7b37ebe . 
super(FakeSharedPageState, self).DidRunStory(results) class FakeSystemInfo(system_info.SystemInfo): def __init__(self, model_name='', gpu_dict=None, command_line=''): if gpu_dict == None: gpu_dict = fake_gpu_info.FAKE_GPU_INFO super(FakeSystemInfo, self).__init__(model_name, gpu_dict, command_line) class _FakeBrowserFinderOptions(browser_options.BrowserFinderOptions): def __init__(self, execute_on_startup=None, execute_after_browser_creation=None, *args, **kwargs): browser_options.BrowserFinderOptions.__init__(self, *args, **kwargs) self.fake_possible_browser = \ FakePossibleBrowser( execute_on_startup=execute_on_startup, execute_after_browser_creation=execute_after_browser_creation) def CreateBrowserFinderOptions(browser_type=None, execute_on_startup=None, execute_after_browser_creation=None): """Creates fake browser finder options for discovering a browser.""" return _FakeBrowserFinderOptions( browser_type=browser_type, execute_on_startup=execute_on_startup, execute_after_browser_creation=execute_after_browser_creation) class _FakeBrowser(object): def __init__(self, platform): self._tabs = _FakeTabList(self) # Fake the creation of the first tab. self._tabs.New() self._returned_system_info = FakeSystemInfo() self._platform = platform self._browser_type = 'release' self._is_crashed = False @property def platform(self): return self._platform @platform.setter def platform(self, incoming): """Allows overriding of the fake browser's platform object.""" assert isinstance(incoming, FakePlatform) self._platform = incoming @property def returned_system_info(self): """The object which will be returned from calls to GetSystemInfo.""" return self._returned_system_info @returned_system_info.setter def returned_system_info(self, incoming): """Allows overriding of the returned SystemInfo object. Incoming argument must be an instance of FakeSystemInfo.""" assert isinstance(incoming, FakeSystemInfo) self._returned_system_info = incoming @property def browser_type(self): """The browser_type this browser claims to be ('debug', 'release', etc.)""" return self._browser_type @browser_type.setter def browser_type(self, incoming): """Allows setting of the browser_type.""" self._browser_type = incoming @property def credentials(self): return _FakeCredentials() def Close(self): self._is_crashed = False @property def supports_system_info(self): return True def GetSystemInfo(self): return self.returned_system_info @property def supports_tab_control(self): return True @property def tabs(self): return self._tabs def DumpStateUponFailure(self): pass class _FakeCredentials(object): def WarnIfMissingCredentials(self, _): pass class _FakeTracingController(object): def __init__(self): self._is_tracing = False def StartTracing(self, tracing_config, timeout=10): self._is_tracing = True del tracing_config del timeout def StopTracing(self): self._is_tracing = False @property def is_tracing_running(self): return self._is_tracing def ClearStateIfNeeded(self): pass def IsChromeTracingSupported(self): return True class _FakeNetworkController(object): def __init__(self): self.wpr_mode = None self.extra_wpr_args = None self.is_initialized = False self.is_open = False self.use_live_traffic = None def InitializeIfNeeded(self, use_live_traffic=False): self.use_live_traffic = use_live_traffic def UpdateTrafficSettings(self, round_trip_latency_ms=None, download_bandwidth_kbps=None, upload_bandwidth_kbps=None): pass def Open(self, wpr_mode, extra_wpr_args, use_wpr_go=False): del use_wpr_go # Unused. 
self.wpr_mode = wpr_mode self.extra_wpr_args = extra_wpr_args self.is_open = True def Close(self): self.wpr_mode = None self.extra_wpr_args = None self.is_initialized = False self.is_open = False def StartReplay(self, archive_path, make_javascript_deterministic=False): del make_javascript_deterministic # Unused. assert self.is_open self.is_initialized = archive_path is not None def StopReplay(self): self.is_initialized = False class _FakeTab(object): def __init__(self, browser, tab_id): self._browser = browser self._tab_id = str(tab_id) self._collect_garbage_count = 0 self.test_png = None @property def collect_garbage_count(self): return self._collect_garbage_count @property def id(self): return self._tab_id @property def browser(self): return self._browser def WaitForDocumentReadyStateToBeComplete(self, timeout=0): pass def Navigate(self, url, script_to_evaluate_on_commit=None, timeout=0): del script_to_evaluate_on_commit, timeout # unused if url == 'chrome://crash': self.browser._is_crashed = True raise Exception def WaitForDocumentReadyStateToBeInteractiveOrBetter(self, timeout=0): pass def WaitForFrameToBeDisplayed(self, timeout=0): pass def IsAlive(self): return True def CloseConnections(self): pass def CollectGarbage(self): self._collect_garbage_count += 1 def Close(self): pass @property def screenshot_supported(self): return self.test_png is not None def Screenshot(self): assert self.screenshot_supported, 'Screenshot is not supported' return image_util.FromBase64Png(self.test_png) class _FakeTabList(object): _current_tab_id = 0 def __init__(self, browser): self._tabs = [] self._browser = browser def New(self, timeout=300): del timeout # unused type(self)._current_tab_id += 1 t = _FakeTab(self._browser, type(self)._current_tab_id) self._tabs.append(t) return t def __iter__(self): return self._tabs.__iter__() def __len__(self): return len(self._tabs) def __getitem__(self, index): if self._tabs[index].browser._is_crashed: raise Exception else: return self._tabs[index] def GetTabById(self, identifier): """The identifier of a tab can be accessed with tab.id.""" for tab in self._tabs: if tab.id == identifier: return tab return None class FakeInspectorWebsocket(object): _NOTIFICATION_EVENT = 1 _NOTIFICATION_CALLBACK = 2 """A fake InspectorWebsocket. A fake that allows tests to send pregenerated data. Normal InspectorWebsockets allow for any number of domain handlers. This fake only allows up to 1 domain handler, and assumes that the domain of the response always matches that of the handler. 
""" def __init__(self, mock_timer): self._mock_timer = mock_timer self._notifications = [] self._response_handlers = {} self._pending_callbacks = {} self._handler = None def RegisterDomain(self, _, handler): self._handler = handler def AddEvent(self, method, params, time): if self._notifications: assert self._notifications[-1][1] < time, ( 'Current response is scheduled earlier than previous response.') response = {'method': method, 'params': params} self._notifications.append((response, time, self._NOTIFICATION_EVENT)) def AddAsyncResponse(self, method, result, time): if self._notifications: assert self._notifications[-1][1] < time, ( 'Current response is scheduled earlier than previous response.') response = {'method': method, 'result': result} self._notifications.append((response, time, self._NOTIFICATION_CALLBACK)) def AddResponseHandler(self, method, handler): self._response_handlers[method] = handler def SyncRequest(self, request, *args, **kwargs): del args, kwargs # unused handler = self._response_handlers[request['method']] return handler(request) if handler else None def AsyncRequest(self, request, callback): self._pending_callbacks.setdefault(request['method'], []).append(callback) def SendAndIgnoreResponse(self, request): pass def Connect(self, _): pass def DispatchNotifications(self, timeout): current_time = self._mock_timer.time() if not self._notifications: self._mock_timer.SetTime(current_time + timeout + 1) raise websocket.WebSocketTimeoutException() response, time, kind = self._notifications[0] if time - current_time > timeout: self._mock_timer.SetTime(current_time + timeout + 1) raise websocket.WebSocketTimeoutException() self._notifications.pop(0) self._mock_timer.SetTime(time + 1) if kind == self._NOTIFICATION_EVENT: self._handler(response) elif kind == self._NOTIFICATION_CALLBACK: callback = self._pending_callbacks.get(response['method']).pop(0) callback(response) else: raise Exception('Unexpected response type') class FakeTimer(object): """ A fake timer to fake out the timing for a module. Args: module: module to fake out the time """ def __init__(self, module=None): self._elapsed_time = 0 self._module = module self._actual_time = None if module: assert isinstance(module, ModuleType) self._actual_time = module.time self._module.time = self def sleep(self, time): self._elapsed_time += time def time(self): return self._elapsed_time def SetTime(self, time): self._elapsed_time = time def __del__(self): self.Restore() def Restore(self): if self._module: self._module.time = self._actual_time self._module = None self._actual_time = None
""" Quality Control based on fuzzy logic. """ import logging import numpy as np from .core import QCCheckVar from .gradient import gradient from .spike import spike from .woa_normbias import woa_normbias from cotede.fuzzy import fuzzy_uncertainty module_logger = logging.getLogger(__name__) def fuzzylogic(features, cfg, require="all"): """ FIXME: Think about, should I return 0, or have an assert, and at qc.py all qc tests are applied with a try, and in case it fails it flag 0s. """ require = cfg.get("require", require) if (require == "all") and not np.all([f in features for f in cfg["features"]]): module_logger.warning( "Not all features (%s) required by fuzzy logic are available".format( cfg["features"].keys() ) ) raise KeyError uncertainty = fuzzy_uncertainty( data=features, features=cfg["features"], output=cfg["output"], require=require ) return uncertainty class FuzzyLogic(QCCheckVar): def set_features(self): self.features = {} for v in [f for f in self.cfg["features"] if f not in self.features]: if v == "woa_bias": woa_comparison = woa_normbias(self.data, self.varname, self.attrs) self.features[v] = woa_comparison["woa_bias"] elif v == "woa_normbias": woa_comparison = woa_normbias(self.data, self.varname, self.attrs) self.features[v] = woa_comparison["woa_normbias"] elif v == "spike": self.features[v] = spike(self.data[self.varname]) elif v == "gradient": self.features[v] = gradient(self.data[self.varname]) self.features["fuzzylogic"] = fuzzylogic(self.features, self.cfg) def test(self): self.flags = {} cfg = self.cfg flag = np.zeros(np.shape(self.data[self.varname]), dtype="i1") uncertainty = self.features["fuzzylogic"] # FIXME: As it is now, it will have no zero flag value. Think about cases # where some values in a profile would not be estimated, hence flag=0 # I needed to use np.nonzeros because now uncertainty is a masked array, # to accept when a feature is masked. flag[np.nonzero(uncertainty <= 0.29)] = 1 flag[np.nonzero((uncertainty > 0.29) & (uncertainty <= 0.34))] = 2 flag[np.nonzero((uncertainty > 0.34) & (uncertainty <= 0.72))] = 3 flag[np.nonzero(uncertainty > 0.72)] = 4 self.flags["fuzzylogic"] = flag
from setuptools import setup, find_packages setup(name='gelato.models', version='0.1.2', description='Gelato models', namespace_packages=['gelato'], long_description='', author='', author_email='', license='', url='', include_package_data=True, packages=find_packages(exclude=['tests']), install_requires=['django', 'tower'])
import sys
import warnings

try:
    # On Python 2 bind the lazy izip as zip; on Python 3 the import fails
    # and the builtin zip is already lazy.
    from itertools import izip as zip
except ImportError:
    pass

from itertools import product

import numpy as np

from .. import util
from ..dimension import dimension_name
from ..element import Element
from ..ndmapping import NdMapping, item_check, sorted_context
from .interface import DataError, Interface
from .pandas import PandasInterface
from .util import finite_range


class cuDFInterface(PandasInterface):
    """
    The cuDFInterface allows Dataset objects to wrap a cuDF DataFrame
    object. Using cuDF allows working with columnar data on a GPU. Most
    operations leave the data in GPU memory; to plot the data, however,
    it has to be loaded into host memory.

    The cuDFInterface covers almost the complete API exposed by the
    PandasInterface with two notable exceptions:

    1) Aggregation and groupby do not have a consistent sort order
       (see https://github.com/rapidsai/cudf/issues/4237)
    2) Not all functions can be easily applied to a cuDF so some
       functions applied with aggregate and reduce will not work.
    """

    datatype = 'cuDF'

    types = ()

    @classmethod
    def loaded(cls):
        return 'cudf' in sys.modules

    @classmethod
    def applies(cls, obj):
        if not cls.loaded():
            return False
        import cudf
        return isinstance(obj, (cudf.DataFrame, cudf.Series))

    @classmethod
    def init(cls, eltype, data, kdims, vdims):
        import cudf
        import pandas as pd

        element_params = eltype.param.objects()
        kdim_param = element_params['kdims']
        vdim_param = element_params['vdims']

        if isinstance(data, (cudf.Series, pd.Series)):
            data = data.to_frame()

        if not isinstance(data, cudf.DataFrame):
            data, _, _ = PandasInterface.init(eltype, data, kdims, vdims)
            data = cudf.from_pandas(data)

        columns = list(data.columns)
        ncols = len(columns)

        index_names = [data.index.name]
        if index_names == [None]:
            index_names = ['index']

        if eltype._auto_indexable_1d and ncols == 1 and kdims is None:
            kdims = list(index_names)

        if isinstance(kdim_param.bounds[1], int):
            ndim = min([kdim_param.bounds[1], len(kdim_param.default)])
        else:
            ndim = None
        nvdim = vdim_param.bounds[1] if isinstance(vdim_param.bounds[1], int) else None
        if kdims and vdims is None:
            vdims = [c for c in columns if c not in kdims]
        elif vdims and kdims is None:
            kdims = [c for c in columns if c not in vdims][:ndim]
        elif kdims is None:
            kdims = list(columns[:ndim])
            if vdims is None:
                vdims = [d for d in columns[ndim:((ndim+nvdim) if nvdim else None)]
                         if d not in kdims]
        elif kdims == [] and vdims is None:
            vdims = list(columns[:nvdim if nvdim else None])

        # Handle reset of index if kdims reference index by name
        for kd in kdims:
            kd = dimension_name(kd)
            if kd in columns:
                continue
            if any(kd == ('index' if name is None else name)
                   for name in index_names):
                data = data.reset_index()
                break
        if any(isinstance(d, (np.int64, int)) for d in kdims+vdims):
            raise DataError("cudf DataFrame column names used as dimensions "
                            "must be strings not integers.", cls)

        if kdims:
            kdim = dimension_name(kdims[0])
            if eltype._auto_indexable_1d and ncols == 1 and kdim not in columns:
                data = data.copy()
                data.insert(0, kdim, np.arange(len(data)))

        for d in kdims+vdims:
            d = dimension_name(d)
            if len([c for c in columns if c == d]) > 1:
                raise DataError('Dimensions may not reference duplicated DataFrame '
                                'columns (found duplicate %r columns). If you want to plot '
                                'a column against itself simply declare two dimensions '
                                'with the same name.' % d, cls)
        return data, {'kdims': kdims, 'vdims': vdims}, {}

    @classmethod
    def range(cls, dataset, dimension):
        dimension = dataset.get_dimension(dimension, strict=True)
        column = dataset.data[dimension.name]
        if dimension.nodata is not None:
            column = cls.replace_value(column, dimension.nodata)
        if column.dtype.kind == 'O':
            return np.NaN, np.NaN
        else:
            return finite_range(column, column.min(), column.max())

    @classmethod
    def values(cls, dataset, dim, expanded=True, flat=True, compute=True,
               keep_index=False):
        dim = dataset.get_dimension(dim, strict=True)
        data = dataset.data[dim.name]
        if not expanded:
            data = data.unique()
            return data.values_host if compute else data.values
        elif keep_index:
            return data
        elif compute:
            return data.values_host
        try:
            return data.values
        except Exception:
            return data.values_host

    @classmethod
    def groupby(cls, dataset, dimensions, container_type, group_type, **kwargs):
        # Get dimensions information
        dimensions = [dataset.get_dimension(d).name for d in dimensions]
        kdims = [kdim for kdim in dataset.kdims if kdim not in dimensions]

        # Update the kwargs appropriately for Element group types
        group_kwargs = {}
        group_type = dict if group_type == 'raw' else group_type
        if issubclass(group_type, Element):
            group_kwargs.update(util.get_param_values(dataset))
            group_kwargs['kdims'] = kdims
        group_kwargs.update(kwargs)

        # Propagate dataset
        group_kwargs['dataset'] = dataset.dataset

        # Find all the keys along supplied dimensions (iterate over each
        # supplied dimension, not just the first one)
        keys = product(*(dataset.data[d].unique().values_host
                         for d in dimensions))

        # Iterate over the unique entries applying selection masks
        grouped_data = []
        for unique_key in util.unique_iterator(keys):
            group_data = dataset.select(**dict(zip(dimensions, unique_key)))
            if not len(group_data):
                continue
            group_data = group_type(group_data, **group_kwargs)
            grouped_data.append((unique_key, group_data))

        if issubclass(container_type, NdMapping):
            with item_check(False), sorted_context(False):
                kdims = [dataset.get_dimension(d) for d in dimensions]
                return container_type(grouped_data, kdims=kdims)
        else:
            return container_type(grouped_data)

    @classmethod
    def select_mask(cls, dataset, selection):
        """
        Given a Dataset object and a dictionary with dimension keys and
        selection keys (i.e. tuple ranges, slices, sets, lists, or literals)
        return a boolean mask over the rows in the Dataset object that
        have been selected.
        """
        mask = None
        for dim, sel in selection.items():
            if isinstance(sel, tuple):
                sel = slice(*sel)
            arr = cls.values(dataset, dim, keep_index=True)
            if util.isdatetime(arr) and util.pd:
                try:
                    sel = util.parse_datetime_selection(sel)
                except Exception:
                    pass

            new_masks = []
            if isinstance(sel, slice):
                with warnings.catch_warnings():
                    warnings.filterwarnings('ignore', r'invalid value encountered')
                    if sel.start is not None:
                        new_masks.append(sel.start <= arr)
                    if sel.stop is not None:
                        new_masks.append(arr < sel.stop)
                if not new_masks:
                    continue
                new_mask = new_masks[0]
                for imask in new_masks[1:]:
                    new_mask &= imask
            elif isinstance(sel, (set, list)):
                for v in sel:
                    new_masks.append(arr == v)
                if not new_masks:
                    continue
                new_mask = new_masks[0]
                for imask in new_masks[1:]:
                    new_mask |= imask
            elif callable(sel):
                new_mask = sel(arr)
            else:
                new_mask = arr == sel

            if mask is None:
                mask = new_mask
            else:
                mask &= new_mask
        return mask

    @classmethod
    def select(cls, dataset, selection_mask=None, **selection):
        df = dataset.data
        if selection_mask is None:
            selection_mask = cls.select_mask(dataset, selection)
        indexed = cls.indexed(dataset, selection)
        if selection_mask is not None:
            df = df.loc[selection_mask]
        if indexed and len(df) == 1 and len(dataset.vdims) == 1:
            return df[dataset.vdims[0].name].iloc[0]
        return df

    @classmethod
    def concat_fn(cls, dataframes, **kwargs):
        import cudf
        return cudf.concat(dataframes, **kwargs)

    @classmethod
    def add_dimension(cls, dataset, dimension, dim_pos, values, vdim):
        data = dataset.data.copy()
        if dimension.name not in data:
            data[dimension.name] = values
        return data

    @classmethod
    def aggregate(cls, dataset, dimensions, function, **kwargs):
        data = dataset.data
        cols = [d.name for d in dataset.kdims if d in dimensions]
        vdims = dataset.dimensions('value', label='name')
        reindexed = data[cols+vdims]
        agg = function.__name__
        if len(dimensions):
            agg_map = {'amin': 'min', 'amax': 'max'}
            agg = agg_map.get(agg, agg)
            grouped = reindexed.groupby(cols, sort=False)
            if not hasattr(grouped, agg):
                raise ValueError('%s aggregation is not supported on cudf DataFrame.' % agg)
            df = getattr(grouped, agg)().reset_index()
        else:
            agg_map = {'amin': 'min', 'amax': 'max', 'size': 'count'}
            agg = agg_map.get(agg, agg)
            if not hasattr(reindexed, agg):
                raise ValueError('%s aggregation is not supported on cudf DataFrame.' % agg)
            agg = getattr(reindexed, agg)()
            data = dict(((col, [v]) for col, v in
                         zip(agg.index.values_host, agg.to_array())))
            df = util.pd.DataFrame(data, columns=list(agg.index.values_host))

        dropped = []
        for vd in vdims:
            if vd not in df.columns:
                dropped.append(vd)

        return df, dropped

    @classmethod
    def iloc(cls, dataset, index):
        import cudf

        rows, cols = index
        scalar = False
        columns = list(dataset.data.columns)
        if isinstance(cols, slice):
            cols = [d.name for d in dataset.dimensions()][cols]
        elif np.isscalar(cols):
            scalar = np.isscalar(rows)
            cols = [dataset.get_dimension(cols).name]
        else:
            cols = [dataset.get_dimension(d).name for d in index[1]]
        col_index = [columns.index(c) for c in cols]
        if np.isscalar(rows):
            rows = [rows]

        if scalar:
            return dataset.data[cols[0]].iloc[rows[0]]
        result = dataset.data.iloc[rows, col_index]

        # cuDF does not handle single rows and cols indexing correctly
        # as of cudf=0.10.0 so we have to convert Series back to DataFrame
        if isinstance(result, cudf.Series):
            if len(cols) == 1:
                result = result.to_frame(cols[0])
            else:
                result = result.to_frame().T
        return result

    @classmethod
    def sort(cls, dataset, by=[], reverse=False):
        cols = [dataset.get_dimension(d, strict=True).name for d in by]
        return dataset.data.sort_values(by=cols, ascending=not reverse)

    @classmethod
    def dframe(cls, dataset, dimensions):
        if dimensions:
            return dataset.data[dimensions].to_pandas()
        else:
            return dataset.data.to_pandas()


Interface.register(cuDFInterface)
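
# Hedged usage sketch for the interface above: wrapping a cuDF DataFrame in a
# HoloViews Dataset dispatches to cuDFInterface automatically. Assumes cudf
# and holoviews are installed and a CUDA GPU is available; the column names
# and values are illustrative, not part of the module above.
#
#     import cudf
#     import holoviews as hv
#
#     gdf = cudf.DataFrame({'x': [0.0, 1.0, 2.0], 'y': [3.0, 4.0, 5.0]})
#     ds = hv.Dataset(gdf, kdims=['x'], vdims=['y'])
#     print(ds.interface.datatype)   # 'cuDF'
#     print(ds.range('y'))           # min/max computed on the GPU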
from corepy.spre.spe import Instruction, DispatchInstruction, Register
from spu_insts import *

__doc__ = """
ISA for the Cell Broadband Engine's SPU.
"""

class lqx(Instruction):      machine_inst = OPCD_B_A_T; params = {'OPCD': 452}; cycles = (1, 6, 0)
class stqx(Instruction):     machine_inst = OPCD_B_A_T; params = {'OPCD': 324}; cycles = (1, 6, 0)
class cbx(Instruction):      machine_inst = OPCD_B_A_T; params = {'OPCD': 468}; cycles = (1, 4, 0)
class chx(Instruction):      machine_inst = OPCD_B_A_T; params = {'OPCD': 469}; cycles = (1, 4, 0)
class cwx(Instruction):      machine_inst = OPCD_B_A_T; params = {'OPCD': 470}; cycles = (1, 4, 0)
class cdx(Instruction):      machine_inst = OPCD_B_A_T; params = {'OPCD': 471}; cycles = (1, 4, 0)
class ah(Instruction):       machine_inst = OPCD_B_A_T; params = {'OPCD': 200}; cycles = (0, 2, 0)
class a(Instruction):        machine_inst = OPCD_B_A_T; params = {'OPCD': 192}; cycles = (0, 2, 0)
class sfh(Instruction):      machine_inst = OPCD_B_A_T; params = {'OPCD': 72};  cycles = (0, 2, 0)
class sf(Instruction):       machine_inst = OPCD_B_A_T; params = {'OPCD': 64};  cycles = (0, 2, 0)
class addx(Instruction):     machine_inst = OPCD_B_A_T; params = {'OPCD': 832}; cycles = (0, 2, 0)
class cg(Instruction):       machine_inst = OPCD_B_A_T; params = {'OPCD': 194}; cycles = (0, 2, 0)
class cgx(Instruction):      machine_inst = OPCD_B_A_T; params = {'OPCD': 834}; cycles = (0, 2, 0)
class sfx(Instruction):      machine_inst = OPCD_B_A_T; params = {'OPCD': 833}; cycles = (0, 2, 0)
class bg(Instruction):       machine_inst = OPCD_B_A_T; params = {'OPCD': 66};  cycles = (0, 2, 0)
class bgx(Instruction):      machine_inst = OPCD_B_A_T; params = {'OPCD': 835}; cycles = (0, 2, 0)
class mpy(Instruction):      machine_inst = OPCD_B_A_T; params = {'OPCD': 964}; cycles = (0, 7, 0)
class mpyu(Instruction):     machine_inst = OPCD_B_A_T; params = {'OPCD': 972}; cycles = (0, 7, 0)
class mpyh(Instruction):     machine_inst = OPCD_B_A_T; params = {'OPCD': 965}; cycles = (0, 7, 0)
class mpys(Instruction):     machine_inst = OPCD_B_A_T; params = {'OPCD': 967}; cycles = (0, 7, 0)
class mpyhh(Instruction):    machine_inst = OPCD_B_A_T; params = {'OPCD': 966}; cycles = (0, 7, 0)
class mpyhha(Instruction):   machine_inst = OPCD_B_A_T; params = {'OPCD': 838}; cycles = (0, 7, 0)
class mpyhhu(Instruction):   machine_inst = OPCD_B_A_T; params = {'OPCD': 974}; cycles = (0, 7, 0)
class mpyhhau(Instruction):  machine_inst = OPCD_B_A_T; params = {'OPCD': 846}; cycles = (0, 7, 0)
class clz(Instruction):      machine_inst = OPCD_A_T;   params = {'OPCD': 677}; cycles = (0, 2, 0)
class cntb(Instruction):     machine_inst = OPCD_A_T;   params = {'OPCD': 692}; cycles = (0, 4, 0)
class fsmb(Instruction):     machine_inst = OPCD_A_T;   params = {'OPCD': 438}; cycles = (1, 4, 0)
class fsmh(Instruction):     machine_inst = OPCD_A_T;   params = {'OPCD': 437}; cycles = (1, 4, 0)
class fsm(Instruction):      machine_inst = OPCD_A_T;   params = {'OPCD': 436}; cycles = (1, 4, 0)
class gbb(Instruction):      machine_inst = OPCD_A_T;   params = {'OPCD': 434}; cycles = (1, 4, 0)
class gbh(Instruction):      machine_inst = OPCD_A_T;   params = {'OPCD': 433}; cycles = (1, 4, 0)
class gb(Instruction):       machine_inst = OPCD_A_T;   params = {'OPCD': 432}; cycles = (1, 4, 0)
class avgb(Instruction):     machine_inst = OPCD_B_A_T; params = {'OPCD': 211}; cycles = (0, 4, 0)
class absdb(Instruction):    machine_inst = OPCD_B_A_T; params = {'OPCD': 83};  cycles = (0, 4, 0)
class sumb(Instruction):     machine_inst = OPCD_B_A_T; params = {'OPCD': 595}; cycles = (0, 4, 0)
class xsbh(Instruction):     machine_inst = OPCD_A_T;   params = {'OPCD': 694}; cycles = (0, 2, 0)
class xshw(Instruction):     machine_inst = OPCD_A_T;   params = {'OPCD': 686}; cycles = (0, 2, 0)
class xswd(Instruction):     machine_inst = OPCD_A_T;   params = {'OPCD': 678}; cycles = (0, 2, 0)
class and_(Instruction):     machine_inst = OPCD_B_A_T; params = {'OPCD': 193}; cycles = (0, 2, 0)
class andc(Instruction):     machine_inst = OPCD_B_A_T; params = {'OPCD': 705}; cycles = (0, 2, 0)
class or_(Instruction):      machine_inst = OPCD_B_A_T; params = {'OPCD': 65};  cycles = (0, 2, 0)
class orc(Instruction):      machine_inst = OPCD_B_A_T; params = {'OPCD': 713}; cycles = (0, 2, 0)
class orx(Instruction):      machine_inst = OPCD_A_T;   params = {'OPCD': 496}; cycles = (1, 4, 0)
class xor(Instruction):      machine_inst = OPCD_B_A_T; params = {'OPCD': 577}; cycles = (0, 2, 0)
class nand(Instruction):     machine_inst = OPCD_B_A_T; params = {'OPCD': 201}; cycles = (0, 2, 0)
class nor(Instruction):      machine_inst = OPCD_B_A_T; params = {'OPCD': 73};  cycles = (0, 2, 0)
class eqv(Instruction):      machine_inst = OPCD_B_A_T; params = {'OPCD': 585}; cycles = (0, 2, 0)
class shlh(Instruction):     machine_inst = OPCD_B_A_T; params = {'OPCD': 95};  cycles = (0, 4, 0)
class shl(Instruction):      machine_inst = OPCD_B_A_T; params = {'OPCD': 91};  cycles = (0, 4, 0)
class shlqbi(Instruction):   machine_inst = OPCD_B_A_T; params = {'OPCD': 475}; cycles = (1, 4, 0)
class shlqby(Instruction):   machine_inst = OPCD_B_A_T; params = {'OPCD': 479}; cycles = (1, 4, 0)
class shlqbybi(Instruction): machine_inst = OPCD_B_A_T; params = {'OPCD': 463}; cycles = (1, 4, 0)
class roth(Instruction):     machine_inst = OPCD_B_A_T; params = {'OPCD': 92};  cycles = (0, 4, 0)
class rot(Instruction):      machine_inst = OPCD_B_A_T; params = {'OPCD': 88};  cycles = (0, 4, 0)
class rotqby(Instruction):   machine_inst = OPCD_B_A_T; params = {'OPCD': 476}; cycles = (1, 4, 0)
class rotqbybi(Instruction): machine_inst = OPCD_B_A_T; params = {'OPCD': 460}; cycles = (1, 4, 0)
class rotqbi(Instruction):   machine_inst = OPCD_B_A_T; params = {'OPCD': 472}; cycles = (1, 4, 0)
class rothm(Instruction):    machine_inst = OPCD_B_A_T; params = {'OPCD': 93};  cycles = (0, 4, 0)
class rotm(Instruction):     machine_inst = OPCD_B_A_T; params = {'OPCD': 89};  cycles = (0, 4, 0)
class rotqmby(Instruction):  machine_inst = OPCD_B_A_T; params = {'OPCD': 477}; cycles = (1, 4, 0)
class rotqmbybi(Instruction): machine_inst = OPCD_B_A_T; params = {'OPCD': 461}; cycles = (1, 4, 0)
class rotqmbi(Instruction):  machine_inst = OPCD_B_A_T; params = {'OPCD': 473}; cycles = (1, 4, 0)
class rotmah(Instruction):   machine_inst = OPCD_B_A_T; params = {'OPCD': 94};  cycles = (0, 4, 0)
class rotma(Instruction):    machine_inst = OPCD_B_A_T; params = {'OPCD': 90};  cycles = (0, 4, 0)
class heq(Instruction):      machine_inst = OPCD_B_A_T; params = {'OPCD': 984}; cycles = (0, 2, 0)
class hgt(Instruction):      machine_inst = OPCD_B_A_T; params = {'OPCD': 600}; cycles = (0, 2, 0)
class hlgt(Instruction):     machine_inst = OPCD_B_A_T; params = {'OPCD': 728}; cycles = (0, 2, 0)
class ceqb(Instruction):     machine_inst = OPCD_B_A_T; params = {'OPCD': 976}; cycles = (0, 2, 0)
class ceqh(Instruction):     machine_inst = OPCD_B_A_T; params = {'OPCD': 968}; cycles = (0, 2, 0)
class ceq(Instruction):      machine_inst = OPCD_B_A_T; params = {'OPCD': 960}; cycles = (0, 2, 0)
class cgtb(Instruction):     machine_inst = OPCD_B_A_T; params = {'OPCD': 592}; cycles = (0, 2, 0)
class cgth(Instruction):     machine_inst = OPCD_B_A_T; params = {'OPCD': 584}; cycles = (0, 2, 0)
class cgt(Instruction):      machine_inst = OPCD_B_A_T; params = {'OPCD': 576}; cycles = (0, 2, 0)
class clgtb(Instruction):    machine_inst = OPCD_B_A_T; params = {'OPCD': 720}; cycles = (0, 2, 0)
class clgth(Instruction):    machine_inst = OPCD_B_A_T; params = {'OPCD': 712}; cycles = (0, 2, 0)
class clgt(Instruction):     machine_inst = OPCD_B_A_T; params = {'OPCD': 704}; cycles = (0, 2, 0)
class bi(Instruction):       machine_inst = OPCD_A_D_E;   params = {'OPCD': 424}; cycles = (1, 4, 0)
class iret(Instruction):     machine_inst = OPCD_A_D_E;   params = {'OPCD': 426}; cycles = (1, 4, 0)
class bisled(Instruction):   machine_inst = OPCD_A_T_D_E; params = {'OPCD': 427}; cycles = (1, 4, 0)
class bisl(Instruction):     machine_inst = OPCD_A_T_D_E; params = {'OPCD': 425}; cycles = (1, 4, 0)
class biz(Instruction):      machine_inst = OPCD_A_T_D_E; params = {'OPCD': 296}; cycles = (1, 4, 0)
class binz(Instruction):     machine_inst = OPCD_A_T_D_E; params = {'OPCD': 297}; cycles = (1, 4, 0)
class bihz(Instruction):     machine_inst = OPCD_A_T_D_E; params = {'OPCD': 294}; cycles = (1, 4, 0)
class bihnz(Instruction):    machine_inst = OPCD_A_T_D_E; params = {'OPCD': 299}; cycles = (1, 4, 0)

class hbr(DispatchInstruction):
    cycles = (1, 15, 0)
    dispatch = (
        (OPCD_RO_A_P,   {'OPCD': 428}),
        (OPCD_LBL9_A_P, {'OPCD': 428}))

class fa(Instruction):       machine_inst = OPCD_B_A_T; params = {'OPCD': 708}; cycles = (0, 6, 0)
class dfa(Instruction):      machine_inst = OPCD_B_A_T; params = {'OPCD': 716}; cycles = (0, 13, 6)
class fs(Instruction):       machine_inst = OPCD_B_A_T; params = {'OPCD': 709}; cycles = (0, 6, 0)
class dfs(Instruction):      machine_inst = OPCD_B_A_T; params = {'OPCD': 717}; cycles = (0, 13, 6)
class fm(Instruction):       machine_inst = OPCD_B_A_T; params = {'OPCD': 710}; cycles = (0, 6, 0)
class dfm(Instruction):      machine_inst = OPCD_B_A_T; params = {'OPCD': 718}; cycles = (0, 13, 6)
class dfma(Instruction):     machine_inst = OPCD_B_A_T; params = {'OPCD': 860}; cycles = (0, 13, 6)
class dfnms(Instruction):    machine_inst = OPCD_B_A_T; params = {'OPCD': 862}; cycles = (0, 13, 6)
class dfms(Instruction):     machine_inst = OPCD_B_A_T; params = {'OPCD': 861}; cycles = (0, 13, 6)
class dfnma(Instruction):    machine_inst = OPCD_B_A_T; params = {'OPCD': 863}; cycles = (0, 13, 6)
class frest(Instruction):    machine_inst = OPCD_A_T;   params = {'OPCD': 440}; cycles = (1, 4, 0)
class frsqest(Instruction):  machine_inst = OPCD_A_T;   params = {'OPCD': 441}; cycles = (1, 4, 0)
class fi(Instruction):       machine_inst = OPCD_B_A_T; params = {'OPCD': 980}; cycles = (0, 7, 0)
class frds(Instruction):     machine_inst = OPCD_A_T;   params = {'OPCD': 953}; cycles = (0, 13, 6)
class fesd(Instruction):     machine_inst = OPCD_A_T;   params = {'OPCD': 952}; cycles = (0, 13, 6)
class fceq(Instruction):     machine_inst = OPCD_B_A_T; params = {'OPCD': 962}; cycles = (0, 2, 0)
class fcmeq(Instruction):    machine_inst = OPCD_B_A_T; params = {'OPCD': 970}; cycles = (0, 2, 0)
class fcgt(Instruction):     machine_inst = OPCD_B_A_T; params = {'OPCD': 706}; cycles = (0, 2, 0)
class fcmgt(Instruction):    machine_inst = OPCD_B_A_T; params = {'OPCD': 714}; cycles = (0, 2, 0)
class fscrwr(Instruction):   machine_inst = OPCD_A_T;   params = {'OPCD': 954}; cycles = (0, 7, 0)
class fscrrd(Instruction):   machine_inst = OPCD_T;     params = {'OPCD': 920}; cycles = (0, 13, 6)
class stop(Instruction):     machine_inst = OPCD_STOP_SIG; params = {'OPCD': 0}; cycles = (1, 4, 0)
class stopd(Instruction):    machine_inst = OPCD_B_A_T; params = {'OPCD': 320}; cycles = (1, 4, 0)
class lnop(Instruction):     machine_inst = OPCD;       params = {'OPCD': 1};   cycles = (1, 0, 0)
class nop(Instruction):      machine_inst = OPCD_T;     params = {'OPCD': 513}; cycles = (0, 0, 0)
class sync(Instruction):     machine_inst = OPCD_CF;    params = {'OPCD': 2};   cycles = (1, 4, 0)
class dsync(Instruction):    machine_inst = OPCD;       params = {'OPCD': 3};   cycles = (1, 4, 0)
class mfspr(Instruction):    machine_inst = OPCD_SA_T;  params = {'OPCD': 12};  cycles = (1, 6, 0)
class mtspr(Instruction):    machine_inst = OPCD_SA_T;  params = {'OPCD': 268}; cycles = (1, 6, 0)
class rdch(Instruction):     machine_inst = OPCD_A_T;   params = {'OPCD': 13};  cycles = (1, 6, 0)
class rchcnt(Instruction):   machine_inst = OPCD_A_T;   params = {'OPCD': 15};  cycles = (1, 6, 0)
class wrch(Instruction):     machine_inst = OPCD_A_T;   params = {'OPCD': 269}; cycles = (1, 6, 0)
class mpya(Instruction):     machine_inst = OPCD_T_B_A_C; params = {'OPCD': 12}; cycles = (0, 7, 0)
class selb(Instruction):     machine_inst = OPCD_T_B_A_C; params = {'OPCD': 8};  cycles = (0, 2, 0)
class shufb(Instruction):    machine_inst = OPCD_T_B_A_C; params = {'OPCD': 11}; cycles = (1, 4, 0)
class fma(Instruction):      machine_inst = OPCD_T_B_A_C; params = {'OPCD': 14}; cycles = (0, 6, 0)
class fnms(Instruction):     machine_inst = OPCD_T_B_A_C; params = {'OPCD': 13}; cycles = (0, 6, 0)
class fms(Instruction):      machine_inst = OPCD_T_B_A_C; params = {'OPCD': 15}; cycles = (0, 6, 0)
class cbd(Instruction):      machine_inst = OPCD_I7_A_T; params = {'OPCD': 500}; cycles = (1, 4, 0)
class chd(Instruction):      machine_inst = OPCD_I7_A_T; params = {'OPCD': 501}; cycles = (1, 4, 0)
class cwd(Instruction):      machine_inst = OPCD_I7_A_T; params = {'OPCD': 502}; cycles = (1, 4, 0)
class cdd(Instruction):      machine_inst = OPCD_I7_A_T; params = {'OPCD': 503}; cycles = (1, 4, 0)
class shlhi(Instruction):    machine_inst = OPCD_I7_A_T; params = {'OPCD': 127}; cycles = (0, 4, 0)
class shli(Instruction):     machine_inst = OPCD_I7_A_T; params = {'OPCD': 123}; cycles = (0, 4, 0)
class shlqbii(Instruction):  machine_inst = OPCD_I7_A_T; params = {'OPCD': 507}; cycles = (1, 4, 0)
class shlqbyi(Instruction):  machine_inst = OPCD_I7_A_T; params = {'OPCD': 511}; cycles = (1, 4, 0)
class rothi(Instruction):    machine_inst = OPCD_I7_A_T; params = {'OPCD': 124}; cycles = (0, 4, 0)
class roti(Instruction):     machine_inst = OPCD_I7_A_T; params = {'OPCD': 120}; cycles = (0, 4, 0)
class rotqbyi(Instruction):  machine_inst = OPCD_I7_A_T; params = {'OPCD': 508}; cycles = (1, 4, 0)
class rotqbii(Instruction):  machine_inst = OPCD_I7_A_T; params = {'OPCD': 504}; cycles = (1, 4, 0)
class rothmi(Instruction):   machine_inst = OPCD_I7_A_T; params = {'OPCD': 125}; cycles = (0, 4, 0)
class rotmi(Instruction):    machine_inst = OPCD_I7_A_T; params = {'OPCD': 121}; cycles = (0, 4, 0)
class rotqmbyi(Instruction): machine_inst = OPCD_I7_A_T; params = {'OPCD': 509}; cycles = (1, 4, 0)
class rotqmbii(Instruction): machine_inst = OPCD_I7_A_T; params = {'OPCD': 505}; cycles = (1, 4, 0)
class rotmahi(Instruction):  machine_inst = OPCD_I7_A_T; params = {'OPCD': 126}; cycles = (0, 4, 0)
class rotmai(Instruction):   machine_inst = OPCD_I7_A_T; params = {'OPCD': 122}; cycles = (0, 4, 0)
class csflt(Instruction):    machine_inst = OPCD_I8_A_T; params = {'OPCD': 474}; cycles = (0, 7, 0)
class cflts(Instruction):    machine_inst = OPCD_I8_A_T; params = {'OPCD': 472}; cycles = (0, 7, 0)
class cuflt(Instruction):    machine_inst = OPCD_I8_A_T; params = {'OPCD': 475}; cycles = (0, 7, 0)
class cfltu(Instruction):    machine_inst = OPCD_I8_A_T; params = {'OPCD': 473}; cycles = (0, 7, 0)
class lqd(Instruction):      machine_inst = OPCD_I10_A_T; params = {'OPCD': 52};  cycles = (1, 6, 0)
class stqd(Instruction):     machine_inst = OPCD_I10_A_T; params = {'OPCD': 36};  cycles = (1, 6, 0)
class ahi(Instruction):      machine_inst = OPCD_I10_A_T; params = {'OPCD': 29};  cycles = (0, 2, 0)
class ai(Instruction):       machine_inst = OPCD_I10_A_T; params = {'OPCD': 28};  cycles = (0, 2, 0)
class sfhi(Instruction):     machine_inst = OPCD_I10_A_T; params = {'OPCD': 13};  cycles = (0, 2, 0)
class sfi(Instruction):      machine_inst = OPCD_I10_A_T; params = {'OPCD': 12};  cycles = (0, 2, 0)
class mpyi(Instruction):     machine_inst = OPCD_I10_A_T; params = {'OPCD': 116}; cycles = (0, 7, 0)
class mpyui(Instruction):    machine_inst = OPCD_I10_A_T; params = {'OPCD': 117}; cycles = (0, 7, 0)
class andbi(Instruction):    machine_inst = OPCD_I10_A_T; params = {'OPCD': 22};  cycles = (0, 2, 0)
class andhi(Instruction):    machine_inst = OPCD_I10_A_T; params = {'OPCD': 21};  cycles = (0, 2, 0)
class andi(Instruction):     machine_inst = OPCD_I10_A_T; params = {'OPCD': 20};  cycles = (0, 2, 0)
class orbi(Instruction):     machine_inst = OPCD_I10_A_T; params = {'OPCD': 6};   cycles = (0, 2, 0)
class orhi(Instruction):     machine_inst = OPCD_I10_A_T; params = {'OPCD': 5};   cycles = (0, 2, 0)
class ori(Instruction):      machine_inst = OPCD_I10_A_T; params = {'OPCD': 4};   cycles = (0, 2, 0)
class xorbi(Instruction):    machine_inst = OPCD_I10_A_T; params = {'OPCD': 70};  cycles = (0, 2, 0)
class xorhi(Instruction):    machine_inst = OPCD_I10_A_T; params = {'OPCD': 69};  cycles = (0, 2, 0)
class xori(Instruction):     machine_inst = OPCD_I10_A_T; params = {'OPCD': 68};  cycles = (0, 2, 0)
class heqi(Instruction):     machine_inst = OPCD_I10_A_T; params = {'OPCD': 127}; cycles = (0, 2, 0)
class hgti(Instruction):     machine_inst = OPCD_I10_A_T; params = {'OPCD': 79};  cycles = (0, 2, 0)
class hlgti(Instruction):    machine_inst = OPCD_I10_A_T; params = {'OPCD': 95};  cycles = (0, 2, 0)
class ceqbi(Instruction):    machine_inst = OPCD_I10_A_T; params = {'OPCD': 126}; cycles = (0, 2, 0)
class ceqhi(Instruction):    machine_inst = OPCD_I10_A_T; params = {'OPCD': 125}; cycles = (0, 2, 0)
class ceqi(Instruction):     machine_inst = OPCD_I10_A_T; params = {'OPCD': 124}; cycles = (0, 2, 0)
class cgtbi(Instruction):    machine_inst = OPCD_I10_A_T; params = {'OPCD': 78};  cycles = (0, 2, 0)
class cgthi(Instruction):    machine_inst = OPCD_I10_A_T; params = {'OPCD': 77};  cycles = (0, 2, 0)
class cgti(Instruction):     machine_inst = OPCD_I10_A_T; params = {'OPCD': 76};  cycles = (0, 2, 0)
class clgtbi(Instruction):   machine_inst = OPCD_I10_A_T; params = {'OPCD': 94};  cycles = (0, 2, 0)
class clgthi(Instruction):   machine_inst = OPCD_I10_A_T; params = {'OPCD': 93};  cycles = (0, 2, 0)
class clgti(Instruction):    machine_inst = OPCD_I10_A_T; params = {'OPCD': 92};  cycles = (0, 2, 0)
class lqa(Instruction):      machine_inst = OPCD_I16_T; params = {'OPCD': 97};  cycles = (1, 6, 0)
class lqr(Instruction):      machine_inst = OPCD_I16_T; params = {'OPCD': 103}; cycles = (1, 6, 0)
class stqa(Instruction):     machine_inst = OPCD_I16_T; params = {'OPCD': 65};  cycles = (1, 6, 0)
class stqr(Instruction):     machine_inst = OPCD_I16_T; params = {'OPCD': 71};  cycles = (1, 6, 0)
class ilh(Instruction):      machine_inst = OPCD_I16_T; params = {'OPCD': 131}; cycles = (0, 2, 0)
class ilhu(Instruction):     machine_inst = OPCD_I16_T; params = {'OPCD': 130}; cycles = (0, 2, 0)
class il(Instruction):       machine_inst = OPCD_I16_T; params = {'OPCD': 129}; cycles = (0, 2, 0)
class iohl(Instruction):     machine_inst = OPCD_I16_T; params = {'OPCD': 193}; cycles = (0, 2, 0)
class fsmbi(Instruction):    machine_inst = OPCD_I16_T; params = {'OPCD': 101}; cycles = (1, 4, 0)

class br(DispatchInstruction):
    cycles = (1, 4, 0)
    dispatch = (
        (OPCD_I16,   {'OPCD': 100}),
        (OPCD_LBL16, {'OPCD': 100}))

class bra(Instruction):      machine_inst = OPCD_I16; params = {'OPCD': 96}; cycles = (1, 4, 0)

class brsl(DispatchInstruction):
    cycles = (1, 4, 0)
    dispatch = (
        (OPCD_I16_T,   {'OPCD': 102}),
        (OPCD_LBL16_T, {'OPCD': 102}))

class brasl(Instruction):    machine_inst = OPCD_I16_T; params = {'OPCD': 98}; cycles = (1, 4, 0)

class brnz(DispatchInstruction):
    cycles = (1, 4, 0)
    dispatch = (
        (OPCD_I16_T,   {'OPCD': 66}),
        (OPCD_LBL16_T, {'OPCD': 66}))

class brz(DispatchInstruction):
    cycles = (1, 4, 0)
    dispatch = (
        (OPCD_I16_T,   {'OPCD': 64}),
        (OPCD_LBL16_T, {'OPCD': 64}))

class brhnz(DispatchInstruction):
    cycles = (1, 4, 0)
    dispatch = (
        (OPCD_I16,   {'OPCD': 70}),
        (OPCD_LBL16, {'OPCD': 70}))

class brhz(DispatchInstruction):
    cycles = (1, 4, 0)
    dispatch = (
        (OPCD_I16,   {'OPCD': 68}),
        (OPCD_LBL16, {'OPCD': 68}))

class hbra(Instruction):     machine_inst = OPCD_LBL9_I16; params = {'OPCD': 8}; cycles = (1, 15, 0)

class hbrr(DispatchInstruction):
    cycles = (1, 15, 0)
    dispatch = (
        (OPCD_ROA_I16,    {'OPCD': 9}),
        (OPCD_LBL9_LBL16, {'OPCD': 9}))

class ila(Instruction):      machine_inst = OPCD_I18_T; params = {'OPCD': 33}; cycles = (0, 2, 0)
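
# Hedged illustration of consuming the table above: a rough static latency
# estimate for an instruction sequence. Reading cycles[1] as the instruction
# latency is an assumption made for this sketch, not CorePy's documented
# meaning of the tuple.
#
#     def estimate_latency(instruction_classes):
#         return sum(cls.cycles[1] for cls in instruction_classes)
#
#     # e.g. load, add, store: 6 + 2 + 6 = 14 cycles under this assumption
#     estimate_latency([lqx, a, stqx])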
from .fetchers import NUPermissionsFetcher
from .fetchers import NUMetadatasFetcher
from .fetchers import NUGlobalMetadatasFetcher

from bambou import NURESTObject


class NUVMResync(NURESTObject):
    """ Represents a VMResync in the VSD

        Notes:
            Provide information about the state of a VM resync request.
    """

    __rest_name__ = "resync"
    __resource_name__ = "resync"

    ## Constants

    CONST_STATUS_IN_PROGRESS = "IN_PROGRESS"

    CONST_ENTITY_SCOPE_GLOBAL = "GLOBAL"

    CONST_STATUS_SUCCESS = "SUCCESS"

    CONST_ENTITY_SCOPE_ENTERPRISE = "ENTERPRISE"

    def __init__(self, **kwargs):
        """ Initializes a VMResync instance

            Notes:
                You can specify all parameters while calling this method.
                A special argument named `data` will enable you to load the
                object from a Python dictionary

            Examples:
                >>> vmresync = NUVMResync(id=u'xxxx-xxx-xxx-xxx', name=u'VMResync')
                >>> vmresync = NUVMResync(data=my_dict)
        """

        super(NUVMResync, self).__init__()

        # Read/Write Attributes

        self._last_request_timestamp = None
        self._last_time_resync_initiated = None
        self._last_updated_by = None
        self._last_updated_date = None
        self._embedded_metadata = None
        self._entity_scope = None
        self._creation_date = None
        self._status = None
        self._owner = None
        self._external_id = None

        self.expose_attribute(local_name="last_request_timestamp", remote_name="lastRequestTimestamp", attribute_type=int, is_required=False, is_unique=False)
        self.expose_attribute(local_name="last_time_resync_initiated", remote_name="lastTimeResyncInitiated", attribute_type=int, is_required=False, is_unique=False)
        self.expose_attribute(local_name="last_updated_by", remote_name="lastUpdatedBy", attribute_type=str, is_required=False, is_unique=False)
        self.expose_attribute(local_name="last_updated_date", remote_name="lastUpdatedDate", attribute_type=str, is_required=False, is_unique=False)
        self.expose_attribute(local_name="embedded_metadata", remote_name="embeddedMetadata", attribute_type=list, is_required=False, is_unique=False)
        self.expose_attribute(local_name="entity_scope", remote_name="entityScope", attribute_type=str, is_required=False, is_unique=False, choices=[u'ENTERPRISE', u'GLOBAL'])
        self.expose_attribute(local_name="creation_date", remote_name="creationDate", attribute_type=str, is_required=False, is_unique=False)
        self.expose_attribute(local_name="status", remote_name="status", attribute_type=str, is_required=False, is_unique=False, choices=[u'IN_PROGRESS', u'SUCCESS'])
        self.expose_attribute(local_name="owner", remote_name="owner", attribute_type=str, is_required=False, is_unique=False)
        self.expose_attribute(local_name="external_id", remote_name="externalID", attribute_type=str, is_required=False, is_unique=True)

        # Fetchers

        self.permissions = NUPermissionsFetcher.fetcher_with_object(parent_object=self, relationship="child")
        self.metadatas = NUMetadatasFetcher.fetcher_with_object(parent_object=self, relationship="child")
        self.global_metadatas = NUGlobalMetadatasFetcher.fetcher_with_object(parent_object=self, relationship="child")

        self._compute_args(**kwargs)

    # Properties

    @property
    def last_request_timestamp(self):
        """ Get last_request_timestamp value.

            Notes:
                Time of the last timestamp received

                This attribute is named `lastRequestTimestamp` in VSD API.
        """
        return self._last_request_timestamp

    @last_request_timestamp.setter
    def last_request_timestamp(self, value):
        """ Set last_request_timestamp value.

            Notes:
                Time of the last timestamp received

                This attribute is named `lastRequestTimestamp` in VSD API.
        """
        self._last_request_timestamp = value

    @property
    def last_time_resync_initiated(self):
        """ Get last_time_resync_initiated value.

            Notes:
                Time that the resync was initiated

                This attribute is named `lastTimeResyncInitiated` in VSD API.
        """
        return self._last_time_resync_initiated

    @last_time_resync_initiated.setter
    def last_time_resync_initiated(self, value):
        """ Set last_time_resync_initiated value.

            Notes:
                Time that the resync was initiated

                This attribute is named `lastTimeResyncInitiated` in VSD API.
        """
        self._last_time_resync_initiated = value

    @property
    def last_updated_by(self):
        """ Get last_updated_by value.

            Notes:
                ID of the user who last updated the object.

                This attribute is named `lastUpdatedBy` in VSD API.
        """
        return self._last_updated_by

    @last_updated_by.setter
    def last_updated_by(self, value):
        """ Set last_updated_by value.

            Notes:
                ID of the user who last updated the object.

                This attribute is named `lastUpdatedBy` in VSD API.
        """
        self._last_updated_by = value

    @property
    def last_updated_date(self):
        """ Get last_updated_date value.

            Notes:
                Time stamp when this object was last updated.

                This attribute is named `lastUpdatedDate` in VSD API.
        """
        return self._last_updated_date

    @last_updated_date.setter
    def last_updated_date(self, value):
        """ Set last_updated_date value.

            Notes:
                Time stamp when this object was last updated.

                This attribute is named `lastUpdatedDate` in VSD API.
        """
        self._last_updated_date = value

    @property
    def embedded_metadata(self):
        """ Get embedded_metadata value.

            Notes:
                Metadata objects associated with this entity. This will contain a list of Metadata objects if the API request is made using the special flag to enable the embedded Metadata feature. Only a maximum of Metadata objects is returned based on the value set in the system configuration.

                This attribute is named `embeddedMetadata` in VSD API.
        """
        return self._embedded_metadata

    @embedded_metadata.setter
    def embedded_metadata(self, value):
        """ Set embedded_metadata value.

            Notes:
                Metadata objects associated with this entity. This will contain a list of Metadata objects if the API request is made using the special flag to enable the embedded Metadata feature. Only a maximum of Metadata objects is returned based on the value set in the system configuration.

                This attribute is named `embeddedMetadata` in VSD API.
        """
        self._embedded_metadata = value

    @property
    def entity_scope(self):
        """ Get entity_scope value.

            Notes:
                Specify if scope of entity is Data center or Enterprise level

                This attribute is named `entityScope` in VSD API.
        """
        return self._entity_scope

    @entity_scope.setter
    def entity_scope(self, value):
        """ Set entity_scope value.

            Notes:
                Specify if scope of entity is Data center or Enterprise level

                This attribute is named `entityScope` in VSD API.
        """
        self._entity_scope = value

    @property
    def creation_date(self):
        """ Get creation_date value.

            Notes:
                Time stamp when this object was created.

                This attribute is named `creationDate` in VSD API.
        """
        return self._creation_date

    @creation_date.setter
    def creation_date(self, value):
        """ Set creation_date value.

            Notes:
                Time stamp when this object was created.

                This attribute is named `creationDate` in VSD API.
        """
        self._creation_date = value

    @property
    def status(self):
        """ Get status value.

            Notes:
                Status of the resync
        """
        return self._status

    @status.setter
    def status(self, value):
        """ Set status value.

            Notes:
                Status of the resync
        """
        self._status = value

    @property
    def owner(self):
        """ Get owner value.

            Notes:
                Identifies the user that has created this object.
        """
        return self._owner

    @owner.setter
    def owner(self, value):
        """ Set owner value.

            Notes:
                Identifies the user that has created this object.
        """
        self._owner = value

    @property
    def external_id(self):
        """ Get external_id value.

            Notes:
                External object ID. Used for integration with third party systems

                This attribute is named `externalID` in VSD API.
        """
        return self._external_id

    @external_id.setter
    def external_id(self, value):
        """ Set external_id value.

            Notes:
                External object ID. Used for integration with third party systems

                This attribute is named `externalID` in VSD API.
        """
        self._external_id = value
from mock import patch
from nose.tools import eq_

from helper import TestCase

import appvalidator.submain as submain


class TestSubmainPackage(TestCase):

    @patch("appvalidator.submain.test_inner_package",
           lambda x, z: "success")
    def test_package_pass(self):
        "Tests the test_package function with simple data"
        self.setup_err()
        name = "tests/resources/submain/install_rdf.xpi"
        with open(name) as pack:
            result = submain.test_package(self.err, pack, name)
        self.assert_silent()
        eq_(result, "success")

    @patch("appvalidator.submain.test_inner_package",
           lambda x, z: "success")
    def test_package_not_zip(self):
        # Renamed from a duplicate `test_package_corrupt`, which the later
        # definition below would otherwise shadow, so this test never ran.
        "Tests that the test_package function fails with a non-zip"
        self.setup_err()
        name = "tests/resources/junk.xpi"
        with open(name) as pack:
            result = submain.test_package(self.err, pack, name)
        self.assert_failed()

    def test_package_corrupt(self):
        "Tests that the test_package function fails with a corrupt file"
        self.setup_err()
        name = "tests/resources/corrupt.xpi"
        result = submain.test_package(self.err, name, name)
        self.assert_failed(with_errors=True, with_warnings=True)
from DBSlayer import Query


def get_type_name(type_id):
    t = get_type(type_id)
    if not t:
        return None
    return t['name']


def get_type(type_id):
    q = "SELECT id, type " \
        "FROM asset_types WHERE id=%(type_id)s;" % locals()
    query = Query(q)
    if len(query) != 1:
        return None
    ret = {'id': type_id,
           'name': query['type'][0]}
    return ret


def get_types():
    # No interpolation needed here, so the spurious `% locals()` was dropped.
    q = "SELECT id, type " \
        "FROM asset_types;"
    query = Query(q)
    if not len(query):
        return None
    ret = []
    for x in query:
        d = {'id': query[x]['id'],
             'name': query[x]['type']}
        ret.append(d)
    return ret


def test():
    import sys
    try:
        type_id = sys.argv[1]
    except IndexError:
        print 'Required test parameters: type_id'
        sys.exit(1)
    print 'Types:', get_types()
    print 'type_id %s, type_name %s' % (type_id, get_type_name(type_id))
    print get_type(type_id)


if __name__ == '__main__':
    test()
'''
Production Configurations

- Use djangosecure
- Use mailgun to send emails
- Use redis
'''
from __future__ import absolute_import, unicode_literals

from django.utils import six

from .common import *  # noqa


SECRET_KEY = env("DJANGO_SECRET_KEY")

SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')

INSTALLED_APPS += ("djangosecure", )

MIDDLEWARE_CLASSES = (
    # Make sure djangosecure.middleware.SecurityMiddleware is listed first
    'djangosecure.middleware.SecurityMiddleware',
) + MIDDLEWARE_CLASSES

SECURE_HSTS_SECONDS = 60
SECURE_HSTS_INCLUDE_SUBDOMAINS = env.bool(
    "DJANGO_SECURE_HSTS_INCLUDE_SUBDOMAINS", default=True)
SECURE_FRAME_DENY = env.bool("DJANGO_SECURE_FRAME_DENY", default=True)
SECURE_CONTENT_TYPE_NOSNIFF = env.bool(
    "DJANGO_SECURE_CONTENT_TYPE_NOSNIFF", default=True)
SECURE_BROWSER_XSS_FILTER = True
SESSION_COOKIE_SECURE = False
SESSION_COOKIE_HTTPONLY = True
SECURE_SSL_REDIRECT = env.bool("DJANGO_SECURE_SSL_REDIRECT", default=True)

ALLOWED_HOSTS = ["*"]

DEFAULT_FROM_EMAIL = env(
    'DJANGO_DEFAULT_FROM_EMAIL',
    default='{{cookiecutter.project_name}} <noreply@{{cookiecutter.domain_name}}>')
EMAIL_BACKEND = 'django_mailgun.MailgunBackend'
MAILGUN_ACCESS_KEY = env('DJANGO_MAILGUN_API_KEY')
MAILGUN_SERVER_NAME = env('DJANGO_MAILGUN_SERVER_NAME')
EMAIL_SUBJECT_PREFIX = env(
    "DJANGO_EMAIL_SUBJECT_PREFIX", default='[{{cookiecutter.project_name}}] ')
SERVER_EMAIL = env('DJANGO_SERVER_EMAIL', default=DEFAULT_FROM_EMAIL)

TEMPLATES[0]['OPTIONS']['loaders'] = [
    ('django.template.loaders.cached.Loader', [
        'django.template.loaders.filesystem.Loader',
        'django.template.loaders.app_directories.Loader',
    ]),
]

CACHES = {
    'default': {
        'BACKEND': 'redis_cache.RedisCache',
        'LOCATION': [
            'redis:6379',
        ],
        'OPTIONS': {
            'DB': 1,
            'PARSER_CLASS': 'redis.connection.HiredisParser',
            'CONNECTION_POOL_CLASS': 'redis.BlockingConnectionPool',
            'CONNECTION_POOL_CLASS_KWARGS': {
                'max_connections': 50,
                'timeout': 20,
            },
            'MAX_CONNECTIONS': 1000,
            'PICKLE_VERSION': -1,
        },
    },
}

STATIC_URL = '/static/'
MEDIA_URL = '/media/'
STATIC_ROOT = '/static'
MEDIA_ROOT = '/media'
STATICFILES_DIRS = (
    unicode(APPS_DIR.path("static")),
)

{% if cookiecutter.use_celery %}
BROKER_URL = "amqp://guest:guest@rabbitmq:5672//"
{% endif %}

{% if cookiecutter.use_sentry %}
RAVEN_CONFIG = {
    'dsn': env("SENTRY_URL"),
}
INSTALLED_APPS = INSTALLED_APPS + (
    'raven.contrib.django.raven_compat',
)
{% endif %}
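
# Hedged sketch of how the env() lookups above are satisfied in practice:
# django-environ reads them from the process environment, and required keys
# raise ImproperlyConfigured when missing. The values below are illustrative
# placeholders, not real credentials.
#
#     import os
#     os.environ.setdefault('DJANGO_SECRET_KEY', 'change-me')
#     os.environ.setdefault('DJANGO_MAILGUN_API_KEY', 'key-xxxx')
#     os.environ.setdefault('DJANGO_MAILGUN_SERVER_NAME', 'mg.example.com')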
"""Package containing the different outputs. Each output type is defined inside a module. """
from collections import namedtuple

from blinkpy.common.net.results_fetcher import TestResultsFetcher

BuilderStep = namedtuple('BuilderStep', ['build', 'step_name'])


class MockTestResultsFetcher(TestResultsFetcher):

    def __init__(self):
        super(MockTestResultsFetcher, self).__init__()
        self._canned_results = {}
        self._canned_retry_summary_json = {}
        self._webdriver_results = {}
        self.fetched_builds = []
        self.fetched_webdriver_builds = []
        self._layout_test_step_name = 'blink_web_tests (with patch)'

    def set_results(self, build, results, step_name=None):
        step_name = step_name or self.get_layout_test_step_name(build)
        step = BuilderStep(build=build, step_name=step_name)
        self._canned_results[step] = results

    def fetch_results(self, build, full=False, step_name=None):
        step_name = step_name or self.get_layout_test_step_name(build)
        step = BuilderStep(build=build, step_name=step_name)
        self.fetched_builds.append(step)
        return self._canned_results.get(step)

    def set_results_to_resultdb(self, build, results):
        self._canned_results[build.build_id] = results

    def fetch_results_from_resultdb(self, host, builds, predicate):
        rv = []
        for build in builds:
            results = self._canned_results.get(build.build_id)
            if results:
                rv.extend(results)
        return rv

    def set_webdriver_test_results(self, build, m, results):
        self._webdriver_results[(build, m)] = results

    def fetch_webdriver_test_results(self, build, m):
        self.fetched_webdriver_builds.append((build, m))
        return self._webdriver_results.get((build, m))

    def set_retry_sumary_json(self, build, content):
        self._canned_retry_summary_json[build] = content

    def fetch_retry_summary_json(self, build):
        return self._canned_retry_summary_json.get(build)

    def set_layout_test_step_name(self, name):
        self._layout_test_step_name = name

    def get_layout_test_step_name(self, build):
        return self._layout_test_step_name
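
# Hedged usage sketch for the mock above: seed canned results, then read them
# back the way production code would. `FakeBuild` is a hypothetical stand-in
# for blinkpy's Build object, and the sketch assumes TestResultsFetcher()
# takes no constructor arguments (as the mock's own __init__ implies).
#
#     FakeBuild = namedtuple('FakeBuild',
#                            ['builder_name', 'build_number', 'build_id'])
#     fetcher = MockTestResultsFetcher()
#     build = FakeBuild('linux-rel', 100, '8001')
#     fetcher.set_results(build, {'tests': {}})
#     assert fetcher.fetch_results(build) == {'tests': {}}
#     assert fetcher.fetched_builds[0].step_name == 'blink_web_tests (with patch)'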
from datetime import datetime

import access
import util


@auth.requires_login()
def index():
    """Produces a list of the feedback obtained for a given venue,
    or for all venues."""
    venue_id = request.args(0)
    if venue_id == 'all':
        q = (db.submission.user == get_user_email())
    else:
        q = ((db.submission.user == get_user_email())
             & (db.submission.venue_id == venue_id))
    db.submission.id.represent = lambda x, r: A(
        T('View'), _class='btn',
        _href=URL('submission', 'view_own_submission', args=['v', r.id]))
    db.submission.id.label = T('Submission')
    db.submission.id.readable = True
    db.submission.venue_id.readable = True
    grid = SQLFORM.grid(
        q,
        fields=[db.submission.id, db.submission.venue_id,
                db.submission.date_created, db.submission.date_updated],
        csv=False, details=False, create=False,
        editable=False, deletable=False,
        args=request.args[:1],
        maxtextlength=24,
    )
    return dict(grid=grid)


@auth.requires_login()
def view_feedback():
    """Shows detailed feedback for a user in a venue.
    This controller accepts various types of arguments:
    * 's', submission_id
    * 'u', venue_id, username
    * 'v', venue_id (in which case, shows own submission to that venue)
    """
    if len(request.args) == 0:
        redirect(URL('default', 'index'))
    if request.args(0) == 's':
        # submission_id
        n_args = 2
        subm = db.submission(request.args(1)) or redirect(URL('default', 'index'))
        c = db.venue(subm.venue_id) or redirect(URL('default', 'index'))
        username = subm.user
    elif request.args(0) == 'v':
        # venue_id
        n_args = 2
        c = db.venue(request.args(1)) or redirect(URL('default', 'index'))
        username = get_user_email()
        subm = db((db.submission.user == username)
                  & (db.submission.venue_id == c.id)).select().first()
    else:
        # venue_id, username
        n_args = 3
        c = db.venue(request.args(1)) or redirect(URL('default', 'index'))
        username = request.args(2) or redirect(URL('default', 'index'))
        subm = db((db.submission.user == username)
                  & (db.submission.venue_id == c.id)).select().first()

    # Checks permissions.
    props = db(db.user_properties.user == get_user_email()).select().first()
    if props is None:
        session.flash = T('Not authorized.')
        redirect(URL('default', 'index'))
    is_author = (username == get_user_email())
    can_view_feedback = access.can_view_feedback(c, props) or is_author
    if not can_view_feedback:
        session.flash = T('Not authorized.')
        redirect(URL('default', 'index'))
    if not (access.can_view_feedback(c, props)
            or datetime.utcnow() > c.rate_close_date):
        session.flash = T('The ratings are not yet available.')
        redirect(URL('feedback', 'index', args=['all']))

    # Produces the link to edit the feedback.
    edit_feedback_link = None
    if subm is not None and access.can_observe(c, props):
        edit_feedback_link = A(T('Edit feedback'), _class='btn',
                               _href=URL('submission', 'edit_feedback',
                                         args=[subm.id]))
    # Produces the download link.
    download_link = None
    if subm is not None and c.allow_file_upload and subm.content is not None:
        if is_author:
            download_link = A(T('Download'), _class='btn',
                              _href=URL('submission', 'download_author',
                                        args=[subm.id, subm.content]))
        else:
            download_link = A(T('Download'), _class='btn',
                              _href=URL('submission', 'download_manager',
                                        args=[subm.id, subm.content]))
    venue_link = A(c.name, _href=URL('venues', 'view_venue', args=[c.id]))
    # Submission link.
    subm_link = None
    if subm is not None and c.allow_link_submission:
        subm_link = A(subm.link, _href=subm.link)
    # Submission content and feedback.
    subm_comment = None
    subm_feedback = None
    if subm is not None:
        raw_subm_comment = keystore_read(subm.comment)
        if raw_subm_comment is not None and len(raw_subm_comment) > 0:
            subm_comment = MARKMIN(raw_subm_comment)
        raw_feedback = keystore_read(subm.feedback)
        if raw_feedback is not None and len(raw_feedback) > 0:
            subm_feedback = MARKMIN(raw_feedback)
    # Display settings.
    db.submission.percentile.readable = True
    db.submission.comment.readable = True
    db.submission.feedback.readable = True
    if access.can_observe(c, props):
        db.submission.quality.readable = True
        db.submission.error.readable = True
    # Reads the grade information.
    submission_grade = submission_percentile = None
    review_grade = review_percentile = user_reputation = None
    final_grade = final_percentile = None
    assigned_grade = None
    if c.grades_released:
        grade_info = db((db.grades.user == username)
                        & (db.grades.venue_id == c.id)).select().first()
        if grade_info is not None:
            submission_grade = represent_quality(grade_info.submission_grade, None)
            submission_percentile = represent_percentage(grade_info.submission_percentile, None)
            review_grade = represent_quality_10(grade_info.accuracy, None)
            review_percentile = represent_percentage(grade_info.accuracy_percentile, None)
            user_reputation = represent_01_as_percentage(grade_info.reputation, None)
            final_grade = represent_quality(grade_info.grade, None)
            final_percentile = represent_percentage(grade_info.percentile, None)
            assigned_grade = represent_quality(grade_info.assigned_grade, None)
    # Makes a grid of comments.
    db.task.submission_name.readable = False
    db.task.assigned_date.readable = False
    db.task.completed_date.readable = False
    db.task.rejected.readable = True
    db.task.helpfulness.readable = db.task.helpfulness.writable = True
    # Prevent editing the comments; the only thing editable should be
    # the "is bogus" field.
    db.task.comments.writable = False
    db.task.comments.readable = True
    ranking_link = None
    if access.can_observe(c, props):
        db.task.user.readable = True
        db.task.completed_date.readable = True
        links = [
            dict(header=T('Review details'),
                 body=lambda r: A(T('View'), _class='btn',
                                  _href=URL('ranking', 'view_comparison',
                                            args=[r.id]))),
        ]
        details = False
        if subm is not None:
            ranking_link = A(T('details'),
                             _href=URL('ranking', 'view_comparisons_given_submission',
                                       args=[subm.id]))
        reviews_link = A(T('details'),
                         _href=URL('ranking', 'view_comparisons_given_user',
                                   args=[username, c.id]))
        db.task.user.represent = lambda v, r: A(
            v, _href=URL('ranking', 'view_comparisons_given_user',
                         args=[v, c.id], user_signature=True))
    else:
        user_reputation = None
        links = [
            dict(header=T('Review feedback'),
                 body=lambda r: A(T('Give feedback'), _class='btn',
                                  _href=URL('feedback', 'reply_to_review',
                                            args=[r.id], user_signature=True))),
        ]
        details = False
        ranking_link = None
        reviews_link = None
    if subm is not None:
        q = ((db.task.submission_id == subm.id)
             & (db.task.is_completed == True))
        # q = (db.task.submission_id == subm.id)
    else:
        q = (db.task.id == -1)
    grid = SQLFORM.grid(
        q,
        fields=[db.task.id, db.task.user, db.task.rejected,
                db.task.comments, db.task.helpfulness],
        details=details,
        csv=False, create=False, editable=False, deletable=False,
        searchable=False,
        links=links,
        args=request.args[:n_args],
        maxtextlength=24,
    )
    return dict(subm=subm, download_link=download_link, subm_link=subm_link,
                username=username, subm_comment=subm_comment,
                subm_feedback=subm_feedback,
                edit_feedback_link=edit_feedback_link,
                is_admin=is_user_admin(),
                submission_grade=submission_grade,
                submission_percentile=submission_percentile,
                review_grade=review_grade, review_percentile=review_percentile,
                user_reputation=user_reputation,
                final_grade=final_grade, final_percentile=final_percentile,
                assigned_grade=assigned_grade,
                venue_link=venue_link, grid=grid,
                ranking_link=ranking_link, reviews_link=reviews_link)


@auth.requires_signature()
def reply_to_review():
    t = db.task(request.args(0)) or redirect(URL('default', 'index'))
    db.task.submission_name.readable = False
    db.task.assigned_date.readable = False
    db.task.completed_date.readable = False
    db.task.comments.readable = False
    db.task.helpfulness.readable = db.task.helpfulness.writable = True
    db.task.feedback.readable = db.task.feedback.writable = True
    form = SQLFORM(db.task, record=t)
    form.vars.feedback = keystore_read(t.feedback)
    if form.process(onvalidation=validate_review_feedback(t)).accepted:
        session.flash = T('Updated.')
        redirect(URL('feedback', 'view_feedback', args=['s', t.submission_id]))
    link_to_submission = A(T('View submission'),
                           _href=URL('submission', 'view_own_submission',
                                     args=['v', t.submission_id]))
    review_comments = MARKMIN(keystore_read(t.comments))
    return dict(form=form, link_to_submission=link_to_submission,
                review_comments=review_comments)


def validate_review_feedback(t):
    def f(form):
        if not form.errors:
            feedback_id = keystore_update(t.feedback, form.vars.feedback)
            form.vars.feedback = feedback_id
    return f


@auth.requires_login()
def view_my_reviews():
    """This controller displays the reviews a user has written for a venue,
    along with the feedback they received."""
    c = db.venue(request.args(0)) or redirect(URL('rating', 'review_index'))
    link_to_venue = A(c.name, _href=URL('venues', 'view_venue', args=[c.id]))
    link_to_eval = A(T('My evaluation in this venue'), _class='btn',
                     _href=URL('feedback', 'view_feedback', args=['v', c.id]))
    q = ((db.task.user == get_user_email())
         & (db.task.venue_id == c.id))
    db.task.rejected.readable = True
    db.task.helpfulness.readable = True
    db.task.comments.readable = True
    db.task.feedback.readable = True
    # To prevent chopping
    db.task.submission_name.represent = represent_text_field
    grid = SQLFORM.grid(
        q,
        fields=[db.task.submission_name, db.task.rejected,
                db.task.helpfulness],
        details=True,
        editable=False, deletable=False, create=False,
        searchable=False, csv=False,
        args=request.args[:1],
        maxtextlength=24,
    )
    return dict(grid=grid, link_to_venue=link_to_venue,
                link_to_eval=link_to_eval)
from __future__ import print_function import shutil import os, sys import time import logging from .loaders import PythonLoader, YAMLLoader from .bundle import get_all_bundle_files from .exceptions import BuildError from .updater import TimestampUpdater from .merge import MemoryHunk from .version import get_manifest from .cache import FilesystemCache from .utils import set, StringIO __all__ = ('CommandError', 'CommandLineEnvironment', 'main') logging.getLogger('webassets.script').setLevel(logging.INFO) class CommandError(Exception): pass class Command(object): """Base-class for a command used by :class:`CommandLineEnvironment`. Each command being a class opens up certain possibilities with respect to subclassing and customizing the default CLI. """ def __init__(self, cmd_env): self.cmd = cmd_env def __getattr__(self, name): # Make stuff from cmd environment easier to access return getattr(self.cmd, name) def __call__(self, *args, **kwargs): raise NotImplementedError() class BuildCommand(Command): def __call__(self, bundles=None, output=None, directory=None, no_cache=None, manifest=None, production=None): """Build assets. ``bundles`` A list of bundle names. If given, only this list of bundles should be built. ``output`` List of (bundle, filename) 2-tuples. If given, only these bundles will be built, using the custom output filenames. Cannot be used with ``bundles``. ``directory`` Custom output directory to use for the bundles. The original basenames defined in the bundle ``output`` attribute will be used. If the ``output`` of the bundles are pointing to different directories, they will be offset by their common prefix. Cannot be used with ``output``. ``no_cache`` If set, a cache (if one is configured) will not be used. ``manifest`` If set, the given manifest instance will be used, instead of any that might have been configured in the Environment. The value passed will be resolved through ``get_manifest()``. If this fails, a file-based manifest will be used using the given value as the filename. ``production`` If set to ``True``, then :attr:`Environment.debug`` will forcibly be disabled (set to ``False``) during the build. """ # Validate arguments if bundles and output: raise CommandError( 'When specifying explicit output filenames you must ' 'do so for all bundles you want to build.') if directory and output: raise CommandError('A custom output directory cannot be ' 'combined with explicit output filenames ' 'for individual bundles.') if production: # TODO: Reset again (refactor commands to be classes) self.environment.debug = False # TODO: Oh how nice it would be to use the future options stack. if manifest is not None: try: manifest = get_manifest(manifest, env=self.environment) except ValueError: manifest = get_manifest( # abspath() is important, or this will be considered # relative to Environment.directory. "file:%s" % os.path.abspath(manifest), env=self.environment) self.environment.manifest = manifest # Use output as a dict. if output: output = dict(output) # Validate bundle names bundle_names = bundles if bundles else (output.keys() if output else []) for name in bundle_names: if not name in self.environment: raise CommandError( 'I do not know a bundle name named "%s".' % name) # Make a list of bundles to build, and the filename to write to. if bundle_names: # TODO: It's not ok to use an internal property here. bundles = [(n,b) for n, b in self.environment._named_bundles.items() if n in bundle_names] else: # Includes unnamed bundles as well. 
bundles = [(None, b) for b in self.environment] # Determine common prefix for use with ``directory`` option. if directory: prefix = os.path.commonprefix( [os.path.normpath(b.resolve_output()) for _, b in bundles if b.output]) # dirname() gives the right value for a single file. prefix = os.path.dirname(prefix) to_build = [] for name, bundle in bundles: # TODO: We really should support this. This error here # is just in place of a less understandable error that would # otherwise occur. if bundle.is_container and directory: raise CommandError( 'A custom output directory cannot currently be ' 'used with container bundles.') # Determine which filename to use, if not the default. overwrite_filename = None if output: overwrite_filename = output[name] elif directory: offset = os.path.normpath( bundle.resolve_output())[len(prefix)+1:] overwrite_filename = os.path.join(directory, offset) to_build.append((bundle, overwrite_filename, name,)) # Build. built = [] for bundle, overwrite_filename, name in to_build: if name: # A name is not necessary available of the bundle was # registered without one. self.log.info("Building bundle: %s (to %s)" % ( name, overwrite_filename or bundle.output)) else: self.log.info("Building bundle: %s" % bundle.output) try: if not overwrite_filename: with bundle.bind(self.environment): bundle.build(force=True, disable_cache=no_cache) else: # TODO: Rethink how we deal with container bundles here. # As it currently stands, we write all child bundles # to the target output, merged (which is also why we # create and force writing to a StringIO instead of just # using the ``Hunk`` objects that build() would return # anyway. output = StringIO() with bundle.bind(self.environment): bundle.build(force=True, output=output, disable_cache=no_cache) if directory: # Only auto-create directories in this mode. output_dir = os.path.dirname(overwrite_filename) if not os.path.exists(output_dir): os.makedirs(output_dir) MemoryHunk(output.getvalue()).save(overwrite_filename) built.append(bundle) except BuildError as e: self.log.error("Failed, error was: %s" % e) if len(built): self.event_handlers['post_build']() if len(built) != len(to_build): return 2 class WatchCommand(Command): def __call__(self, loop=None): """Watch assets for changes. ``loop`` A callback, taking no arguments, to be called once every loop iteration. Can be useful to integrate the command with other code. If not specified, the loop wil call ``time.sleep()``. """ # TODO: This should probably also restart when the code changes. mtimes = {} try: # Before starting to watch for changes, also recognize changes # made while we did not run, and apply those immediately. for bundle in self.environment: print('Bringing up to date: %s' % bundle.output) bundle.build(force=False) self.log.info("Watching %d bundles for changes..." % len(self.environment)) while True: changed_bundles = self.check_for_changes(mtimes) built = [] for bundle in changed_bundles: print("Building bundle: %s ..." % bundle.output, end=' ') sys.stdout.flush() try: bundle.build(force=True) built.append(bundle) except BuildError as e: print("") print("Failed: %s" % e) else: print("done") if len(built): self.event_handlers['post_build']() do_end = loop() if loop else time.sleep(0.1) if do_end: break except KeyboardInterrupt: pass def check_for_changes(self, mtimes): # Do not update original mtimes dict right away, so that we detect # all bundle changes if a file is in multiple bundles. 
        _new_mtimes = mtimes.copy()
        changed_bundles = set()
        # TODO: An optimization was lost here, skipping a bundle once
        # a single file has been found to have changed. Bring back.
        for filename, bundles_to_update in self.yield_files_to_watch():
            stat = os.stat(filename)
            mtime = stat.st_mtime
            if sys.platform == "win32":
                mtime -= stat.st_ctime

            if mtimes.get(filename, mtime) != mtime:
                if callable(bundles_to_update):
                    # Hook for when file has changed
                    try:
                        bundles_to_update = bundles_to_update()
                    except EnvironmentError:
                        # EnvironmentError is what the hook is allowed to
                        # raise for a temporary problem, like an invalid config
                        import traceback
                        traceback.print_exc()
                        # Don't update anything, wait for another change
                        bundles_to_update = set()

                if bundles_to_update is True:
                    # Indicates all bundles should be rebuilt for the change
                    bundles_to_update = set(self.environment)
                changed_bundles |= bundles_to_update
                _new_mtimes[filename] = mtime
            _new_mtimes[filename] = mtime

        mtimes.update(_new_mtimes)
        return changed_bundles

    def yield_files_to_watch(self):
        for bundle in self.environment:
            for filename in get_all_bundle_files(bundle):
                yield filename, set([bundle])


class CleanCommand(Command):

    def __call__(self):
        """Delete generated assets.
        """
        self.log.info('Cleaning generated assets...')
        for bundle in self.environment:
            if not bundle.output:
                continue
            file_path = bundle.resolve_output(self.environment)
            if os.path.exists(file_path):
                os.unlink(file_path)
                self.log.info("Deleted asset: %s" % bundle.output)
        if isinstance(self.environment.cache, FilesystemCache):
            shutil.rmtree(self.environment.cache.directory)


class CheckCommand(Command):

    def __call__(self):
        """Check to see if assets need to be rebuilt.

        A non-zero exit status will be returned if any of the input files
        are newer (based on mtime) than their output file. This is intended
        to be used in pre-commit hooks.
        """
        needsupdate = False
        updater = self.environment.updater
        if not updater:
            self.log.debug('no updater configured, using TimestampUpdater')
            updater = TimestampUpdater()
        for bundle in self.environment:
            self.log.info('Checking asset: %s', bundle.output)
            if updater.needs_rebuild(bundle, self.environment):
                self.log.info('  needs update')
                needsupdate = True
        if needsupdate:
            sys.exit(-1)


class CommandLineEnvironment(object):
    """Implements the core functionality for a command line frontend to
    ``webassets``, abstracted in a way to allow frameworks to integrate the
    functionality into their own tools, for example, as a Django management
    command, or a command for ``Flask-Script``.
    """

    def __init__(self, env, log, post_build=None, commands=None):
        self.environment = env
        self.log = log
        self.event_handlers = dict(post_build=lambda: True)
        if callable(post_build):
            self.event_handlers['post_build'] = post_build

        # Instantiate each command
        command_def = self.DefaultCommands.copy()
        command_def.update(commands or {})
        self.commands = {}
        for name, construct in command_def.items():
            if not construct:
                continue
            if not isinstance(construct, (list, tuple)):
                construct = [construct, (), {}]
            self.commands[name] = construct[0](
                self, *construct[1], **construct[2])

    def __getattr__(self, item):
        # Allow method-like access to commands.
        if item in self.commands:
            return self.commands[item]
        raise AttributeError(item)

    def invoke(self, command, args):
        """Invoke ``command``, or throw a CommandError.

        This is essentially a simple validation mechanism. Feel free
        to call the individual command methods manually.
        """
        try:
            function = self.commands[command]
        except KeyError as e:
            raise CommandError('unknown command: %s' % e)
        else:
            return function(**args)

    # List of commands installed
    DefaultCommands = {
        'build': BuildCommand,
        'watch': WatchCommand,
        'clean': CleanCommand,
        'check': CheckCommand
    }


class GenericArgparseImplementation(object):
    """Generic command line utility to interact with a webassets
    environment.

    This is effectively a reference implementation of a command line utility
    based on the ``CommandLineEnvironment`` class. Implementers may find it
    feasible to simply base their own command line utility on this, rather
    than implementing something custom on top of ``CommandLineEnvironment``.
    In fact, if that is possible, you are encouraged to do so for greater
    consistency across implementations.
    """

    class WatchCommand(WatchCommand):
        """Extended watch command that also looks at the config file
        itself."""

        def __init__(self, cmd_env, argparse_ns):
            WatchCommand.__init__(self, cmd_env)
            self.ns = argparse_ns

        def yield_files_to_watch(self):
            for result in WatchCommand.yield_files_to_watch(self):
                yield result
            # If the config changes, rebuild all bundles
            if getattr(self.ns, 'config', None):
                yield self.ns.config, self.reload_config

        def reload_config(self):
            try:
                self.cmd.environment = \
                    YAMLLoader(self.ns.config).load_environment()
            except Exception as e:
                raise EnvironmentError(e)
            return True

    def __init__(self, env=None, log=None, prog=None, no_global_options=False):
        try:
            import argparse
        except ImportError:
            raise RuntimeError(
                'The webassets command line now requires the '
                '"argparse" library on Python versions <= 2.6.')
        else:
            self.argparse = argparse
        self.env = env
        self.log = log
        self._construct_parser(prog, no_global_options)

    def _construct_parser(self, prog=None, no_global_options=False):
        self.parser = parser = self.argparse.ArgumentParser(
            description="Manage assets.",
            prog=prog)

        if not no_global_options:
            # Start with the base arguments that are valid for any command.
            # XXX: Add those to the subparser?
            parser.add_argument("-v", dest="verbose", action="store_true",
                                help="be verbose")
            parser.add_argument("-q", action="store_true", dest="quiet",
                                help="be quiet")
            if self.env is None:
                loadenv = parser.add_mutually_exclusive_group()
                loadenv.add_argument("-c", "--config", dest="config",
                                     help="read environment from a YAML file")
                loadenv.add_argument("-m", "--module", dest="module",
                                     help="read environment from a Python module")

        # Add subparsers.
        subparsers = parser.add_subparsers(dest='command')
        for command in CommandLineEnvironment.DefaultCommands.keys():
            command_parser = subparsers.add_parser(command)
            maker = getattr(self, 'make_%s_parser' % command, False)
            if maker:
                maker(command_parser)

    @staticmethod
    def make_build_parser(parser):
        parser.add_argument(
            'bundles', nargs='*', metavar='BUNDLE',
            help='Optional bundle names to process. If none are '
                 'specified, then all known bundles will be built.')
        parser.add_argument(
            '--output', '-o', nargs=2, action='append',
            metavar=('BUNDLE', 'FILE'),
            help='Build the given bundle, and use a custom output '
                 'file. Can be given multiple times.')
        parser.add_argument(
            '--directory', '-d',
            help='Write built files to this directory, using the '
                 'basename defined by the bundle. Will offset '
                 'the original bundle output paths on their common '
                 'prefix. Cannot be used with --output.')
        parser.add_argument(
            '--no-cache', action='store_true',
            help='Do not use a cache that might be configured.')
        parser.add_argument(
            '--manifest',
            help='Write a manifest to the given file. Also supports '
                 'the id:arg format, if you want to use a different '
                 'manifest implementation.')
        parser.add_argument(
            '--production', action='store_true',
            help='Forcibly turn off debug mode for the build. This '
                 'only has an effect if debug is set to "merge".')

    def _setup_logging(self, ns):
        if self.log:
            log = self.log
        else:
            log = logging.getLogger('webassets.script')
            if not log.handlers:
                # In theory, this could run multiple times (e.g. tests)
                handler = logging.StreamHandler()
                log.addHandler(handler)
                # Note that setting the level filter at the handler level is
                # better than at the logger level, since this is "our"
                # handler; we created it for the purpose of providing a
                # default output. The logger itself may be modified by the
                # user.
                handler.setLevel(logging.DEBUG if ns.verbose else (
                    logging.WARNING if ns.quiet else logging.INFO))
        return log

    def _setup_assets_env(self, ns, log):
        env = self.env
        if env is None:
            assert not (ns.module and ns.config)
            if ns.module:
                env = PythonLoader(ns.module).load_environment()
            if ns.config:
                env = YAMLLoader(ns.config).load_environment()
        return env

    def _setup_cmd_env(self, assets_env, log, ns):
        return CommandLineEnvironment(assets_env, log, commands={
            'watch': (GenericArgparseImplementation.WatchCommand, (ns,), {})
        })

    def _prepare_command_args(self, ns):
        # Prepare a dict of arguments cleaned of values that are not
        # command-specific, and which the command method would not accept.
        args = vars(ns).copy()
        for action in self.parser._actions:
            dest = action.dest
            if dest in args:
                del args[dest]
        return args

    def run_with_ns(self, ns):
        log = self._setup_logging(ns)
        env = self._setup_assets_env(ns, log)
        if env is None:
            raise CommandError(
                "Error: No environment given or found. Maybe use -m?")
        cmd = self._setup_cmd_env(env, log, ns)

        # Run the selected command
        args = self._prepare_command_args(ns)
        return cmd.invoke(ns.command, args)

    def run_with_argv(self, argv):
        try:
            ns = self.parser.parse_args(argv)
        except SystemExit as e:
            # We do not want the main() function to exit the program.
            # See run() instead.
            return e.args[0]
        return self.run_with_ns(ns)

    def main(self, argv):
        """Parse the given command line.

        The commandline is expected to NOT include what would be
        sys.argv[0].
        """
        try:
            return self.run_with_argv(argv)
        except CommandError as e:
            print(e)
            return 1


def main(argv, env=None):
    """Execute the generic version of the command line interface. You only
    need to work directly with ``GenericArgparseImplementation`` if you
    desire to customize things.

    If no environment is given, additional arguments will be supported to
    allow the user to specify/construct the environment on the command line.
    """
    return GenericArgparseImplementation(env).main(argv)


def run():
    """Runs the command line interface via ``main``, then exits the process
    with a proper return code."""
    sys.exit(main(sys.argv[1:]) or 0)


if __name__ == '__main__':
    run()
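# Usage sketch: driving the command line defined above from Python rather
# than a shell. This is a minimal, hedged example; it assumes the module is
# importable as ``webassets.script`` (its upstream location) and that an
# ``assets.yml`` environment file exists. Both are illustrative assumptions.
from webassets.script import main

# Equivalent to `webassets -c assets.yml build`; returns the exit status
# instead of exiting the process (see run() for the exiting variant).
status = main(['-c', 'assets.yml', 'build'])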
from .image import Image
from .product_category import ProductCategory
from .supplier import Supplier, PaymentMethod
from .product import Product
from .product import ProductImage
from .enum_values import EnumValues
from .related_values import RelatedValues
from .customer import Customer
from .expense import Expense
from .incoming import Incoming
from .shipping import Shipping, ShippingLine
from .receiving import Receiving, ReceivingLine
from .inventory_transaction import InventoryTransaction, InventoryTransactionLine
from .purchase_order import PurchaseOrder, PurchaseOrderLine
from .sales_order import SalesOrder, SalesOrderLine
from .user import User
from .role import Role, roles_users
from .organization import Organization
from .inventory_in_out_link import InventoryInOutLink
from .aspects import update_menemonic
from .product_inventory import ProductInventory
import pygame
import pygame.locals


class Board(object):
    """
    The game board. Responsible for drawing the game window.
    """

    def __init__(self, width, height):
        """
        Board constructor. Prepares the game window.

        :param width: width in pixels
        :param height: height in pixels
        """
        self.surface = pygame.display.set_mode((width, height), 0, 32)
        pygame.display.set_caption('Game of life')

    def draw(self, *args):
        """
        Draws the game window.

        :param args: list of objects to draw
        """
        background = (0, 0, 0)
        self.surface.fill(background)
        for drawable in args:
            drawable.draw_on(self.surface)

        # Only at this point does the actual drawing in the game window
        # happen; up to here we were only declaring what to draw and how.
        pygame.display.update()


class GameOfLife(object):
    """
    Ties all the pieces of the game together.
    """

    def __init__(self, width, height, cell_size=10):
        """
        Prepare the game settings.

        :param width: board width measured in cells
        :param height: board height measured in cells
        :param cell_size: cell edge length in pixels
        """
        pygame.init()
        self.board = Board(width * cell_size, height * cell_size)
        # The clock we will use to control the speed of drawing
        # consecutive frames of the game.
        self.fps_clock = pygame.time.Clock()

    def run(self):
        """
        Main game loop.
        """
        while not self.handle_events():
            # Keep looping until we receive the signal to quit.
            self.board.draw()
            self.fps_clock.tick(15)

    def handle_events(self):
        """
        Handle system events; this is where we will interpret e.g. mouse
        movements.

        :return True if pygame reported a quit-game event
        """
        for event in pygame.event.get():
            if event.type == pygame.locals.QUIT:
                pygame.quit()
                return True


DEAD = 0
ALIVE = 1


class Population(object):
    """
    A population of cells.
    """

    def __init__(self, width, height, cell_size=10):
        """
        Prepare the population settings.

        :param width: board width measured in cells
        :param height: board height measured in cells
        :param cell_size: cell edge length in pixels
        """
        self.box_size = cell_size
        self.height = height
        self.width = width
        self.generation = self.reset_generation()

    def reset_generation(self):
        """
        Creates and returns an empty population matrix.
        """
        # Fill the list with columns in a loop; each column is in turn
        # filled with the value 0 (DEAD) in a loop.
        return [[DEAD for y in xrange(self.height)] for x in xrange(self.width)]

    def handle_mouse(self):
        # Read the state of the mouse buttons using a pygame function.
        buttons = pygame.mouse.get_pressed()
        if not any(buttons):
            # Ignore the event if no button is pressed.
            return

        # Add a live cell if the first mouse button is pressed; this way we
        # can not only add live cells but also remove them.
        alive = True if buttons[0] else False

        # Read the cursor position on the board, measured in pixels.
        x, y = pygame.mouse.get_pos()

        # Convert the pixel coordinates into cell coordinates in the matrix;
        # the player can click anywhere inside a box_size-wide square to
        # select a cell.
        x /= self.box_size
        y /= self.box_size

        # Set the cell's state in the matrix.
        self.generation[x][y] = ALIVE if alive else DEAD

    def draw_on(self, surface):
        """
        Draws the cells on the board.
        """
        for x, y in self.alive_cells():
            size = (self.box_size, self.box_size)
            position = (x * self.box_size, y * self.box_size)
            color = (255, 255, 255)
            thickness = 1
            pygame.draw.rect(surface, color,
                             pygame.locals.Rect(position, size), thickness)

    def alive_cells(self):
        """
        Generator yielding the coordinates of live cells.
        """
        for x in range(len(self.generation)):
            column = self.generation[x]
            for y in range(len(column)):
                if column[y] == ALIVE:
                    # If the cell is alive, yield its coordinates.
                    yield x, y


if __name__ == "__main__":
    game = GameOfLife(80, 40)
    game.run()
""" File-based Checkpoints implementations. """ import os import shutil from tornado.web import HTTPError from .checkpoints import ( Checkpoints, GenericCheckpointsMixin, ) from .fileio import FileManagerMixin from IPython.utils import tz from IPython.utils.path import ensure_dir_exists from IPython.utils.py3compat import getcwd from IPython.utils.traitlets import Unicode class FileCheckpoints(FileManagerMixin, Checkpoints): """ A Checkpoints that caches checkpoints for files in adjacent directories. Only works with FileContentsManager. Use GenericFileCheckpoints if you want file-based checkpoints with another ContentsManager. """ checkpoint_dir = Unicode( '.ipynb_checkpoints', config=True, help="""The directory name in which to keep file checkpoints This is a path relative to the file's own directory. By default, it is .ipynb_checkpoints """, ) root_dir = Unicode(config=True) def _root_dir_default(self): try: return self.parent.root_dir except AttributeError: return getcwd() # ContentsManager-dependent checkpoint API def create_checkpoint(self, contents_mgr, path): """Create a checkpoint.""" checkpoint_id = u'checkpoint' src_path = contents_mgr._get_os_path(path) dest_path = self.checkpoint_path(checkpoint_id, path) self._copy(src_path, dest_path) return self.checkpoint_model(checkpoint_id, dest_path) def restore_checkpoint(self, contents_mgr, checkpoint_id, path): """Restore a checkpoint.""" src_path = self.checkpoint_path(checkpoint_id, path) dest_path = contents_mgr._get_os_path(path) self._copy(src_path, dest_path) # ContentsManager-independent checkpoint API def rename_checkpoint(self, checkpoint_id, old_path, new_path): """Rename a checkpoint from old_path to new_path.""" old_cp_path = self.checkpoint_path(checkpoint_id, old_path) new_cp_path = self.checkpoint_path(checkpoint_id, new_path) if os.path.isfile(old_cp_path): self.log.debug( "Renaming checkpoint %s -> %s", old_cp_path, new_cp_path, ) with self.perm_to_403(): shutil.move(old_cp_path, new_cp_path) def delete_checkpoint(self, checkpoint_id, path): """delete a file's checkpoint""" path = path.strip('/') cp_path = self.checkpoint_path(checkpoint_id, path) if not os.path.isfile(cp_path): self.no_such_checkpoint(path, checkpoint_id) self.log.debug("unlinking %s", cp_path) with self.perm_to_403(): os.unlink(cp_path) def list_checkpoints(self, path): """list the checkpoints for a given file This contents manager currently only supports one checkpoint per file. 
""" path = path.strip('/') checkpoint_id = "checkpoint" os_path = self.checkpoint_path(checkpoint_id, path) if not os.path.isfile(os_path): return [] else: return [self.checkpoint_model(checkpoint_id, os_path)] # Checkpoint-related utilities def checkpoint_path(self, checkpoint_id, path): """find the path to a checkpoint""" path = path.strip('/') parent, name = ('/' + path).rsplit('/', 1) parent = parent.strip('/') basename, ext = os.path.splitext(name) filename = u"{name}-{checkpoint_id}{ext}".format( name=basename, checkpoint_id=checkpoint_id, ext=ext, ) os_path = self._get_os_path(path=parent) cp_dir = os.path.join(os_path, self.checkpoint_dir) with self.perm_to_403(): ensure_dir_exists(cp_dir) cp_path = os.path.join(cp_dir, filename) return cp_path def checkpoint_model(self, checkpoint_id, os_path): """construct the info dict for a given checkpoint""" stats = os.stat(os_path) last_modified = tz.utcfromtimestamp(stats.st_mtime) info = dict( id=checkpoint_id, last_modified=last_modified, ) return info # Error Handling def no_such_checkpoint(self, path, checkpoint_id): raise HTTPError( 404, u'Checkpoint does not exist: %s@%s' % (path, checkpoint_id) ) class GenericFileCheckpoints(GenericCheckpointsMixin, FileCheckpoints): """ Local filesystem Checkpoints that works with any conforming ContentsManager. """ def create_file_checkpoint(self, content, format, path): """Create a checkpoint from the current content of a file.""" path = path.strip('/') # only the one checkpoint ID: checkpoint_id = u"checkpoint" os_checkpoint_path = self.checkpoint_path(checkpoint_id, path) self.log.debug("creating checkpoint for %s", path) with self.perm_to_403(): self._save_file(os_checkpoint_path, content, format=format) # return the checkpoint info return self.checkpoint_model(checkpoint_id, os_checkpoint_path) def create_notebook_checkpoint(self, nb, path): """Create a checkpoint from the current content of a notebook.""" path = path.strip('/') # only the one checkpoint ID: checkpoint_id = u"checkpoint" os_checkpoint_path = self.checkpoint_path(checkpoint_id, path) self.log.debug("creating checkpoint for %s", path) with self.perm_to_403(): self._save_notebook(os_checkpoint_path, nb) # return the checkpoint info return self.checkpoint_model(checkpoint_id, os_checkpoint_path) def get_notebook_checkpoint(self, checkpoint_id, path): """Get a checkpoint for a notebook.""" path = path.strip('/') self.log.info("restoring %s from checkpoint %s", path, checkpoint_id) os_checkpoint_path = self.checkpoint_path(checkpoint_id, path) if not os.path.isfile(os_checkpoint_path): self.no_such_checkpoint(path, checkpoint_id) return { 'type': 'notebook', 'content': self._read_notebook( os_checkpoint_path, as_version=4, ), } def get_file_checkpoint(self, checkpoint_id, path): """Get a checkpoint for a file.""" path = path.strip('/') self.log.info("restoring %s from checkpoint %s", path, checkpoint_id) os_checkpoint_path = self.checkpoint_path(checkpoint_id, path) if not os.path.isfile(os_checkpoint_path): self.no_such_checkpoint(path, checkpoint_id) content, format = self._read_file(os_checkpoint_path, format=None) return { 'type': 'file', 'content': content, 'format': format, }
"""Support for monitoring emoncms feeds.""" from __future__ import annotations from datetime import timedelta from http import HTTPStatus import logging import requests import voluptuous as vol from homeassistant.components.sensor import ( PLATFORM_SCHEMA, SensorDeviceClass, SensorEntity, SensorStateClass, ) from homeassistant.const import ( CONF_API_KEY, CONF_ID, CONF_SCAN_INTERVAL, CONF_UNIT_OF_MEASUREMENT, CONF_URL, CONF_VALUE_TEMPLATE, POWER_WATT, STATE_UNKNOWN, ) from homeassistant.core import HomeAssistant from homeassistant.helpers import template import homeassistant.helpers.config_validation as cv from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType from homeassistant.util import Throttle _LOGGER = logging.getLogger(__name__) ATTR_FEEDID = "FeedId" ATTR_FEEDNAME = "FeedName" ATTR_LASTUPDATETIME = "LastUpdated" ATTR_LASTUPDATETIMESTR = "LastUpdatedStr" ATTR_SIZE = "Size" ATTR_TAG = "Tag" ATTR_USERID = "UserId" CONF_EXCLUDE_FEEDID = "exclude_feed_id" CONF_ONLY_INCLUDE_FEEDID = "include_only_feed_id" CONF_SENSOR_NAMES = "sensor_names" DECIMALS = 2 DEFAULT_UNIT = POWER_WATT MIN_TIME_BETWEEN_UPDATES = timedelta(seconds=5) ONLY_INCL_EXCL_NONE = "only_include_exclude_or_none" PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( { vol.Required(CONF_API_KEY): cv.string, vol.Required(CONF_URL): cv.string, vol.Required(CONF_ID): cv.positive_int, vol.Exclusive(CONF_ONLY_INCLUDE_FEEDID, ONLY_INCL_EXCL_NONE): vol.All( cv.ensure_list, [cv.positive_int] ), vol.Exclusive(CONF_EXCLUDE_FEEDID, ONLY_INCL_EXCL_NONE): vol.All( cv.ensure_list, [cv.positive_int] ), vol.Optional(CONF_SENSOR_NAMES): vol.All( {cv.positive_int: vol.All(cv.string, vol.Length(min=1))} ), vol.Optional(CONF_VALUE_TEMPLATE): cv.template, vol.Optional(CONF_UNIT_OF_MEASUREMENT, default=DEFAULT_UNIT): cv.string, } ) def get_id(sensorid, feedtag, feedname, feedid, feeduserid): """Return unique identifier for feed / sensor.""" return f"emoncms{sensorid}_{feedtag}_{feedname}_{feedid}_{feeduserid}" def setup_platform( hass: HomeAssistant, config: ConfigType, add_entities: AddEntitiesCallback, discovery_info: DiscoveryInfoType | None = None, ) -> None: """Set up the Emoncms sensor.""" apikey = config.get(CONF_API_KEY) url = config.get(CONF_URL) sensorid = config.get(CONF_ID) value_template = config.get(CONF_VALUE_TEMPLATE) config_unit = config.get(CONF_UNIT_OF_MEASUREMENT) exclude_feeds = config.get(CONF_EXCLUDE_FEEDID) include_only_feeds = config.get(CONF_ONLY_INCLUDE_FEEDID) sensor_names = config.get(CONF_SENSOR_NAMES) interval = config.get(CONF_SCAN_INTERVAL) if value_template is not None: value_template.hass = hass data = EmonCmsData(hass, url, apikey, interval) data.update() if data.data is None: return sensors = [] for elem in data.data: if exclude_feeds is not None and int(elem["id"]) in exclude_feeds: continue if include_only_feeds is not None and int(elem["id"]) not in include_only_feeds: continue name = None if sensor_names is not None: name = sensor_names.get(int(elem["id"]), None) if unit := elem.get("unit"): unit_of_measurement = unit else: unit_of_measurement = config_unit sensors.append( EmonCmsSensor( hass, data, name, value_template, unit_of_measurement, str(sensorid), elem, ) ) add_entities(sensors) class EmonCmsSensor(SensorEntity): """Implementation of an Emoncms sensor.""" def __init__( self, hass, data, name, value_template, unit_of_measurement, sensorid, elem ): """Initialize the sensor.""" if name is None: # Suppress ID in sensor name if 
it's 1, since most people won't # have more than one EmonCMS source and it's redundant to show the # ID if there's only one. id_for_name = "" if str(sensorid) == "1" else sensorid # Use the feed name assigned in EmonCMS or fall back to the feed ID feed_name = elem.get("name") or f"Feed {elem['id']}" self._name = f"EmonCMS{id_for_name} {feed_name}" else: self._name = name self._identifier = get_id( sensorid, elem["tag"], elem["name"], elem["id"], elem["userid"] ) self._hass = hass self._data = data self._value_template = value_template self._unit_of_measurement = unit_of_measurement self._sensorid = sensorid self._elem = elem if unit_of_measurement == "kWh": self._attr_device_class = SensorDeviceClass.ENERGY self._attr_state_class = SensorStateClass.TOTAL_INCREASING elif unit_of_measurement == "W": self._attr_device_class = SensorDeviceClass.POWER self._attr_state_class = SensorStateClass.MEASUREMENT if self._value_template is not None: self._state = self._value_template.render_with_possible_json_value( elem["value"], STATE_UNKNOWN ) else: self._state = round(float(elem["value"]), DECIMALS) @property def name(self): """Return the name of the sensor.""" return self._name @property def native_unit_of_measurement(self): """Return the unit of measurement of this entity, if any.""" return self._unit_of_measurement @property def native_value(self): """Return the state of the device.""" return self._state @property def extra_state_attributes(self): """Return the attributes of the sensor.""" return { ATTR_FEEDID: self._elem["id"], ATTR_TAG: self._elem["tag"], ATTR_FEEDNAME: self._elem["name"], ATTR_SIZE: self._elem["size"], ATTR_USERID: self._elem["userid"], ATTR_LASTUPDATETIME: self._elem["time"], ATTR_LASTUPDATETIMESTR: template.timestamp_local(float(self._elem["time"])), } def update(self): """Get the latest data and updates the state.""" self._data.update() if self._data.data is None: return elem = next( ( elem for elem in self._data.data if get_id( self._sensorid, elem["tag"], elem["name"], elem["id"], elem["userid"], ) == self._identifier ), None, ) if elem is None: return self._elem = elem if self._value_template is not None: self._state = self._value_template.render_with_possible_json_value( elem["value"], STATE_UNKNOWN ) else: self._state = round(float(elem["value"]), DECIMALS) class EmonCmsData: """The class for handling the data retrieval.""" def __init__(self, hass, url, apikey, interval): """Initialize the data object.""" self._apikey = apikey self._url = f"{url}/feed/list.json" self._interval = interval self._hass = hass self.data = None @Throttle(MIN_TIME_BETWEEN_UPDATES) def update(self): """Get the latest data from Emoncms.""" try: parameters = {"apikey": self._apikey} req = requests.get( self._url, params=parameters, allow_redirects=True, timeout=5 ) except requests.exceptions.RequestException as exception: _LOGGER.error(exception) return else: if req.status_code == HTTPStatus.OK: self.data = req.json() else: _LOGGER.error( "Please verify if the specified configuration value " "'%s' is correct! (HTTP Status_code = %d)", CONF_URL, req.status_code, )
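# A hedged sketch of the configuration accepted by PLATFORM_SCHEMA above,
# expressed as the dict Home Assistant builds from YAML; the URL, API key,
# feed ids, and names are made-up examples.
example_config = {
    "platform": "emoncms",
    "api_key": "abcdef0123456789",         # emoncms read API key
    "url": "https://emoncms.example.org",  # base URL of the emoncms server
    "id": 1,                               # sensor id prefix
    "include_only_feed_id": [1, 2],        # mutually exclusive with exclude_feed_id
    "sensor_names": {1: "House power", 2: "Solar power"},
    "unit_of_measurement": "W",
}
# PLATFORM_SCHEMA(example_config) would return the validated, normalized dict.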
import glob
import logging
import os
import subprocess

from plugins import BaseAligner
from yapsy.IPlugin import IPlugin
from assembly import get_qual_encoding


class Bowtie2Aligner(BaseAligner, IPlugin):
    def run(self):
        """
        Map READS to CONTIGS and return alignment.
        Set MERGED_PAIR to True if reads[1] is a merged paired end file
        """
        contig_file = self.data.contigfiles[0]
        reads = self.data.readfiles

        ## Index contigs
        prefix = os.path.join(self.outpath, 'bt2')
        cmd_args = [self.build_bin, '-f', contig_file, prefix]
        self.arast_popen(cmd_args, overrides=False)

        ### Align reads
        bamfiles = []
        for i, readset in enumerate(self.data.readsets):
            samfile = os.path.join(self.outpath, 'align.sam')
            reads = readset.files
            cmd_args = [self.executable, '-x', prefix, '-S', samfile,
                        '-p', self.process_threads_allowed]
            if len(reads) == 2:
                cmd_args += ['-1', reads[0], '-2', reads[1]]
            elif len(reads) == 1:
                cmd_args += ['-U', reads[0]]
            else:
                raise Exception('Bowtie plugin error')
            self.arast_popen(cmd_args, overrides=False)
            if not os.path.exists(samfile):
                raise Exception('Unable to complete alignment')

            ## Convert to BAM
            bamfile = samfile.replace('.sam', '.bam')
            cmd_args = ['samtools', 'view', '-bSho', bamfile, samfile]
            self.arast_popen(cmd_args)
            bamfiles.append(bamfile)

        ### Merge samfiles if multiple
        if len(bamfiles) > 1:
            bamfile = os.path.join(
                self.outpath,
                '{}_{}.bam'.format(os.path.basename(contig_file), i))
            self.arast_popen(['samtools', 'merge', bamfile] + bamfiles)
            if not os.path.exists(bamfile):
                raise Exception('Unable to complete alignment')
        else:
            bamfile = bamfiles[0]

        if not os.path.exists(bamfile):
            raise Exception('Unable to complete alignment')

        ## Convert back to sam
        samfile = bamfile.replace('.bam', '.sam')
        self.arast_popen(['samtools', 'view', '-h', '-o', samfile, bamfile])

        return {'alignment': samfile, 'alignment_bam': bamfile}
from __future__ import unicode_literals

import os
import os.path
import subprocess

from pre_commit.util import cmd_output


class PrefixedCommandRunner(object):
    """A PrefixedCommandRunner allows you to run subprocess commands with
    command substitution.

    For instance:
        PrefixedCommandRunner('/tmp/foo').run(['{prefix}foo.sh', 'bar', 'baz'])

    will run ['/tmp/foo/foo.sh', 'bar', 'baz']
    """

    def __init__(
            self,
            prefix_dir,
            popen=subprocess.Popen,
            makedirs=os.makedirs
    ):
        self.prefix_dir = prefix_dir.rstrip(os.sep) + os.sep
        self.__popen = popen
        self.__makedirs = makedirs

    def _create_path_if_not_exists(self):
        if not os.path.exists(self.prefix_dir):
            self.__makedirs(self.prefix_dir)

    def run(self, cmd, **kwargs):
        self._create_path_if_not_exists()
        replaced_cmd = [
            part.replace('{prefix}', self.prefix_dir) for part in cmd
        ]
        return cmd_output(*replaced_cmd, __popen=self.__popen, **kwargs)

    def path(self, *parts):
        path = os.path.join(self.prefix_dir, *parts)
        return os.path.normpath(path)

    def exists(self, *parts):
        return os.path.exists(self.path(*parts))

    @classmethod
    def from_command_runner(cls, command_runner, path_end):
        """Constructs a new command runner from an existing one by appending
        `path_end` to the command runner's prefix directory.
        """
        return cls(
            command_runner.path(path_end),
            popen=command_runner.__popen,
            makedirs=command_runner.__makedirs,
        )
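# A minimal demonstration of the prefix handling above; nothing is executed
# because run() is never called. The output shown assumes POSIX paths.
runner = PrefixedCommandRunner('/tmp/foo')
print(runner.path('bar', 'baz'))    # /tmp/foo/bar/baz

# Derive a runner rooted one level deeper, as a hook install might do.
hooks_runner = PrefixedCommandRunner.from_command_runner(runner, 'hooks')
print(hooks_runner.prefix_dir)      # /tmp/foo/hooks/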
""" Python Blueprint ================ Does not install python itself, only develop and setup tools. Contains pip helper for other blueprints to use. **Fabric environment:** .. code-block:: yaml blueprints: - blues.python """ from fabric.decorators import task from refabric.api import run, info from refabric.context_managers import sudo from . import debian __all__ = ['setup'] pip_log_file = '/tmp/pip.log' @task def setup(): """ Install python develop tools """ install() def install(): with sudo(): info('Install python dependencies') debian.apt_get('install', 'python-dev', 'python-setuptools') run('easy_install pip') run('touch {}'.format(pip_log_file)) debian.chmod(pip_log_file, mode=777) pip('install', 'setuptools', '--upgrade') def pip(command, *options): info('Running pip {}', command) run('pip {0} {1} -v --log={2} --log-file={2}'.format(command, ' '.join(options), pip_log_file))
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type

from ansible import __version__
from ansible.errors import AnsibleError
from distutils.version import LooseVersion
from operator import eq, ge, gt
from sys import version_info

try:
    from __main__ import display
except ImportError:
    from ansible.utils.display import Display
    display = Display()

version_requirement = '2.5.0.0'
version_tested_max = '2.7.5'
python3_required_version = '2.5.3'

if version_info[0] == 3 and not ge(LooseVersion(__version__),
                                   LooseVersion(python3_required_version)):
    raise AnsibleError(
        ('Ansible >= {} is required when using Python 3.\n'
         'Either downgrade to Python 2 or update your Ansible version to '
         '{}.').format(python3_required_version, python3_required_version))

if not ge(LooseVersion(__version__), LooseVersion(version_requirement)):
    raise AnsibleError(
        ('Trellis no longer supports Ansible {}.\n'
         'Please upgrade to Ansible {} or higher.').format(
            __version__, version_requirement))
elif gt(LooseVersion(__version__), LooseVersion(version_tested_max)):
    display.warning(
        u'Your Ansible version is {} but this version of Trellis has only '
        u'been tested for compatibility with Ansible {} -> {}. It is '
        u'advisable to check for Trellis updates or downgrade your Ansible '
        u'version.'.format(__version__, version_requirement,
                           version_tested_max))

if eq(LooseVersion(__version__), LooseVersion('2.5.0')):
    display.warning(
        u'Your Ansible version is {}. Consider upgrading your Ansible '
        u'version to avoid erroneous warnings such as `Removed restricted '
        u'key from module data...`'.format(__version__))

# Imported here so the version checks above run first at module load time.
from ansible.plugins.vars import BaseVarsPlugin


class VarsModule(BaseVarsPlugin):

    def get_vars(self, loader, path, entities, cache=True):
        return {}
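# A small illustration of the LooseVersion comparisons used above; the
# version strings are arbitrary examples.
from distutils.version import LooseVersion
from operator import ge, gt

assert ge(LooseVersion('2.5.0.0'), LooseVersion('2.5.0.0'))  # minimum met
assert gt(LooseVersion('2.7.6'), LooseVersion('2.7.5'))      # above tested max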
""" A Pygments lexer for Magpie. """ from setuptools import setup __author__ = 'Robert Nystrom' setup( name='Magpie', version='1.0', description=__doc__, author=__author__, packages=['magpie'], entry_points=''' [pygments.lexers] magpielexer = magpie:MagpieLexer ''' )