text: stringlengths (6–947k)
repo_name: stringlengths (5–100)
path: stringlengths (4–231)
language: stringclasses (1 value)
license: stringclasses (15 values)
size: int64 (6–947k)
score: float64 (0–0.34)
from __future__ import absolute_import

import logging

from requests.exceptions import RequestException

from sentry import options
from sentry.http import Session
from sentry.lang.native.utils import sdk_info_to_sdk_id

MAX_ATTEMPTS = 3

logger = logging.getLogger(__name__)


def lookup_system_symbols(symbols, sdk_info=None, cpu_name=None):
    """Looks for system symbols in the configured system server if enabled.
    If this fails or the server is disabled, `None` is returned.
    """
    if not options.get('symbolserver.enabled'):
        return

    url = '%s/lookup' % options.get('symbolserver.options')['url'].rstrip('/')
    sess = Session()
    symbol_query = {
        'sdk_id': sdk_info_to_sdk_id(sdk_info),
        'cpu_name': cpu_name,
        'symbols': symbols,
    }

    attempts = 0

    with sess:
        while 1:
            try:
                rv = sess.post(url, json=symbol_query)
                # If the symbols server does not know about the SDK at all
                # it will report a 404 here.  In that case just assume
                # that we did not find a match and do not retry.
                if rv.status_code == 404:
                    return None
                rv.raise_for_status()
                return rv.json()['symbols']
            except (IOError, RequestException):
                attempts += 1
                if attempts > MAX_ATTEMPTS:
                    logger.error('Failed to contact system symbol server',
                                 exc_info=True)
                    return
looker/sentry
src/sentry/lang/native/systemsymbols.py
Python
bsd-3-clause
1,526
0.000655
# ./_qdt.py
# -*- coding: utf-8 -*-
# PyXB bindings for NM:763e66503f6e9797a3b5522270417bad82c9c82c
# Generated 2015-02-11 21:35:49.975995 by PyXB version 1.2.4 using Python 2.6.9.final.0
# Namespace urn:oasis:names:specification:ubl:schema:xsd:QualifiedDataTypes-2 [xmlns:qdt]

from __future__ import unicode_literals
import pyxb
import pyxb.binding
import pyxb.binding.saxer
import io
import pyxb.utils.utility
import pyxb.utils.domutils
import sys
import pyxb.utils.six as _six

# Unique identifier for bindings created at the same time
_GenerationUID = pyxb.utils.utility.UniqueIdentifier('urn:uuid:2b2e2fd1-b225-11e4-b26c-14109fe53921')

# Version of PyXB used to generate the bindings
_PyXBVersion = '1.2.4'
# Generated bindings are not compatible across PyXB versions
if pyxb.__version__ != _PyXBVersion:
    raise pyxb.PyXBVersionError(_PyXBVersion)

# Import bindings for namespaces imported into schema

# NOTE: All namespace declarations are reserved within the binding
Namespace = pyxb.namespace.NamespaceForURI('urn:oasis:names:specification:ubl:schema:xsd:QualifiedDataTypes-2', create_if_missing=True)
Namespace.configureCategories(['typeBinding', 'elementBinding'])


def CreateFromDocument (xml_text, default_namespace=None, location_base=None):
    """Parse the given XML and use the document element to create a
    Python instance.

    @param xml_text An XML document.  This should be data (Python 2
    str or Python 3 bytes), or a text (Python 2 unicode or Python 3
    str) in the L{pyxb._InputEncoding} encoding.

    @keyword default_namespace The L{pyxb.Namespace} instance to use as the
    default namespace where there is no default namespace in scope.
    If unspecified or C{None}, the namespace of the module containing
    this function will be used.

    @keyword location_base: An object to be recorded as the base of all
    L{pyxb.utils.utility.Location} instances associated with events and
    objects handled by the parser.  You might pass the URI from which
    the document was obtained.
    """
    if pyxb.XMLStyle_saxer != pyxb._XMLStyle:
        dom = pyxb.utils.domutils.StringToDOM(xml_text)
        return CreateFromDOM(dom.documentElement, default_namespace=default_namespace)
    if default_namespace is None:
        default_namespace = Namespace.fallbackNamespace()
    saxer = pyxb.binding.saxer.make_parser(fallback_namespace=default_namespace, location_base=location_base)
    handler = saxer.getContentHandler()
    xmld = xml_text
    if isinstance(xmld, _six.text_type):
        xmld = xmld.encode(pyxb._InputEncoding)
    saxer.parse(io.BytesIO(xmld))
    instance = handler.rootObject()
    return instance


def CreateFromDOM (node, default_namespace=None):
    """Create a Python instance from the given DOM node.
    The node tag must correspond to an element declaration in this module.

    @deprecated: Forcing use of DOM interface is unnecessary; use L{CreateFromDocument}."""
    if default_namespace is None:
        default_namespace = Namespace.fallbackNamespace()
    return pyxb.binding.basis.element.AnyCreateFromDOM(node, default_namespace)
getodacu/eSENS-eDocument
profiles/e_confirmation/xb_request/_qdt.py
Python
mit
3,119
0.003847
import boto3
import logging

logger = logging.getLogger()
logger.setLevel(logging.INFO)


def lambda_handler(event, context):
    logger.info("RECEIVED EVENT: %s" % (str(event)))

    params = event['params']
    sid = params['MessageSid']
    from_number = params['From']
    to_number = params['To']
    body = params['Body']

    logger.info("RECEIVED MESSAGE SID: %s, FROM: %s, TO: %s, BODY: %s" %
                (sid, from_number, to_number, body))

    client = boto3.client('dynamodb')
    client.put_item(TableName="sms_messages",
                    Item={"sid": {'S': sid},
                          "from": {'S': from_number},
                          "to": {'S': to_number},
                          "body": {'S': body}})
    return ""
sswaner/twilio-aws
resources/lambda_functions/sms_message_handler.py
Python
gpl-3.0
671
0.011923
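The handler in the record above reads a Twilio-style webhook payload out of event['params']. A minimal sketch of the expected event shape (all values below are made up; actually invoking the handler also needs AWS credentials and an existing "sms_messages" DynamoDB table):

# Hypothetical event for a local test of lambda_handler above.
sample_event = {
    "params": {
        "MessageSid": "SM00000000000000000000000000000000",  # placeholder SID
        "From": "+15550001111",
        "To": "+15550002222",
        "Body": "hello",
    }
}
# lambda_handler(sample_event, None)  # context is unused by the handler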
from django.contrib import admin

# Register your models here.
from .models import Uploader

admin.site.register(Uploader)
paihu/moebox
admin.py
Python
mit
124
0
artifact = u"Artifact"
creature = u"Creature"
enchantment = u"Enchantment"
land = u"Land"
planeswalker = u"Planeswalker"

instant = u"Instant"
sorcery = u"Sorcery"

permanents = frozenset({artifact, creature, enchantment, land, planeswalker})
nonpermanents = frozenset({instant, sorcery})

all = permanents | nonpermanents

unimplemented = frozenset({u"Plane", u"Scheme", u"Tribal", u"Vanguard"})

supertypes = frozenset({u"Basic", u"Legendary", u"Ongoing", u"Snow", u"World"})

subtypes = {
    artifact : frozenset({u"Contraption", u"Equipment", u"Fortification"}),

    creature : frozenset({
        u"Advisor", u"Ally", u"Angel", u"Anteater", u"Antelope", u"Ape",
        u"Archer", u"Archon", u"Artificer", u"Assassin", u"Assembly-Worker",
        u"Atog", u"Aurochs", u"Avatar", u"Badger", u"Barbarian", u"Basilisk",
        u"Bat", u"Bear", u"Beast", u"Beeble", u"Berserker", u"Bird",
        u"Blinkmoth", u"Boar", u"Bringer", u"Brushwagg", u"Camarid", u"Camel",
        u"Caribou", u"Carrier", u"Cat", u"Centaur", u"Cephalid", u"Chimera",
        u"Citizen", u"Cleric", u"Cockatrice", u"Construct", u"Coward",
        u"Crab", u"Crocodile", u"Cyclops", u"Dauthi", u"Demon", u"Deserter",
        u"Devil", u"Djinn", u"Dragon", u"Drake", u"Dreadnought", u"Drone",
        u"Druid", u"Dryad", u"Dwarf", u"Efreet", u"Elder", u"Eldrazi",
        u"Elemental", u"Elephant", u"Elf", u"Elk", u"Eye", u"Faerie",
        u"Ferret", u"Fish", u"Flagbearer", u"Fox", u"Frog", u"Fungus",
        u"Gargoyle", u"Germ", u"Giant", u"Gnome", u"Goat", u"Goblin",
        u"Golem", u"Gorgon", u"Graveborn", u"Gremlin", u"Griffin", u"Hag",
        u"Harpy", u"Hellion", u"Hippo", u"Hippogriff", u"Homarid",
        u"Homunculus", u"Horror", u"Horse", u"Hound", u"Human", u"Hydra",
        u"Hyena", u"Illusion", u"Imp", u"Incarnation", u"Insect",
        u"Jellyfish", u"Juggernaut", u"Kavu", u"Kirin", u"Kithkin",
        u"Knight", u"Kobold", u"Kor", u"Kraken", u"Lammasu", u"Leech",
        u"Leviathan", u"Lhurgoyf", u"Licid", u"Lizard", u"Manticore",
        u"Masticore", u"Mercenary", u"Merfolk", u"Metathran", u"Minion",
        u"Minotaur", u"Monger", u"Mongoose", u"Monk", u"Moonfolk", u"Mutant",
        u"Myr", u"Mystic", u"Nautilus", u"Nephilim", u"Nightmare",
        u"Nightstalker", u"Ninja", u"Noggle", u"Nomad", u"Octopus", u"Ogre",
        u"Ooze", u"Orb", u"Orc", u"Orgg", u"Ouphe", u"Ox", u"Oyster",
        u"Pegasus", u"Pentavite", u"Pest", u"Phelddagrif", u"Phoenix",
        u"Pincher", u"Pirate", u"Plant", u"Praetor", u"Prism", u"Rabbit",
        u"Rat", u"Rebel", u"Reflection", u"Rhino", u"Rigger", u"Rogue",
        u"Salamander", u"Samurai", u"Sand", u"Saproling", u"Satyr",
        u"Scarecrow", u"Scorpion", u"Scout", u"Serf", u"Serpent", u"Shade",
        u"Shaman", u"Shapeshifter", u"Sheep", u"Siren", u"Skeleton",
        u"Slith", u"Sliver", u"Slug", u"Snake", u"Soldier", u"Soltari",
        u"Spawn", u"Specter", u"Spellshaper", u"Sphinx", u"Spider",
        u"Spike", u"Spirit", u"Splinter", u"Sponge", u"Squid", u"Squirrel",
        u"Starfish", u"Surrakar", u"Survivor", u"Tetravite", u"Thalakos",
        u"Thopter", u"Thrull", u"Treefolk", u"Triskelavite", u"Troll",
        u"Turtle", u"Unicorn", u"Vampire", u"Vedalken", u"Viashino",
        u"Volver", u"Wall", u"Warrior", u"Weird", u"Werewolf", u"Whale",
        u"Wizard", u"Wolf", u"Wolverine", u"Wombat", u"Worm", u"Wraith",
        u"Wurm", u"Yeti", u"Zombie", u"Zubera"
    }),

    enchantment : frozenset({u"Aura", u"Curse", u"Shrine"}),

    instant : frozenset({u"Arcane", u"Trap"}),

    u"Basic Land" : frozenset({
        u"Forest", u"Island", u"Mountain", u"Plains", u"Swamp"
    }),

    u"Non-Basic Land" : frozenset({
        u"Desert", u"Lair", u"Locus", u"Mine", u"Power-Plant", u"Tower",
        u"Urza's"
    }),

    planeswalker : frozenset({
        u"Ajani", u"Bolas", u"Chandra", u"Elspeth", u"Garruk", u"Gideon",
        u"Jace", u"Karn", u"Koth", u"Liliana", u"Nissa", u"Sarkhan",
        u"Sorin", u"Tezzeret", u"Venser"
    }),
}

subtypes[sorcery] = subtypes[instant]
subtypes[land] = subtypes[u"Basic Land"] | subtypes[u"Non-Basic Land"]
Julian/cardboard
cardboard/types.py
Python
mit
4,144
0.001689
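A small sketch of querying the tables in the record above, assuming the module imports from its repo path cardboard/types.py (note the module shadows the stdlib name "types"):

from cardboard import types

card_types = {types.creature, types.artifact}           # e.g. an artifact creature
assert card_types <= types.permanents                   # both are permanent types
assert u"Equipment" in types.subtypes[types.artifact]   # artifact subtype lookup
assert u"Forest" in types.subtypes[types.land]          # merged basic/non-basic land subtypes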
import urllib2
import base64
import simplejson as json
import logging
from urllib import urlencode
from functools import partial

log = logging.getLogger(__name__)
log_formatter = logging.Formatter('%(name)s - %(message)s')
log_handler = logging.StreamHandler()
log_handler.setFormatter(log_formatter)
log.addHandler(log_handler)
log.setLevel(logging.ERROR)

opener = urllib2.build_opener()
opener.addheaders = [('User-agent', 'Mozilla/5.0')]
urllib2.install_opener(opener)


class EndPointPartial(partial):

    def __new__(cls, func, conf, _repr):
        cls._repr = _repr
        return super(EndPointPartial, cls).__new__(cls, func, conf)

    def __repr__(self):
        return unicode('<API endpoint %s>' % self._repr)


class CampBX(object):
    """Camp BX API Class"""

    username = None
    password = None
    api_url = 'https://campbx.com/api/'
    log = None

    # API endpoints
    # { python_call : (url_php_call, requires_auth) }
    endpoints = {
        'xdepth': ('xdepth', False),
        'xticker': ('xticker', False),
        'my_funds': ('myfunds', True),
        'my_orders': ('myorders', True),
        'my_margins': ('mymargins', True),
        'get_btc_address': ('getbtcaddr', True),
        'send_instant': ('sendinstant', True),
        'send_btc': ('sendbtc', True),
        'trade_cancel': ('tradecancel', True),
        'trade_enter': ('tradeenter', True),
        'trade_advanced': ('tradeadv', True),
    }

    def __init__(self, username=None, password=None):
        self.username = username
        self.password = password

        # setup logging
        self.log = log

        # append all the endpoints to the class dictionary
        self._create_endpoints()

    def debug_mode(self, toggle):
        """
        Toggle debug mode for more detailed output

        obj.debug_mode(True)  - Turn debug mode on
        obj.debug_mode(False) - Turn debug mode off
        """
        if toggle:
            self.log.setLevel(logging.DEBUG)
        else:
            self.log.setLevel(logging.ERROR)

    def _make_request(self, conf, post_params={}):
        """Make a request to the API and return data in a pythonic object"""
        endpoint, requires_auth = conf

        # setup the url and the request objects
        url = '%s%s.php' % (self.api_url, endpoint)
        log.debug('Setting url to %s' % url)
        request = urllib2.Request(url)
        request.add_header('User-Agent', 'Mozilla/5.0')

        # tack on authentication if needed
        log.debug('Post params: %s' % post_params)
        if requires_auth:
            post_params.update({
                'user': self.username,
                'pass': self.password
            })

        # url encode all parameters
        data = urlencode(post_params)

        # gimme some bitcoins!
        try:
            log.debug('Requesting data from %s' % url)
            response = urllib2.urlopen(request, data)
            return json.loads(response.read())
        except urllib2.URLError, e:
            log.debug('Full error: %s' % e)
            if hasattr(e, 'reason'):
                self.log.error('Could not reach host. Reason: %s' % e.reason)
            elif hasattr(e, 'code'):
                self.log.error('Could not fulfill request. Error Code: %s' % e.code)
            return None

    def _create_endpoints(self):
        """Create all api endpoints using self.endpoints and partial from functools"""
        for k, v in self.endpoints.items():
            _repr = '%s.%s' % (self.__class__.__name__, k)
            self.__dict__[k] = EndPointPartial(self._make_request, v, _repr)
rozap/arb
src/api/campbx.py
Python
mit
3,615
0.00083
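Because _create_endpoints() attaches each entry of the endpoints table as a callable partial, usage reduces to one method per API call. A hedged sketch (placeholder credentials; the CampBX service itself may no longer be reachable, so this only illustrates the call pattern):

bx = CampBX()                      # public endpoints need no credentials
ticker = bx.xticker()              # parsed JSON dict, or None on URLError

bx_auth = CampBX('user', 'pass')   # hypothetical credentials
funds = bx_auth.my_funds()         # posts user/pass along with the request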
#This script is made by cotax
#cotax is blenderartists.org user's nickname.

#1. Place a lamp in the scene and put its energy to 0.0
#2. Connect this script to the lamp, always(true)- python
#   - Add a property: energy(float) to the lamp
#   - Add a property: distance(integer) to the lamp
#   Set the energy and distance to your likings

from bge import logic

own = logic.getCurrentController().owner
cam = own.scene.active_camera

#get the distance and energy from the light
distance = own['distance']
energy = own['energy']

#check distance and set the energy
if own.getDistanceTo(cam) < distance:
    own.energy = energy
else:
    own.energy = 0.0
adriansnetlis/bgmc16minosaur
bgmc16minosaur/Assets/Scripts/LightLOD.py
Python
gpl-2.0
659
0.018209
import matplotlib.pyplot as plt
from collections import defaultdict
from itertools import combinations
from pprint import pprint
from scipy import stats
import random
from itertools import chain

import results


def choose_points(qr_list):
    return [d.total_response() - getattr(d, 'median', 0) for d in qr_list]


def check_data(data, p_threshold=0.01):
    """ combinatoric KS, add hits """
    data = data.sample(5001)
    # data.median_filter(choose_points)
    data_roundup = defaultdict(int)
    for k1, k2 in combinations(data.keys(), 2):
        # DON'T EVER USE A SAMPLE SIZE THAT IS A MULTIPLE OF 100
        d, p = stats.ks_2samp(choose_points(data[k1]),
                              choose_points(data[k2]))
        print k1, k2, d, p
        if p < p_threshold:
            data_roundup[k1] += 1
            data_roundup[k2] += 1
    return dict(data_roundup)


data = results.read_data(bucket=r'^/api/\w{3}(\w)\w{6}/config$',
                         data_dir='more_recent_data')

pprint(check_data(data))
exit()

correct = 0
incorrect = 0
unclear = 0
shortened = []
shorten_error = 0
ANSWER = '0'

for x in range(1000):
    print "Iteration: ", x
    res = check_data(data)
    if not res:
        unclear += 1
        continue
    if ANSWER not in res.keys() and max(res.values()) >= 4:
        pprint(res)
        print "shorten error"
        shorten_error += 1
    if max(res.values()) >= 4 and len(res.values()) < 8:
        shortened.append(8 - len(res.values()))
    sri = sorted(res.items(), key=lambda x: -x[1])
    pprint(sri)
    if sri[0][0] == ANSWER and sri[0][1] <= sri[1][1] + 2 and sri[0][1] <= 5:
        unclear += 1
    elif sri[0][0] == ANSWER:
        correct += 1
    else:
        incorrect += 1

print correct, incorrect, float(correct)/(incorrect + correct) * 100.0
print "shorten error ", shorten_error, " unclear: ", unclear
print "Shortened: ", len(shortened), shortened
PaulMcMillan/2014_defcon_timing
hue/vis6.py
Python
bsd-2-clause
1,918
0.001564
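The core of check_data() in the record above is the pairwise two-sample Kolmogorov-Smirnov test. A minimal standalone sketch of that building block, on synthetic data rather than the script's results module:

from scipy import stats
import random

a = [random.gauss(0.0, 1.0) for _ in range(500)]
b = [random.gauss(0.5, 1.0) for _ in range(500)]
d, p = stats.ks_2samp(a, b)
# small p => the two samples were likely drawn from different distributions,
# which is exactly how the script votes on differing response-time buckets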
#!/usr/bin/env python
"""
EvoLife Cellular Automaton implementation using CUDA.

Rules are:
- Each living cell has its own birth/sustain ruleset and an energy level;
- Cell is losing all energy if number of neighbours is not in its sustain rule;
- Cell is born with max energy if there are exactly N neighbours with N in their birth rule;
  - Same is applied for living cells (re-occupation case), but only with different genomes;
  - If there are several birth situations with different N possible, we choose one with larger N;
- Newly born cell's ruleset calculated as crossover between 'parent' cells rulesets;
- If cell is involved in breeding as a 'parent', it's losing `BIRTH_COST` units of energy per each non-zero gene passed;
  - This doesn't apply in re-occupation case;
- Every turn, cell is losing `DEATH_SPEED` units of energy;
- Cell with zero energy is dying;
- Cell cannot have more than `MAX_GENES` non-zero genes in ruleset.

Additional rule is: board has torus topology.

So, if all cells initially has B3/S23 ruleset, DEATH_SPEED = BIRTH_COST = 0,
MAX_GENES >= 3, we have exact Conway rules. But if there were more than one
ruleset initially, evolution may begin.

There are 2^18 possible rulesets, only a small fraction of which have been
studied in any detail. So, who knows what we may discover with evolutionary
rules :)

CONTROLS:
  Arrows  move field
  +/-     zoom in/out
  ]/[     speed up/down
  F       toggle fullscreen
  S       dump board state to a file
  Q/ESC   quit

Prerequisites: pycuda, numpy, scipy, pygame, scikit-image
Debian: apt-get install python-pycuda python-numpy python-pygame python-scipy python-setuptools

Author: a5kin
Copyright: MIT License.
"""
import sys, time, math, colorsys, random, traceback

import pygame
from pygame.locals import *
import numpy as np
from scipy.misc import imsave
import scipy.ndimage.interpolation
from skimage import transform as tf
import importlib

import pycuda.driver as drv
import pycuda.tools
import pycuda.autoinit
from pycuda.compiler import SourceModule
import pycuda.gpuarray as gpuarray
from pycuda.elementwise import ElementwiseKernel

try:
    expmod = importlib.import_module('experiments.' + sys.argv[1])
    DEATH_SPEED = expmod.DEATH_SPEED
    BIRTH_COST = expmod.BIRTH_COST
    MAX_GENES = expmod.MAX_GENES
    FIELD_WIDTH = expmod.FIELD_WIDTH
    FIELD_HEIGHT = expmod.FIELD_HEIGHT
    SAVE_FRAMES = expmod.SAVE_FRAMES
    DOWNSCALE_FACTOR = expmod.DOWNSCALE_FACTOR
    FRAME_SKIP = expmod.FRAME_SKIP
    RANDOM_SEED = expmod.RANDOM_SEED
    fld_init = expmod.fld_init
except:
    print "No experiment preset found, loading default (big_bang)."
    DEATH_SPEED = 0
    BIRTH_COST = 0
    MAX_GENES = 9
    FIELD_WIDTH = 1280
    FIELD_HEIGHT = 720
    SAVE_FRAMES = False
    DOWNSCALE_FACTOR = 1
    FRAME_SKIP = 1
    RANDOM_SEED = None

    def fld_init(a):
        return np.asarray([[(random.choice([0, 1]) * random.randint(0, 256*512)
                             if (i < 100 and j < 100) else 0)
                            for j in range(a.height)]
                           for i in range(a.width)]).astype(np.int32)


step_gpu = ElementwiseKernel(
    "unsigned int *fld, unsigned int *fld_new, unsigned int *seeds, unsigned int *img, int w, int h",
    """
    int x = i / h;
    int y = i % h;
    // torus topology emulation
    int xm1 = x - 1; if (xm1 < 0) xm1 = w + xm1;
    int xp1 = x + 1; if (xp1 >= w) xp1 = xp1 - w;
    int ym1 = y - 1; if (ym1 < 0) ym1 = h + ym1;
    int yp1 = y + 1; if (yp1 >= h) yp1 = yp1 - h;
    // cache neighbours values
    uint f0 = fld[i];
    uint f1 = fld[xm1 * h + ym1];
    uint f2 = fld[x * h + ym1];
    uint f3 = fld[xp1 * h + ym1];
    uint f4 = fld[xm1 * h + y];
    uint f5 = fld[xp1 * h + y];
    uint f6 = fld[xm1 * h + yp1];
    uint f7 = fld[x * h + yp1];
    uint f8 = fld[xp1 * h + yp1];
    uint img0 = img[i];
    uint energy = (f0 >> 17);
    // total number of neighbours
    int N = EXISTS(f1) + EXISTS(f2) + EXISTS(f3) + EXISTS(f4) +
            EXISTS(f5) + EXISTS(f6) + EXISTS(f7) + EXISTS(f8);
    if (energy >= 0xff || f0 > 0 && (((f0 >> 8) & (1 << N)) == 0)) {
        // cell is dying
        fld_new[i] = 0;
        img[i] = fadeout(img0, 5);
    } else {
        uint f00 = f0;
        for (int ni = 8; ni > 0; ni--) {
            // cache neighbours breeding fitnesses
            int ff1 = FIT(f1, ni);
            int ff2 = FIT(f2, ni);
            int ff3 = FIT(f3, ni);
            int ff4 = FIT(f4, ni);
            int ff5 = FIT(f5, ni);
            int ff6 = FIT(f6, ni);
            int ff7 = FIT(f7, ni);
            int ff8 = FIT(f8, ni);
            if (ff1 + ff2 + ff3 + ff4 + ff5 + ff6 + ff7 + ff8 == ni) {
                // neighbours able to breed, cell is born
                f0 = 0;
                int gene_num = 0;
                // crossover breed of parents in deterministic (but heterogeneous) way
                int seed = seeds[i]; //(((i * 58321) + 11113)) % 8;
                int genes_count = {2};
                int gene;
                while (gene_num < 17) {
                    if (seed == 0 && ff1 && gene_num < 17) {
                        gene = (1 << (gene_num + seed) % 17) & f1;
                        f0 += gene;
                        gene_num++;
                        if (f00 == 0 && gene > 0) fld_new[xm1 * h + ym1] += ({1} << 17);
                        if (gene > 0) genes_count--;
                        if (genes_count <= 0) break;
                    }
                    if (seed > 0) seed--;
                    if (seed == 0 && ff2 && gene_num < 17) {
                        gene = (1 << (gene_num + seed) % 17) & f2;
                        f0 += gene;
                        gene_num++;
                        if (f00 == 0 && gene > 0) fld_new[x * h + ym1] += ({1} << 17);
                        if (gene > 0) genes_count--;
                        if (genes_count <= 0) break;
                    }
                    if (seed > 0) seed--;
                    if (seed == 0 && ff3 && gene_num < 17) {
                        gene = (1 << (gene_num + seed) % 17) & f3;
                        f0 += gene;
                        gene_num++;
                        if (f00 == 0 && gene > 0) fld_new[xp1 * h + ym1] += ({1} << 17);
                        if (gene > 0) genes_count--;
                        if (genes_count <= 0) break;
                    }
                    if (seed > 0) seed--;
                    if (seed == 0 && ff4 && gene_num < 17) {
                        gene = (1 << (gene_num + seed) % 17) & f4;
                        f0 += gene;
                        gene_num++;
                        if (f00 == 0 && gene > 0) fld_new[xm1 * h + y] += ({1} << 17);
                        if (gene > 0) genes_count--;
                        if (genes_count <= 0) break;
                    }
                    if (seed > 0) seed--;
                    if (seed == 0 && ff5 && gene_num < 17) {
                        gene = (1 << (gene_num + seed) % 17) & f5;
                        f0 += gene;
                        gene_num++;
                        if (f00 == 0 && gene > 0) fld_new[xp1 * h + y] += ({1} << 17);
                        if (gene > 0) genes_count--;
                        if (genes_count <= 0) break;
                    }
                    if (seed > 0) seed--;
                    if (seed == 0 && ff6 && gene_num < 17) {
                        gene = (1 << (gene_num + seed) % 17) & f6;
                        f0 += gene;
                        gene_num++;
                        if (f00 == 0 && gene > 0) fld_new[xm1 * h + yp1] += ({1} << 17);
                        if (gene > 0) genes_count--;
                        if (genes_count <= 0) break;
                    }
                    if (seed > 0) seed--;
                    if (seed == 0 && ff7 && gene_num < 17) {
                        gene = (1 << (gene_num + seed) % 17) & f7;
                        f0 += gene;
                        gene_num++;
                        if (f00 == 0 && gene > 0) fld_new[x * h + yp1] += ({1} << 17);
                        if (gene > 0) genes_count--;
                        if (genes_count <= 0) break;
                    }
                    if (seed > 0) seed--;
                    if (seed == 0 && ff8 && gene_num < 17) {
                        gene = (1 << (gene_num + seed) % 17) & f8;
                        f0 += gene;
                        gene_num++;
                        if (f00 == 0 && gene > 0) fld_new[xp1 * h + yp1] += ({1} << 17);
                        if (gene > 0) genes_count--;
                        if (genes_count <= 0) break;
                    }
                    if (seed > 0) seed--;
                }
                break;
            }
        }
        if ((f00 & 0x1ffff) == (f0 & 0x1ffff)) {
            f0 = f00;
            if (f0 != 0) {
                f0 += ({0} << 17);
            }
        }
        fld_new[i] = f0;
        energy = (f0 >> 17);
        if (energy > 0xff) energy = 0xff;
        img[i] = (f0 == 0 ? fadeout(img0, 5)
                          : hsv2rgb((f0 & 0x1ffff) % 360, 0xff - energy, 255));
    }
    """.replace("{0}", str(DEATH_SPEED)).replace("{1}", str(BIRTH_COST)).replace("{2}", str(MAX_GENES)),
    "ca_step",
    preamble="""
    #include <stdio.h>
    #define EXISTS(x) (x > 0 ? 1 : 0)
    //#define FIT(x, n) ((n == 0 || (x & (1 << (n - 1))) == 0) ? 0 : 1)
    #define FIT(x, n) ((x >> (n - 1)) & 1)

    __device__ uint fadeout(int val, int step) {
        uint red = (val & 0x00ff0000) >> 16;
        if (red > step-1) red -= step; else red = 0;
        uint green = (val & 0x0000ff00) >> 8;
        if (green > step-1) green -= step; else green = 0;
        uint blue = (val & 0x000000ff);
        if (blue > step-1) blue -= step; else blue = 0;
        return blue + (green << 8) + (red << 16);
    }

    __device__ uint hsv2rgb(int hue, int sat, int val) {
        float r, g, b;
        float h, s, v;
        h = hue;
        s = fmin(255, (float) sat);
        s /= 255;
        v = fmin(255, (float) val);
        float f = ((float) h) / 60.0f;
        float hi = floorf(f);
        f = f - hi;
        int p = (int) (v * (1 - s));
        int q = (int) (v * (1 - s * f));
        int t = (int) (v * (1 - s * (1 - f)));
        if (hi == 0.0f || hi == 6.0f) { r = v; g = t; b = p; }
        else if (hi == 1.0f) { r = q; g = v; b = p; }
        else if (hi == 2.0f) { r = p; g = v; b = t; }
        else if (hi == 3.0f) { r = p; g = q; b = v; }
        else if (hi == 4.0f) { r = t; g = p; b = v; }
        else { r = v; g = p; b = q; }
        unsigned int color = b + g * 256 + r * 256 * 256;
        return color;
    }
    """)


class EvoLife:

    def __init__(self, width=0, height=0, fullscreen=False, saveframes=False,
                 downscale_factor=1, frame_skip=1):
        print "Initializing PyGame...",
        pygame.init()
        self.title = 'EvoLife Cellular Automaton /w CUDA'
        self.saveframes = saveframes
        self.downscale_factor = downscale_factor
        self.movie_frame = 0
        pygame.display.set_caption(self.title, 'CUDA Life')
        modes = pygame.display.list_modes()
        modes.sort()
        modes.reverse()
        self.width = width if width else modes[0][0]
        self.height = height if height else modes[0][1]
        self.frame_skip = frame_skip
        print "done."
        print "Initializing GPU stuff...",
        if RANDOM_SEED:
            random.seed(RANDOM_SEED)
        seeds = np.asarray([[random.randint(0, 16) for j in range(self.height)]
                            for i in range(self.width)]).astype(np.int32)
        fld = fld_init(self)
        self.f1_gpu = gpuarray.to_gpu(fld)
        self.f2_gpu = gpuarray.to_gpu(fld.copy())
        self.seeds_gpu = gpuarray.to_gpu(seeds)
        self.img_gpu = gpuarray.to_gpu(np.asarray([[0 for v in row]
                                                   for row in fld]).astype(np.int32))
        print "done."
        print "Initializing display...",
        self.srf = pygame.display.set_mode((self.width / self.downscale_factor,
                                            self.height / self.downscale_factor))
        if fullscreen:
            pygame.display.toggle_fullscreen()
        print "done: %sx%s." % (self.width / self.downscale_factor,
                                self.height / self.downscale_factor)
        self.t = 0
        self.zoom = 1
        self.dx = 0
        self.dy = 0
        self.last_checked = time.time()
        self.last_t = 0

    def genome2str(self, g):
        f = ""
        for i in xrange(8):
            if ((1 << i) & g) != 0:
                f += str(i+1)
        f += "/"
        g = g >> 8
        for i in xrange(9):
            if ((1 << i) & g) != 0:
                f += str(i)
        return f

    def str2genome(self, s):
        g = 0
        b, s = s.split("/")
        for i in b:
            g += (1 << (int(i)-1))
        for i in s:
            g += (1 << (int(i)+8))
        return g

    def species_chart(self):
        world = self.f1_gpu.get()
        species = np.unique(world & 0x1ffff, return_counts=True)
        species = zip(species[1][1:], species[0][1:])
        species.sort()
        species.reverse()
        print "SN=%s |" % len(species),
        for s in species[:10]:
            print "%s (%s) |" % (self.genome2str(s[1]), s[0]),
        print

    def step(self):
        start_time = time.time()
        step_gpu(self.f1_gpu, self.f2_gpu, self.seeds_gpu, self.img_gpu,
                 np.uint32(self.width), np.uint32(self.height))
        tmp = self.f1_gpu
        self.f1_gpu = self.f2_gpu
        self.f2_gpu = tmp
        self.t += 1
        self.last_t += 1
        if self.t % self.frame_skip == 0:
            dest = self.img_gpu.get()
            dest = np.reshape(dest, (self.width, self.height), order='F')
            if self.dx:
                dest = np.roll(dest, self.dx, axis=1)
            if self.dy:
                dest = np.roll(dest, self.dy, axis=0)
            if self.zoom > 1:
                dest = dest[:self.width // self.zoom + 1, :self.height // self.zoom + 1]
                dest = dest.repeat(self.zoom, axis=0).repeat(self.zoom, axis=1)
                dest = dest[:self.width, :self.height]
            if self.downscale_factor != 1:
                dest = dest.view(np.uint8).reshape(dest.shape+(4,))[..., :3]
                dest = (tf.resize(dest, (self.width / self.downscale_factor,
                                         self.height / self.downscale_factor, 3),
                                  order=1) * 255).astype(np.int32)
                tmp = dest[:,:,0].copy()
                dest[:,:,0] = dest[:,:,2]
                dest[:,:,2] = tmp
            if self.saveframes:
                pygame.image.save(self.srf, "movie/frame%s.png" % str(self.movie_frame).zfill(8))
                self.movie_frame += 1
            pygame.surfarray.blit_array(self.srf, dest)
            pygame.display.update()
        if self.t % 100 == 0:
            self.species_chart()
        end_time = time.time()
        if end_time - self.last_checked > 1:
            elapsed_time = end_time - self.last_checked
            pygame.display.set_caption(self.title + " | Step %s: %.2f steps/s @%sx"
                                       % (self.t, float(self.last_t) / elapsed_time,
                                          self.frame_skip),
                                       'CUDA EvoLife')
            self.last_checked = time.time()
            self.last_t = 0

    def run(self):
        while True:
            self.step()
            events = pygame.event.get()
            need_exit = False
            for e in events:
                if e.type==QUIT or e.type==KEYDOWN and e.key==K_ESCAPE or e.type==KEYDOWN and e.key==K_q:
                    need_exit = True
                    break
                if e.type==KEYDOWN:
                    if e.key==K_KP_PLUS or e.key==K_EQUALS:
                        self.zoom *= 2
                    if e.key==K_MINUS or e.key==K_KP_MINUS:
                        self.zoom = max(1, self.zoom / 2)
                    if e.key==K_RIGHTBRACKET:
                        self.frame_skip += 5
                    if e.key==K_LEFTBRACKET:
                        self.frame_skip = max(1, self.frame_skip - 5)
                    if e.key==K_UP:
                        self.dx += 10
                    if e.key==K_DOWN:
                        self.dx -= 10
                    if e.key==K_LEFT:
                        self.dy += 10
                    if e.key==K_RIGHT:
                        self.dy -= 10
                    if e.key==K_f:
                        pygame.display.toggle_fullscreen()
                    if e.key==K_s:
                        np.save("fields/field.npy", self.f1_gpu.get())
            if need_exit:
                break


if __name__ == '__main__':
    ca = EvoLife(FIELD_WIDTH, FIELD_HEIGHT, saveframes=SAVE_FRAMES,
                 downscale_factor=DOWNSCALE_FACTOR, frame_skip=FRAME_SKIP)
    ca.run()
a5kin/evolife
evolife.py
Python
mit
16,479
0.006554
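genome2str()/str2genome() in the record above imply a compact bit layout: birth rules live in the low 8 bits (bit i set means "born with i+1 neighbours") and sustain rules in the next 9 bits (bit i set means "survives with i neighbours"). A worked check of Conway's B3/S23, computed by hand so the GPU-backed class need not be constructed:

b3 = 1 << (3 - 1)             # birth on 3 neighbours   -> bit 2
s2 = 1 << (2 + 8)             # sustain on 2 neighbours -> bit 10
s3 = 1 << (3 + 8)             # sustain on 3 neighbours -> bit 11
assert b3 + s2 + s3 == 3076   # the integer genome str2genome("3/23") returns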
# python
# This file is generated by a program (mib2py).

import HP_SN_IGMP_MIB

OIDMAP = {
    '1.3.6.1.4.1.11.2.3.7.11.12.2.6.1': HP_SN_IGMP_MIB.snIgmpMIBObjects,
    '1.3.6.1.4.1.11.2.3.7.11.12.2.6.1.1': HP_SN_IGMP_MIB.snIgmpQueryInterval,
    '1.3.6.1.4.1.11.2.3.7.11.12.2.6.1.2': HP_SN_IGMP_MIB.snIgmpGroupMembershipTime,
    '1.3.6.1.4.1.11.2.3.7.11.12.2.6.1.3.1.1': HP_SN_IGMP_MIB.snIgmpIfEntryIndex,
    '1.3.6.1.4.1.11.2.3.7.11.12.2.6.1.3.1.2': HP_SN_IGMP_MIB.snIgmpIfPortNumber,
    '1.3.6.1.4.1.11.2.3.7.11.12.2.6.1.3.1.3': HP_SN_IGMP_MIB.snIgmpIfGroupAddress,
    '1.3.6.1.4.1.11.2.3.7.11.12.2.6.1.3.1.4': HP_SN_IGMP_MIB.snIgmpIfGroupAge,
}
xiangke/pycopia
mibs/pycopia/mibs/HP_SN_IGMP_MIB_OID.py
Python
lgpl-2.1
626
0.01278
from django.contrib import admin

from models import Page, TPage, Content, TContent


class PageAdmin(admin.ModelAdmin):
    exclude = ['posted']
    #fields = ['posted', 'title']
    list_display = ('title', 'posted', 'slug')
    prepopulated_fields = {'slug': ('title',)}


class TPageAdmin(admin.ModelAdmin):
    list_display = ('title', 'language', 'page')
    #prepopulated_fields = {'slug': ('title',)}


class ContentAdmin(admin.ModelAdmin):
    exclude = ['posted']
    #fields = ['posted', 'title']
    list_display = ('code', 'posted', 'page')
    prepopulated_fields = {'slug': ('code',)}


class TContentAdmin(admin.ModelAdmin):
    #change_form_template = 'page/admin/change_form.html'
    list_display = ('content', 'language', 'page')
    #prepopulated_fields = {'slug': ('title',)}


admin.site.register(Page, PageAdmin)
admin.site.register(TPage, TPageAdmin)
admin.site.register(Content, ContentAdmin)
admin.site.register(TContent, TContentAdmin)
vollov/isda.ca
isda_backend/page/admin.py
Python
mit
962
0.011435
from setuptools import setup, find_packages
from codecs import open
from os import path

here = path.abspath(path.dirname(__file__))


def readfile(fn):
    """Read fn and return the contents."""
    with open(path.join(here, fn), "r", encoding="utf-8") as f:
        return f.read()


setup(
    name="usfm2osis",
    packages=find_packages(exclude=["tests*"]),
    version="0.6.1",
    description="Tools for converting Bibles from USFM to OSIS XML",
    author="Christopher C. Little",
    author_email="chrisclittle+usfm2osis@gmail.com",
    url="https://github.com/chrislit/usfm2osis",
    download_url="https://github.com/chrislit/usfm2osis/archive/master.zip",
    keywords=["OSIS", "USFM", "Bible"],
    license="GPLv3+",
    zip_safe=False,
    classifiers=[
        "Programming Language :: Python",
        "Programming Language :: Python :: 3",
        "Programming Language :: Python :: 3.6",
        "Programming Language :: Python :: 3.7",
        "Programming Language :: Python :: 3.8",
        "Programming Language :: Python :: 3.9",
        "Development Status :: 4 - Beta",
        "License :: OSI Approved :: GNU General Public License v3 or later \
(GPLv3+)",
        "Operating System :: OS Independent",
        "Natural Language :: English",
        "Intended Audience :: Religion",
        "Intended Audience :: Developers",
        "Topic :: Religion",
        "Topic :: Text Processing :: Markup :: XML",
    ],
    long_description="\n\n".join([readfile(f) for f in ("README.rst",)]),
    # scripts=['scripts/usfm2osis', 'scripts/usfmtags'],
    package_data={"usfm2osis": ["schemas/*.xsd"]},
    entry_points={
        "console_scripts": [
            "usfm2osis = usfm2osis.scripts.usfm2osis:main",
            "usfmtags = usfm2osis.scripts.usfmtags:main",
        ]
    },
)
chrislit/usfm2osis
setup.py
Python
gpl-3.0
1,809
0
import heapq
import os

import numpy

from smqtk.algorithms.nn_index.hash_index import HashIndex
from smqtk.utils.bit_utils import (
    bit_vector_to_int_large,
    int_to_bit_vector_large,
)
from smqtk.utils.metrics import hamming_distance

__author__ = "paul.tunison@kitware.com"


class LinearHashIndex (HashIndex):
    """
    Basic linear index using heap sort (aka brute force).
    """

    @classmethod
    def is_usable(cls):
        return True

    def __init__(self, file_cache=None):
        """
        Initialize linear, brute-force hash index

        :param file_cache: Optional path to a file to cache our index to.
        :type file_cache: str
        """
        super(LinearHashIndex, self).__init__()
        self.file_cache = file_cache
        self.index = numpy.array([], bool)
        self.load_cache()

    def get_config(self):
        return {
            'file_cache': self.file_cache,
        }

    def load_cache(self):
        """
        Load from file cache if we have one
        """
        if self.file_cache and os.path.isfile(self.file_cache):
            self.index = numpy.load(self.file_cache)

    def save_cache(self):
        """
        Save to file cache if configured
        """
        if self.file_cache:
            numpy.save(self.file_cache, self.index)

    def count(self):
        return len(self.index)

    def build_index(self, hashes):
        """
        Build the index with the given hash codes (bit-vectors).

        Subsequent calls to this method should rebuild the index, not add to
        it, or raise an exception so as to protect the current index.

        :raises ValueError: No data available in the given iterable.

        :param hashes: Iterable of descriptor elements to build index over.
        :type hashes: collections.Iterable[numpy.ndarray[bool]]
        """
        new_index = numpy.array(map(bit_vector_to_int_large, hashes))
        if not new_index.size:
            raise ValueError("No hashes given to index.")
        self.index = new_index
        self.save_cache()

    def nn(self, h, n=1):
        """
        Return the nearest `N` neighbors to the given hash code.

        Distances are in the range [0,1] and are the percent different each
        neighbor hash is from the query, based on the number of bits
        contained in the query.

        :param h: Hash code to compute the neighbors of. Should be the same
            bit length as indexed hash codes.
        :type h: numpy.ndarray[bool]

        :param n: Number of nearest neighbors to find.
        :type n: int

        :raises ValueError: No index to query from.

        :return: Tuple of nearest N hash codes and a tuple of the distance
            values to those neighbors.
        :rtype: (tuple[numpy.ndarray[bool], tuple[float])
        """
        super(LinearHashIndex, self).nn(h, n)
        h_int = bit_vector_to_int_large(h)
        bits = len(h)
        #: :type: list[int|long]
        near_codes = heapq.nsmallest(n, self.index,
                                     lambda e: hamming_distance(h_int, e))
        distances = map(hamming_distance, near_codes,
                        [h_int] * len(near_codes))
        return [int_to_bit_vector_large(c, bits) for c in near_codes], \
               [d / float(bits) for d in distances]
Purg/SMQTK
python/smqtk/algorithms/nn_index/hash_index/linear.py
Python
bsd-3-clause
3,373
0
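A hypothetical usage sketch for the index in the record above (assumes smqtk is installed; distances come back as the fraction of differing bits):

import numpy as np

idx = LinearHashIndex()                        # no file cache configured
idx.build_index([np.array([0, 0, 1, 1], bool),
                 np.array([1, 1, 0, 0], bool)])
codes, dists = idx.nn(np.array([0, 1, 1, 1], bool), n=1)
# the nearest code differs in 1 of 4 bits, so dists[0] == 0.25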
from django.conf.urls import patterns, url
from django.contrib.auth import views as auth_views

urlpatterns = patterns('',
    url(r'^login/$',
        auth_views.login,
        {'template_name': 'authstrap/login.html'},
        name='auth_login'),
    url(r'^logout/$',
        auth_views.logout,
        {'template_name': 'authstrap/logout.html'},
        name='auth_logout'),
    url(r'^password/change/$',
        auth_views.password_change,
        {'template_name': 'authstrap/password_change_form.html'},
        name='auth_password_change'),
    url(r'^password/change/done/$',
        auth_views.password_change_done,
        {'template_name': 'authstrap/password_change_done.html'},
        name='auth_password_change_done'),
    url(r'^password/reset/$',
        auth_views.password_reset,
        {'template_name': 'authstrap/password_reset_form.html'},
        name='auth_password_reset'),
    url(r'^password/reset/confirm/(?P<uidb36>[0-9A-Za-z]+)-(?P<token>.+)/$',
        auth_views.password_reset_confirm,
        {'template_name': 'authstrap/password_reset_confirm.html'},
        name='auth_password_reset_confirm'),
    url(r'^password/reset/complete/$',
        auth_views.password_reset_complete,
        {'template_name': 'authstrap/password_reset_complete.html'},
        name='auth_password_reset_complete'),
    url(r'^password/reset/done/$',
        auth_views.password_reset_done,
        {'template_name': 'authstrap/password_change_done.html'},
        name='auth_password_reset_done'),
)
alixedi/django_authstrap
authstrap/urls.py
Python
bsd-3-clause
1,659
0.002411
from __future__ import absolute_import

from .base import *  # NOQA
from .registry import RuleRegistry  # NOQA


def init_registry():
    from sentry.constants import _SENTRY_RULES
    from sentry.plugins.base import plugins
    from sentry.utils.imports import import_string
    from sentry.utils.safe import safe_execute

    registry = RuleRegistry()
    for rule in _SENTRY_RULES:
        cls = import_string(rule)
        registry.add(cls)
    for plugin in plugins.all(version=2):
        for cls in safe_execute(plugin.get_rules, _with_transaction=False) or ():
            registry.add(cls)

    return registry


rules = init_registry()
beeftornado/sentry
src/sentry/rules/__init__.py
Python
bsd-3-clause
646
0.001548
class Solution(object):
    def merge(self, nums1, m, nums2, n):
        """
        :type nums1: List[int]
        :type m: int
        :type nums2: List[int]
        :type n: int
        :rtype: void Do not return anything, modify nums1 in-place instead.
        """
        last, i, j = m+n-1, m-1, n-1
        while i >= 0 and j >= 0:
            if nums1[i] > nums2[j]:
                nums1[last] = nums1[i]
                last, i = last-1, i-1
            else:
                nums1[last] = nums2[j]
                last, j = last-1, j-1
        while j >= 0:
            nums1[last] = nums2[j]
            last, j = last-1, j-1
hufeiya/leetcode
python/88_Merge_Sorted_Array.py
Python
gpl-2.0
628
0.015924
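A quick check of the in-place merge above, with nums1 padded at the tail as LeetCode 88 specifies:

nums1 = [1, 3, 5, 0, 0, 0]           # first m=3 slots hold data, rest is padding
Solution().merge(nums1, 3, [2, 4, 6], 3)
assert nums1 == [1, 2, 3, 4, 5, 6]   # merged in place, filled back to front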
#===========================================================================
# mjpegger.py
#
# Runs a MJPG stream on provided port.
#
# 2016-07-25
# Carter Nelson
#===========================================================================
import threading
import SimpleHTTPServer
import SocketServer
import io

keepStreaming = False
camera = None
resize = (640, 360)


class MJPEGThread(threading.Thread):
    """Thread to serve MJPEG stream."""

    def __init__(self, group=None, target=None, name=None, args=(), kwargs=None):
        threading.Thread.__init__(self, group=group, target=target, name=name)
        global camera, resize
        camera = kwargs['camera']
        resize = kwargs['resize']
        self.port = kwargs['port']
        self.keepRunning = False
        self.streamRunning = False
        self.server = None

    def run(self, ):
        print "MJPEGThread starting"
        self.server = SocketServer.TCPServer(("", self.port),
                                             MJPEGStreamHandler,
                                             bind_and_activate=False)
        self.server.allow_reuse_address = True
        self.server.timeout = 0.1
        self.server.server_bind()
        self.server.server_activate()
        self.keepRunning = True
        self.streamRunning = True
        while self.keepRunning:
            self.server.handle_request()
        self.streamRunning = False
        camera.close()
        self.server.server_close()
        print "MJPEGThread done"

    def stop(self, ):
        global keepStreaming
        keepStreaming = False
        self.keepRunning = False


class MJPEGStreamHandler(SimpleHTTPServer.SimpleHTTPRequestHandler):
    """Handler for MJPEG stream."""

    def do_GET(self, ):
        print "MJPEGStreamHandler GET"
        global keepStreaming
        keepStreaming = True
        stream = io.BytesIO()
        self.send_response(200)
        self.send_header('Content-type',
                         'multipart/x-mixed-replace; boundary=--picameramjpg')
        self.end_headers()
        for frame in camera.capture_continuous(stream, 'jpeg',
                                               use_video_port=True,
                                               resize=resize):
            if not keepStreaming:
                break
            self.wfile.write("--picameramjpg")
            self.send_header('Content-type', 'image/jpeg')
            self.send_header('Content-length', len(stream.getvalue()))
            self.end_headers()
            self.wfile.write(stream.getvalue())
            stream.seek(0)
            stream.truncate()
caternuson/rpi-laser
mjpegger.py
Python
mit
2,603
0.011909
from test.classes.ball_detector.BallDetector import BallDetector
from test.classes.ball_detector.BounceCalculator import BounceCalculator
from test.classes.ball_detector.Extrapolator import Extrapolator
from test.classes.utils.Ball import Ball
from test.classes.utils.BallHistory import BallHistory

VERTICAL_THRESHOLD = 10


class BallTracker:
    def __init__(self):
        self.track_history = BallHistory()
        self.ball_detector = None
        self.bounce_calculator = BounceCalculator()
        self.extrapolator = Extrapolator()

    def first_frame(self, first_frame):
        self.ball_detector = BallDetector(first_frame)

    def track(self, frame):
        found_ball = self.ball_detector.detect(frame)
        if found_ball.is_none():
            found_ball = self.extrapolator.extrapolate(self.track_history)

        # Remove vertical movement logic
        # If we have no one to compare to, cannot detect vertical movement
        if len(self.track_history) == 0 or self.track_history[-1].is_none() or found_ball.is_none():
            self.track_history.update_history(found_ball)
        else:
            # If we have someone to compare to, look if x coordinates have changed enough
            if abs(self.track_history[-1].center[0] - found_ball.center[0]) < VERTICAL_THRESHOLD:
                self.track_history.update_history(Ball())
            else:
                self.track_history.update_history(found_ball)

        return found_ball

    def get_bounce(self):
        bounce = self.bounce_calculator.find_bounce(self.track_history)
        if not bounce.is_none():
            # Bounces are detected after happening
            after_bounce_ball = self.track_history[-1]
            self.track_history.clear_history()
            self.track_history.update_history(after_bounce_ball)

            # We don't care about outside bounces
            if not self.ball_detector.is_inside_table(bounce):
                return Ball()

        return bounce

    def clear(self):
        self.track_history.clear_history()
        self.ball_detector.clear()
Centre-Alt-Rendiment-Esportiu/att
src/python/test/classes/BallTracker.py
Python
gpl-3.0
2,078
0.001444
#!/usr/bin/python3
# vim:set shiftwidth=4 softtabstop=4 expandtab textwidth=79:

from typing import Dict, Union, TypeVar, Iterable, Callable, Tuple,\
    Optional

from math import log, ceil

import gdb

import crash
from crash.util import find_member_variant
from crash.util.symbols import Types, Symvals, TypeCallbacks
from crash.util.symbols import SymbolCallbacks, MinimalSymbolCallbacks
from crash.cache.syscache import config
from crash.exceptions import DelayedAttributeError

# TODO debuginfo won't tell us, depends on version?
PAGE_MAPPING_ANON = 1

types = Types(['unsigned long', 'struct page', 'enum pageflags',
               'enum zone_type', 'struct mem_section'])
symvals = Symvals(['mem_section', 'max_pfn'])

PageType = TypeVar('PageType', bound='Page')


class Page:
    slab_cache_name = None
    slab_page_name = None
    compound_head_name = None
    vmemmap_base = 0xffffea0000000000
    vmemmap: gdb.Value
    directmap_base = 0xffff880000000000
    pageflags: Dict[str, int] = dict()

    PG_tail = -1
    PG_slab = -1
    PG_lru = -1

    setup_page_type_done = False
    setup_pageflags_done = False
    setup_pageflags_finish_done = False

    ZONES_WIDTH = -1
    NODES_WIDTH = -1

    # TODO have arch provide this?
    BITS_PER_LONG = -1
    PAGE_SIZE = 4096
    PAGE_SHIFT = 12

    sparsemem = False
    SECTION_SIZE_BITS = -1  # Depends on sparsemem=True
    SECTIONS_PER_ROOT = -1  # Depends on SPARSEMEM_EXTREME

    _is_tail: Callable[['Page'], bool]
    _compound_head: Callable[['Page'], int]

    @classmethod
    def setup_page_type(cls, gdbtype: gdb.Type) -> None:
        # TODO: should check config, but that failed to work on ppc64,
        # hardcode 64k for now
        if crash.current_target().arch.name() == "powerpc:common64":
            cls.PAGE_SHIFT = 16
            # also a config
            cls.directmap_base = 0xc000000000000000
            cls.sparsemem = True
            cls.SECTION_SIZE_BITS = 24

        cls.PAGE_SIZE = 1 << cls.PAGE_SHIFT

        cls.slab_cache_name = find_member_variant(gdbtype, ['slab_cache', 'lru'])
        cls.slab_page_name = find_member_variant(gdbtype, ['slab_page', 'lru'])
        cls.compound_head_name = find_member_variant(gdbtype,
                                                     ['compound_head', 'first_page'])
        if not hasattr(cls, 'vmemmap'):
            cls.vmemmap = gdb.Value(cls.vmemmap_base).cast(gdbtype.pointer())

        cls.setup_page_type_done = True
        if cls.setup_pageflags_done and not cls.setup_pageflags_finish_done:
            cls.setup_pageflags_finish()

    @classmethod
    def setup_mem_section(cls, gdbtype: gdb.Type) -> None:
        # TODO assumes SPARSEMEM_EXTREME
        cls.SECTIONS_PER_ROOT = cls.PAGE_SIZE // gdbtype.sizeof

    @classmethod
    def pfn_to_page(cls, pfn: int) -> gdb.Value:
        if cls.sparsemem:
            section_nr = pfn >> (cls.SECTION_SIZE_BITS - cls.PAGE_SHIFT)
            root_idx = section_nr // cls.SECTIONS_PER_ROOT
            offset = section_nr & (cls.SECTIONS_PER_ROOT - 1)
            section = symvals.mem_section[root_idx][offset]

            pagemap = section["section_mem_map"] & ~3
            return (pagemap.cast(types.page_type.pointer()) + pfn).dereference()

        # pylint doesn't have the visibility it needs to evaluate this
        # pylint: disable=unsubscriptable-object
        return cls.vmemmap[pfn]

    @classmethod
    def setup_pageflags(cls, gdbtype: gdb.Type) -> None:
        for field in gdbtype.fields():
            cls.pageflags[field.name] = field.enumval

        cls.setup_pageflags_done = True
        if cls.setup_page_type_done and not cls.setup_pageflags_finish_done:
            cls.setup_pageflags_finish()

        cls.PG_slab = 1 << cls.pageflags['PG_slab']
        cls.PG_lru = 1 << cls.pageflags['PG_lru']

    @classmethod
    def setup_vmemmap_base(cls, symbol: gdb.Symbol) -> None:
        cls.vmemmap_base = int(symbol.value())
        # setup_page_type() was first and used the hardcoded initial value,
        # we have to update
        cls.vmemmap = gdb.Value(cls.vmemmap_base).cast(types.page_type.pointer())

    @classmethod
    def setup_directmap_base(cls, symbol: gdb.Symbol) -> None:
        cls.directmap_base = int(symbol.value())

    @classmethod
    def setup_zone_type(cls, gdbtype: gdb.Type) -> None:
        max_nr_zones = gdbtype['__MAX_NR_ZONES'].enumval
        cls.ZONES_WIDTH = int(ceil(log(max_nr_zones, 2)))

    @classmethod
    # pylint: disable=unused-argument
    def setup_nodes_width(cls, symbol: Union[gdb.Symbol, gdb.MinSymbol]) -> None:
        """
        Detect NODES_WIDTH from the in-kernel config table

        Args:
            symbol: The ``kernel_config_data`` symbol or minimal symbol.
                It is not used directly.  It is used to determine whether
                the config data should be available.
        """
        # TODO: handle kernels with no space for nodes in page flags
        try:
            cls.NODES_WIDTH = int(config['NODES_SHIFT'])
        except (KeyError, DelayedAttributeError):
            # XXX
            print("Unable to determine NODES_SHIFT from config, trying 8")
            cls.NODES_WIDTH = 8

        # piggyback on this callback because type callback doesn't seem to
        # work for unsigned long
        cls.BITS_PER_LONG = types.unsigned_long_type.sizeof * 8

    @classmethod
    def setup_pageflags_finish(cls) -> None:
        cls.setup_pageflags_finish_done = True
        cls._is_tail = cls.__is_tail_compound_head_bit
        cls._compound_head = cls.__compound_head_uses_low_bit

        if 'PG_tail' in cls.pageflags.keys():
            cls.PG_tail = 1 << cls.pageflags['PG_tail']
            cls._is_tail = cls.__is_tail_flag

        if cls.compound_head_name == 'first_page':
            cls._compound_head = cls.__compound_head_first_page
            if cls.PG_tail == -1:
                cls.PG_tail = 1 << cls.pageflags['PG_compound'] | 1 << cls.pageflags['PG_reclaim']
                cls._is_tail = cls.__is_tail_flagcombo

    @classmethod
    def from_obj(cls, page: gdb.Value) -> 'Page':
        pfn = (int(page.address) - Page.vmemmap_base) // types.page_type.sizeof
        return Page(page, pfn)

    @classmethod
    def from_page_addr(cls, addr: int) -> 'Page':
        page_ptr = gdb.Value(addr).cast(types.page_type.pointer())
        return cls.from_obj(page_ptr.dereference())

    def __init__(self, obj: gdb.Value, pfn: int) -> None:
        self.gdb_obj = obj
        self.address = int(obj.address)
        self.pfn = pfn
        self.flags = int(obj["flags"])

    def __is_tail_flagcombo(self) -> bool:
        return bool((self.flags & self.PG_tail) == self.PG_tail)

    def __is_tail_flag(self) -> bool:
        return bool(self.flags & self.PG_tail)

    def __is_tail_compound_head_bit(self) -> bool:
        return bool(self.gdb_obj['compound_head'] & 1)

    def is_tail(self) -> bool:
        return self._is_tail()

    def is_slab(self) -> bool:
        return bool(self.flags & self.PG_slab)

    def is_lru(self) -> bool:
        return bool(self.flags & self.PG_lru)

    def is_anon(self) -> bool:
        mapping = int(self.gdb_obj["mapping"])
        return (mapping & PAGE_MAPPING_ANON) != 0

    def get_slab_cache(self) -> gdb.Value:
        if Page.slab_cache_name == "lru":
            return self.gdb_obj["lru"]["next"]
        return self.gdb_obj[Page.slab_cache_name]

    def get_slab_page(self) -> gdb.Value:
        if Page.slab_page_name == "lru":
            return self.gdb_obj["lru"]["prev"]
        return self.gdb_obj[Page.slab_page_name]

    def get_nid(self) -> int:
        # TODO: this only works when there are no sections
        # (i.e. sparsemem_vmemmap)
        return self.flags >> (self.BITS_PER_LONG - self.NODES_WIDTH)

    def get_zid(self) -> int:
        shift = self.BITS_PER_LONG - self.NODES_WIDTH - self.ZONES_WIDTH
        zid = self.flags >> shift & ((1 << self.ZONES_WIDTH) - 1)
        return zid

    def __compound_head_first_page(self) -> int:
        return int(self.gdb_obj['first_page'])

    def __compound_head_uses_low_bit(self) -> int:
        return int(self.gdb_obj['compound_head']) - 1

    def __compound_head(self) -> int:
        return self._compound_head()

    def compound_head(self) -> 'Page':
        if not self.is_tail():
            return self
        return self.__class__.from_page_addr(self.__compound_head())


type_cbs = TypeCallbacks([('struct page', Page.setup_page_type),
                          ('enum pageflags', Page.setup_pageflags),
                          ('enum zone_type', Page.setup_zone_type),
                          ('struct mem_section', Page.setup_mem_section)])
msymbol_cbs = MinimalSymbolCallbacks([('kernel_config_data',
                                       Page.setup_nodes_width)])

# TODO: this should better be generalized to some callback for
# "config is available" without refering to the symbol name here
symbol_cbs = SymbolCallbacks([('vmemmap_base', Page.setup_vmemmap_base),
                              ('page_offset_base', Page.setup_directmap_base)])


def page_addr(struct_page_addr: int) -> int:
    pfn = (struct_page_addr - Page.vmemmap_base) // types.page_type.sizeof
    return Page.directmap_base + (pfn * Page.PAGE_SIZE)


def pfn_to_page(pfn: int) -> 'Page':
    return Page(Page.pfn_to_page(pfn), pfn)


def page_from_addr(addr: int) -> 'Page':
    pfn = (addr - Page.directmap_base) // Page.PAGE_SIZE
    return pfn_to_page(pfn)


def safe_page_from_page_addr(addr: int) -> Optional[Page]:
    if addr < Page.vmemmap_base:
        return None
    pfn = (addr - Page.vmemmap_base) // types.page_type.sizeof
    if pfn > int(symvals.max_pfn):
        return None
    return Page.from_page_addr(addr)


def page_from_gdb_obj(gdb_obj: gdb.Value) -> 'Page':
    pfn = (int(gdb_obj.address) - Page.vmemmap_base) // types.page_type.sizeof
    return Page(gdb_obj, pfn)


def for_each_struct_page_pfn() -> Iterable[Tuple[gdb.Value, int]]:
    # TODO works only on x86?
    max_pfn = int(symvals.max_pfn)
    for pfn in range(max_pfn):
        try:
            yield (Page.pfn_to_page(pfn), pfn)
        except gdb.error:
            # TODO: distinguish pfn_valid() and report failures for those?
            pass


def for_each_page() -> Iterable[Page]:
    # TODO works only on x86?
    max_pfn = int(symvals.max_pfn)
    for pfn in range(max_pfn):
        try:
            yield pfn_to_page(pfn)
        except gdb.error:
            # TODO: distinguish pfn_valid() and report failures for those?
            pass


# Optimized to filter flags on gdb.Value before instantiating Page
def for_each_page_flag(flag: int) -> Iterable[Page]:
    for (struct_page, pfn) in for_each_struct_page_pfn():
        try:
            if struct_page["flags"] & flag == 0:
                continue
            yield Page(struct_page, pfn)
        except gdb.error:
            pass
jeffmahoney/crash-python
crash/types/page.py
Python
gpl-2.0
10,946
0.001827
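get_nid()/get_zid() in the record above peel the node and zone ids off the top bits of page.flags. A worked example with assumed widths (BITS_PER_LONG=64, NODES_WIDTH=8, ZONES_WIDTH=2; the real values come from the kernel's config and enums), runnable without gdb:

BITS_PER_LONG, NODES_WIDTH, ZONES_WIDTH = 64, 8, 2
flags = (3 << 56) | (2 << 54) | 0x1000   # nid=3, zid=2, plus some low flag bits
nid = flags >> (BITS_PER_LONG - NODES_WIDTH)
zid = (flags >> (BITS_PER_LONG - NODES_WIDTH - ZONES_WIDTH)) & ((1 << ZONES_WIDTH) - 1)
assert (nid, zid) == (3, 2)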
"""Tests for tensorflow.ops.tf.Cholesky.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function import tensorflow.python.platform import numpy as np from six.moves import xrange # pylint: disable=redefined-builtin import tensorflow as tf class CholeskyOpTest(tf.test.TestCase): def _verifyCholesky(self, x): with self.test_session() as sess: # Verify that LL^T == x. if x.ndim == 2: chol = tf.cholesky(x) verification = tf.matmul(chol, chol, transpose_a=False, transpose_b=True) else: chol = tf.batch_cholesky(x) verification = tf.batch_matmul(chol, chol, adj_x=False, adj_y=True) chol_np, verification_np = sess.run([chol, verification]) self.assertAllClose(x, verification_np) self.assertShapeEqual(x, chol) # Check that the cholesky is lower triangular, and has positive diagonal # elements. if chol_np.shape[-1] > 0: chol_reshaped = np.reshape(chol_np, (-1, chol_np.shape[-2], chol_np.shape[-1])) for chol_matrix in chol_reshaped: self.assertAllClose(chol_matrix, np.tril(chol_matrix)) self.assertTrue((np.diag(chol_matrix) > 0.0).all()) def testBasic(self): self._verifyCholesky(np.array([[4., -1., 2.], [-1., 6., 0], [2., 0., 5.]])) def testBatch(self): simple_array = np.array([[[1., 0.], [0., 5.]]]) # shape (1, 2, 2) self._verifyCholesky(simple_array) self._verifyCholesky(np.vstack((simple_array, simple_array))) odd_sized_array = np.array([[[4., -1., 2.], [-1., 6., 0], [2., 0., 5.]]]) self._verifyCholesky(np.vstack((odd_sized_array, odd_sized_array))) # Generate random positive-definite matrices. matrices = np.random.rand(10, 5, 5) for i in xrange(10): matrices[i] = np.dot(matrices[i].T, matrices[i]) self._verifyCholesky(matrices) def testNonSquareMatrix(self): with self.assertRaises(ValueError): tf.cholesky(np.array([[1., 2., 3.], [3., 4., 5.]])) def testWrongDimensions(self): tensor3 = tf.constant([1., 2.]) with self.assertRaises(ValueError): tf.cholesky(tensor3) def testNotInvertible(self): # The input should be invertible. with self.test_session(): with self.assertRaisesOpError("LLT decomposition was not successful. The " "input might not be valid."): # All rows of the matrix below add to zero self._verifyCholesky(np.array([[1., -1., 0.], [-1., 1., -1.], [0., -1., 1.]])) def testEmpty(self): self._verifyCholesky(np.empty([0, 2, 2])) self._verifyCholesky(np.empty([2, 0, 0])) if __name__ == "__main__": tf.test.main()
arunhotra/tensorflow
tensorflow/python/kernel_tests/cholesky_op_test.py
Python
apache-2.0
2,904
0.006543
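The same LL^T verification the test performs, sketched in plain numpy (no TensorFlow session) for the 3x3 positive-definite matrix used in testBasic:

import numpy as np

x = np.array([[4., -1., 2.], [-1., 6., 0.], [2., 0., 5.]])
chol = np.linalg.cholesky(x)                 # lower-triangular factor L
assert np.allclose(np.dot(chol, chol.T), x)  # LL^T reconstructs the input
assert (np.diag(chol) > 0.0).all()           # positive diagonal, as the test checks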
# html2PraatMan - Version 1.0 - October 16, 2013
# Batch html-to-ManPages converter for Praat documentation
# Copyright (C) 2013 Charalampos Karypidis
# Email: ch.karypidis@gmail.com
# http://addictiveknowledge.blogspot.com/
##############################
##############################
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.

# You should have received a copy of the GNU General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
############################################
############################################

from bs4 import BeautifulSoup
import string, os

############################################
def doubleQuotes(s, number=1):
    """ ............."""
    return "\""*number + s + "\""*number

############################################
def bold(s):
    return "##" + s + "#"

############################################
def italics(s):
    # listWords = string.split(s)
    # if len(listWords) == 1:
    #     return "%%" + listWords[0] + "%"
    # else:
    #     for x in range(0,len(listWords)):
    #         listWords[x] = "%%" + listWords[x]
    #     return string.join(listWords)
    return "%%" + s + "%"

############################################
def monospace(s):
    return "$$" + s + "$"

############################################
def subscript(s):
    return "__" + s + "_"

############################################
def superscript(s):
    return "^^" + s + "^"

############################################
def link(s):
    target = s['href']
    filenameOnly = target.split('.')[0]
    extension = target.split('.')[1]
    linkText = s.string
    audioExtension = ['wav', 'aiff', 'aifc', 'au', 'nist', 'flac', 'mp3']
    if extension == "man":
        return "@@" + filenameOnly + "|" + linkText + "@"
    elif extension == "praat":
        if s['alt']:
            args = string.split(s['alt'], "|")
            for x in range(0,len(args)):
                args[x] = doubleQuotes(args[x],2)
            argsStr = string.join(args, " ")
            return "@@\\SC" + doubleQuotes(target,2) + " " + argsStr + " " + "|" + linkText + "@"
        else:
            return "@@\\SC" + doubleQuotes(target,2) + " " + "|" + linkText + "@"
    elif extension in audioExtension:
        return "@@\\FI" + target + " " + "|" + linkText + "@"

############################################

allFiles = []
htmlList = []
for (dirpath, dirnames, filenames) in os.walk(os.getcwd()):
    allFiles.extend(filenames)

for x in range(0,len(allFiles)):
    if allFiles[x].endswith("html",len(allFiles[x])-4):
        htmlList.append(allFiles[x])

for inputFilename in htmlList:
    input = BeautifulSoup(open(inputFilename))

    address = input.address.string
    addressCleaned = address.strip()
    addressComps = addressCleaned.split('\n')
    intro = input.cite.string
    if len(addressComps) == 3:
        recordTime = addressComps[2]
    else:
        recordTime = '0'
    intro = input.cite.string

    #####################################
    outputFilename = inputFilename.split('.')[0] + ".man"
    output = open(outputFilename,"w")

    ###########################
    output.write("ManPagesTextFile\n")
    output.write(doubleQuotes(inputFilename.split('.')[0].capitalize()) + " " +
                 doubleQuotes(addressComps[0]) + " " + addressComps[1] + " " +
                 addressComps[2] + '\n')

    for child in input.body:
        if child.name is not None:
            if child.name == "cite":
                listChildren = []
                for x in child:
                    if x.name == "b":
                        temp = bold(x.string)
                        listChildren.append(temp)
                    elif x.name == "a":
                        temp = link(x)
                        listChildren.append(temp)
                    elif x.name == "i":
                        temp = italics(x.string)
                        listChildren.append(temp)
                    elif x.name == "kbd":
                        temp = monospace(x.string)
                        listChildren.append(temp)
                    elif x.name == "sub":
                        temp = subscript(x.string)
                        listChildren.append(temp)
                    elif x.name == "sup":
                        temp = superscript(x.string)
                        listChildren.append(temp)
                    else:
                        listChildren.append(str(x))
                output.write("<intro> " + doubleQuotes(string.join(listChildren, '')) + '\n')
            elif child.name == "h1":
                listChildren = []
                for x in child:
                    if x.name == "b":
                        temp = bold(x.string)
                        listChildren.append(temp)
                    elif x.name == "a":
                        temp = link(x)
                        listChildren.append(temp)
                    elif x.name == "i":
                        temp = italics(x.string)
                        listChildren.append(temp)
                    elif x.name == "kbd":
                        temp = monospace(x.string)
                        listChildren.append(temp)
                    elif x.name == "sub":
                        temp = subscript(x.string)
                        listChildren.append(temp)
                    elif x.name == "sup":
                        temp = superscript(x.string)
                        listChildren.append(temp)
                    else:
                        listChildren.append(str(x))
                output.write("<entry> " + doubleQuotes(string.join(listChildren, '')) + '\n')
            elif child.name == "blockquote":
                output.write("<definition> " + doubleQuotes(child.string) + '\n')
            elif child.name == "p":
                listChildren = []
                for x in child:
                    if x.name == "b":
                        temp = bold(x.string)
                        listChildren.append(temp)
                    elif x.name == "a":
                        temp = link(x)
                        listChildren.append(temp)
                    elif x.name == "i":
                        temp = italics(x.string)
                        listChildren.append(temp)
                    elif x.name == "kbd":
                        temp = monospace(x.string)
                        listChildren.append(temp)
                    elif x.name == "sub":
                        temp = subscript(x.string)
                        listChildren.append(temp)
                    elif x.name == "sup":
                        temp = superscript(x.string)
                        listChildren.append(temp)
                    else:
                        listChildren.append(str(x))
                output.write("<normal> " + doubleQuotes(string.join(listChildren, '')) + '\n')
            elif child.name == "address":
                continue
            elif child.name == "ul":
                if child.get('class') == ["noBullet"]:
                # if child.get('class'):
                    for item in child.find_all("li"):
                        listChildren = []
                        for x in item:
                            if x.name == "b":
                                temp = bold(x.string)
                                listChildren.append(temp)
                            elif x.name == "a":
                                temp = link(x)
                                listChildren.append(temp)
                            elif x.name == "i":
                                temp = italics(x.string)
                                listChildren.append(temp)
                            elif x.name == "kbd":
                                temp = monospace(x.string)
                                listChildren.append(temp)
                            elif x.name == "sub":
                                temp = subscript(x.string)
                                listChildren.append(temp)
                            elif x.name == "sup":
                                temp = superscript(x.string)
                                listChildren.append(temp)
                            else:
                                listChildren.append(str(x))
                        output.write("<list_item> " + "\"" + string.join(listChildren, '') + "\"" + '\n')
                else:
                    for item in child.find_all("li"):
                        listChildren = []
                        for x in item:
                            if x.name == "b":
                                temp = bold(x.string)
                                listChildren.append(temp)
                            elif x.name == "a":
                                temp = link(x)
                                listChildren.append(temp)
                            elif x.name == "i":
                                temp = italics(x.string)
                                listChildren.append(temp)
                            elif x.name == "kbd":
                                temp = monospace(x.string)
                                listChildren.append(temp)
                            elif x.name == "sub":
                                temp = subscript(x.string)
                                listChildren.append(temp)
                            elif x.name == "sup":
                                temp = superscript(x.string)
                                listChildren.append(temp)
                            else:
                                listChildren.append(str(x))
                        output.write("<list_item> \"\\bu " + string.join(listChildren, '') + "\"" + '\n')
            elif child.name == "code":
                width = child.get('width')
                height = child.get('height')
                output.write("<script> " + width + " " + height + " " + doubleQuotes(child.string) + '\n')
            else:
                continue

    output.close()
chkarypidis/html2PraatMan
html2PraatMan.py
Python
gpl-3.0
7,854
0.026993
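A quick way to sanity-check the converter above is to run its inline-markup mapping against a toy HTML fragment. The sketch below is self-contained (it re-declares two of the wrappers, and the sample paragraph is made up for illustration); it only demonstrates the ##...# and %%...% ManPages notation the script emits.

from bs4 import BeautifulSoup

# Re-declared locally so the snippet runs on its own.
def bold(s):
    return "##" + s + "#"

def italics(s):
    return "%%" + s + "%"

# Hypothetical input fragment, not taken from the Praat docs.
p = BeautifulSoup('<p>The <b>pitch</b> of a <i>vowel</i></p>', 'html.parser').p
pieces = []
for x in p:
    if x.name == 'b':
        pieces.append(bold(x.string))
    elif x.name == 'i':
        pieces.append(italics(x.string))
    else:
        pieces.append(str(x))
print(''.join(pieces))  # The ##pitch# of a %%vowel%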
from unittest import skip from ion.services.dm.test.dm_test_case import DMTestCase from pyon.public import PRED, OT, RT from pyon.util.log import log from ion.services.dm.test.test_dm_end_2_end import DatasetMonitor from ion.services.dm.utility.granule import RecordDictionaryTool from nose.plugins.attrib import attr import numpy as np import calendar from datetime import datetime @attr(group='dm') class TestSiteDataProducts(DMTestCase): def create_device_site_deployment(self, dep_name="Deployment", starting=''): from interface.objects import StreamConfiguration, StreamConfigurationType, InstrumentDevice from interface.objects import InstrumentModel, PlatformAgent, InstrumentSite, TemporalBounds, Deployment from interface.objects import RemotePlatformDeploymentContext stream_conf = StreamConfiguration(stream_name="CTD 1 Parsed Stream", parameter_dictionary_name='ctd_parsed_param_dict', stream_type=StreamConfigurationType.PARSED) pdict_id = self.dataset_management.read_parameter_dictionary_by_name(name='ctd_parsed_param_dict') stream_def_id = self.create_stream_definition(name='CTD 1', parameter_dictionary_id=pdict_id) data_product_id = self.create_data_product(name="DDP_1", stream_def_id=stream_def_id, stream_configuration=stream_conf) self.activate_data_product(data_product_id) dataset_id = self.RR2.find_dataset_id_of_data_product_using_has_dataset(data_product_id) stream_def = self.resource_registry.find_objects(data_product_id, PRED.hasStreamDefinition)[0][0] param_dict = self.resource_registry.find_objects(stream_def._id, PRED.hasParameterDictionary)[0][0] # Add data to the DataProduct dataset_monitor = DatasetMonitor(dataset_id) self.addCleanup(dataset_monitor.stop) rdt = self.ph.get_rdt(stream_def._id) rdt_ = self.ph.rdt_for_data_product(data_product_id) self.assertEquals(rdt.fields, rdt_.fields) rdt['time'] = [0, 1, 2, 3] rdt['temp'] = [10, 11, 12, 13] self.ph.publish_rdt_to_data_product(data_product_id, rdt) self.assertTrue(dataset_monitor.wait()) # Create Device device = InstrumentDevice(name='Device 1') device_id = self.instrument_management.create_instrument_device(device) self.data_acquisition_management.register_instrument(device_id) self.data_acquisition_management.assign_data_product(device_id, data_product_id) # Create Model model = InstrumentModel(name='Model 1') model_id = self.instrument_management.create_instrument_model(model) self.instrument_management.assign_instrument_model_to_instrument_device(model_id, device_id) # Create AgentDefinition ad = PlatformAgent(stream_configurations=[stream_conf]) ad_id, _ = self.resource_registry.create(ad) # Create Site site = InstrumentSite(name='Site 1', stream_configurations=[stream_conf]) site_id, _ = self.resource_registry.create(site) self.resource_registry.create_association(site_id, PRED.hasModel, model_id) self.resource_registry.create_association(site_id, PRED.hasAgentDefinition, ad_id) # TemporalBounds of the Deployment temp_bounds = TemporalBounds(start_datetime=starting, end_datetime='') # Create Deployment deployment = Deployment(name=dep_name, type="RemotePlatform", context=RemotePlatformDeploymentContext(), constraint_list=[temp_bounds]) deployment_id = self.observatory_management.create_deployment(deployment=deployment, site_id=site_id, device_id=device_id) return site_id, device_id, dataset_id, deployment_id, param_dict, data_product_id @attr('PRELOAD') def test_preload_creation(self): from interface.objects import DataProductTypeEnum self.preload_alpha() # IDs from Preload sheets deployment_id = "DEP_BTST_1" site_id 
= "IS_BTST_SBE37" device_id = "ID_BTST_SBE37" #deployment_id = "DEP_BTST_2" #site_id = "IS_BTST_CTDSIM0" #device_id = "ID_BTST_CTDSIM0" deployment_obj = self.container.resource_registry.find_resources_ext(alt_id=deployment_id, alt_id_ns='PRE')[0][0] site_obj = self.container.resource_registry.find_resources_ext(alt_id=site_id, alt_id_ns='PRE')[0][0] device_obj = self.container.resource_registry.find_resources_ext(alt_id=device_id, alt_id_ns='PRE')[0][0] # Check associations self.assertEquals(self.resource_registry.find_objects(site_obj._id, PRED.hasDevice, id_only=True)[0][0], device_obj._id) self.assertEquals(self.resource_registry.find_objects(site_obj._id, PRED.hasPrimaryDeployment, id_only=True)[0][0], deployment_obj._id) self.assertEquals(self.resource_registry.find_objects(site_obj._id, PRED.hasDeployment, id_only=True)[0][0], deployment_obj._id) self.assertEquals(self.resource_registry.find_objects(device_obj._id, PRED.hasPrimaryDeployment, id_only=True)[0][0], deployment_obj._id) self.assertEquals(self.resource_registry.find_objects(device_obj._id, PRED.withinDeployment, id_only=True)[0][0], deployment_obj._id) self.assertEquals(self.resource_registry.find_objects(device_obj._id, PRED.hasDeployment, id_only=True)[0][0], deployment_obj._id) # stream_name to dataset_id, for lookup later device_stream_names = {} device_data_products, _ = self.resource_registry.find_objects(device_obj._id, PRED.hasOutputProduct) for ddp in device_data_products: stream_def = self.resource_registry.find_objects(ddp._id, PRED.hasStreamDefinition)[0][0] dataset_id = self.RR2.find_dataset_id_of_data_product_using_has_dataset(ddp._id) device_stream_names[stream_def.name] = dataset_id site_data_products, _ = self.resource_registry.find_objects(site_obj._id, PRED.hasOutputProduct) for sdp in site_data_products: self.assertEquals(sdp.category, DataProductTypeEnum.SITE) self.assertEquals(len(sdp.dataset_windows), 1) stream_def = self.resource_registry.find_objects(sdp._id, PRED.hasStreamDefinition)[0][0] assert sdp.dataset_windows[0].dataset_id == device_stream_names.get(stream_def.name) assert sdp.dataset_windows[0].bounds.start_datetime == deployment_obj.start_datetime assert sdp.dataset_windows[0].bounds.end_datetime == deployment_obj.end_datetime self.observatory_management.deactivate_deployment(deployment_id=deployment_obj._id) deployment_obj = self.resource_registry.read(deployment_obj._id) for sdp in site_data_products: self.assertEquals(sdp.category, DataProductTypeEnum.SITE) self.assertEquals(len(sdp.dataset_windows), 1) stream_def = self.resource_registry.find_objects(sdp._id, PRED.hasStreamDefinition)[0][0] assert sdp.dataset_windows[0].dataset_id == device_stream_names.get(stream_def.name) assert sdp.dataset_windows[0].bounds.start_datetime == deployment_obj.start_datetime assert sdp.dataset_windows[0].bounds.end_datetime == deployment_obj.end_datetime @attr('INT') def test_primary_deployment(self): # First deployment starting = str(calendar.timegm(datetime(2014, 1, 1, 0).timetuple())) site_1_id, device_1_id, dataset_1_id, deployment_1_id, param_dict_a, data_product_1_id = self.create_device_site_deployment(dep_name="Site 1 - Device 1", starting=starting) self.assertEquals([], self.resource_registry.find_objects(device_1_id, PRED.hasPrimaryDeployment, id_only=True)[0]) self.assertEquals([], self.resource_registry.find_objects(site_1_id, PRED.hasPrimaryDeployment, id_only=True)[0]) self.assertEquals([], self.resource_registry.find_objects(device_1_id, PRED.withinDeployment, id_only=True)[0]) 
        self.observatory_management.activate_deployment(deployment_id=deployment_1_id)
        self.assertEquals(deployment_1_id, self.resource_registry.find_objects(device_1_id, PRED.hasPrimaryDeployment, id_only=True)[0][0])
        self.assertEquals(deployment_1_id, self.resource_registry.find_objects(site_1_id, PRED.hasPrimaryDeployment, id_only=True)[0][0])
        self.assertEquals(deployment_1_id, self.resource_registry.find_objects(device_1_id, PRED.withinDeployment, id_only=True)[0][0])

        self.observatory_management.deactivate_deployment(deployment_id=deployment_1_id)
        self.assertEquals([], self.resource_registry.find_objects(device_1_id, PRED.hasPrimaryDeployment, id_only=True)[0])
        self.assertEquals([], self.resource_registry.find_objects(site_1_id, PRED.hasPrimaryDeployment, id_only=True)[0])
        self.assertEquals(deployment_1_id, self.resource_registry.find_objects(device_1_id, PRED.withinDeployment, id_only=True)[0][0])

    @attr('INT')
    @skip("Multiple deployments of the same device are not functional. State transitions need to be looked at.")
    def test_multiple_deployments(self):
        from interface.objects import DataProductTypeEnum, TemporalBounds, Deployment, RemotePlatformDeploymentContext

        # First deployment
        starting = str(calendar.timegm(datetime(2014, 1, 1, 0).timetuple()))
        site_1_id, device_1_id, dataset_1_id, deployment_1_id, param_dict_a, data_product_1_id = \
            self.create_device_site_deployment(dep_name="Site 1 - Device 1", starting=starting)

        site = self.resource_registry.read(site_1_id)
        # Create SDPs
        # This logic is also in preload, but testing preload is painful.
        # Testing it manually here for now.
        for i, scfg in enumerate(site.stream_configurations):
            pdict = self.container.resource_registry.find_resources(name=scfg.parameter_dictionary_name, restype=RT.ParameterDictionary, id_only=False)[0][0]
            # Clone/Create the new ParameterDictionary
            del pdict._id
            del pdict._rev
            sdp_pdict_id, _ = self.container.resource_registry.create(pdict)
            stream_def_id = self.create_stream_definition(name='CTD 1 - SDP', parameter_dictionary_id=sdp_pdict_id)
            sdp_id = self.create_data_product(name="SDP_%d" % i, stream_def_id=stream_def_id, stream_configuration=scfg)
            self.activate_data_product(sdp_id)
            self.container.resource_registry.create_association(subject=site_1_id, predicate=PRED.hasOutputProduct, object=sdp_id, assoc_type=RT.DataProduct)
            sdp = self.resource_registry.read(sdp_id)
            sdp.category = DataProductTypeEnum.SITE
            self.resource_registry.update(sdp)

        self.observatory_management.activate_deployment(deployment_id=deployment_1_id)
        self.assertEquals(deployment_1_id, self.resource_registry.find_objects(device_1_id, PRED.hasPrimaryDeployment, id_only=True)[0][0])
        self.assertEquals(deployment_1_id, self.resource_registry.find_objects(site_1_id, PRED.hasPrimaryDeployment, id_only=True)[0][0])
        self.assertEquals(deployment_1_id, self.resource_registry.find_objects(device_1_id, PRED.withinDeployment, id_only=True)[0][0])

        self.observatory_management.deactivate_deployment(deployment_id=deployment_1_id)
        self.assertEquals([], self.resource_registry.find_objects(device_1_id, PRED.hasPrimaryDeployment, id_only=True)[0])
        self.assertEquals([], self.resource_registry.find_objects(site_1_id, PRED.hasPrimaryDeployment, id_only=True)[0])
        self.assertEquals(deployment_1_id, self.resource_registry.find_objects(device_1_id, PRED.withinDeployment, id_only=True)[0][0])

        sdps, _ = self.resource_registry.find_objects(site_1_id, PRED.hasOutputProduct)
        for sdp in sdps:
            self.assertEquals(sdp.category, DataProductTypeEnum.SITE)
            self.assertEquals(len(sdp.dataset_windows), 1)
            assert sdp.dataset_windows[0].dataset_id == dataset_1_id
            assert sdp.dataset_windows[0].bounds.start_datetime == starting
            assert int(sdp.dataset_windows[0].bounds.end_datetime) - calendar.timegm(datetime.utcnow().timetuple()) < 10

        # Second deployment (same site and device)
        starting2 = str(calendar.timegm(datetime(2014, 1, 5, 0).timetuple()))
        temp_bounds2 = TemporalBounds(start_datetime=starting2, end_datetime='')
        deployment_2 = Deployment(name="Site 1 - Device 1 - v2", type="RemotePlatform", context=RemotePlatformDeploymentContext(), constraint_list=[temp_bounds2])
        deployment_2_id = self.observatory_management.create_deployment(deployment=deployment_2, site_id=site_1_id, device_id=device_1_id)

        self.observatory_management.activate_deployment(deployment_id=deployment_2_id)
        self.assertEquals(deployment_2_id, self.resource_registry.find_objects(device_1_id, PRED.hasPrimaryDeployment, id_only=True)[0][0])
        self.assertEquals(deployment_2_id, self.resource_registry.find_objects(site_1_id, PRED.hasPrimaryDeployment, id_only=True)[0][0])
        self.assertItemsEqual([deployment_1_id, deployment_2_id], self.resource_registry.find_objects(device_1_id, PRED.withinDeployment, id_only=True)[0])

        self.observatory_management.deactivate_deployment(deployment_id=deployment_1_id)
        self.assertEquals([], self.resource_registry.find_objects(device_1_id, PRED.hasPrimaryDeployment, id_only=True)[0])
        self.assertEquals([], self.resource_registry.find_objects(site_1_id, PRED.hasPrimaryDeployment, id_only=True)[0])
        self.assertItemsEqual([deployment_1_id, deployment_2_id], self.resource_registry.find_objects(device_1_id, PRED.withinDeployment, id_only=True)[0])

        sdps, _ = self.resource_registry.find_objects(site_1_id, PRED.hasOutputProduct)
        for sdp in sdps:
            self.assertEquals(sdp.category, DataProductTypeEnum.SITE)
            self.assertEquals(len(sdp.dataset_windows), 2)
            assert sdp.dataset_windows[0].dataset_id == dataset_1_id
            assert sdp.dataset_windows[0].bounds.start_datetime == starting
            assert int(sdp.dataset_windows[0].bounds.end_datetime) - calendar.timegm(datetime.utcnow().timetuple()) < 10

    @attr('INT')
    def test_positive_single_device_single_site(self):
        from interface.objects import DataProductTypeEnum

        starting = str(calendar.timegm(datetime(2014, 1, 1, 0).timetuple()))
        site_1_id, device_1_id, dataset_1_id, deployment_1_id, param_dict_a, data_product_1_id = \
            self.create_device_site_deployment(dep_name="Site 1 - Device 1", starting=starting)

        site = self.resource_registry.read(site_1_id)
        # Create SDPs
        # This logic is also in preload, but testing preload is painful.
        # Testing it manually here for now.
for i, scfg in enumerate(site.stream_configurations): pdict = self.container.resource_registry.find_resources(name=scfg.parameter_dictionary_name, restype=RT.ParameterDictionary, id_only=False)[0][0] # Clone/Create the new ParameterDictionary del pdict._id del pdict._rev sdp_pdict_id, _ = self.container.resource_registry.create(pdict) stream_def_id = self.create_stream_definition(name='CTD 1 - SDP', parameter_dictionary_id=sdp_pdict_id) sdp_id = self.create_data_product(name="SDP_%d" % i, stream_def_id=stream_def_id, stream_configuration=scfg) self.activate_data_product(sdp_id) self.container.resource_registry.create_association(subject=site_1_id, predicate=PRED.hasOutputProduct, object=sdp_id, assoc_type=RT.DataProduct) sdp = self.resource_registry.read(sdp_id) sdp.category = DataProductTypeEnum.SITE self.resource_registry.update(sdp) self.observatory_management.activate_deployment(deployment_id=deployment_1_id) # Get Deployment start time deployment_obj = self.resource_registry.read(deployment_1_id) for constraint in deployment_obj.constraint_list: if constraint.type_ == OT.TemporalBounds: assert constraint.start_datetime == starting # Get information about the new SiteDataProduct that should have been created site_data_product_1_id = self.resource_registry.find_objects(site_1_id, PRED.hasOutputProduct, id_only=True)[0][0] stream_def_2_id = self.resource_registry.find_objects(site_data_product_1_id, PRED.hasStreamDefinition, id_only=True)[0][0] param_dict_b = self.resource_registry.find_objects(stream_def_2_id, PRED.hasParameterDictionary)[0][0] # Check associations self.assertEquals(self.resource_registry.find_objects(site_1_id, PRED.hasDevice, id_only=True)[0][0], device_1_id) self.assertEquals(self.resource_registry.find_objects(site_1_id, PRED.hasDeployment, id_only=True)[0][0], deployment_1_id) self.assertEquals(self.resource_registry.find_objects(device_1_id, PRED.hasDeployment, id_only=True)[0][0], deployment_1_id) self.assertEquals(self.resource_registry.find_objects(device_1_id, PRED.hasOutputProduct, id_only=True)[0][0], data_product_1_id) self.assertEquals(self.resource_registry.find_objects(site_1_id, PRED.hasOutputProduct, id_only=True)[0][0], site_data_product_1_id) site_data_product_1 = self.resource_registry.find_objects(site_1_id, PRED.hasOutputProduct)[0][0] self.assertEquals(site_data_product_1.category, DataProductTypeEnum.SITE) self.assertEquals(len(site_data_product_1.dataset_windows), 1) assert site_data_product_1.dataset_windows[0].dataset_id == dataset_1_id assert site_data_product_1.dataset_windows[0].bounds.start_datetime == starting assert site_data_product_1.dataset_windows[0].bounds.end_datetime == '' # Check that param dicts have equal members self.assertEquals(param_dict_a.name, param_dict_b.name) self.observatory_management.deactivate_deployment(deployment_id=deployment_1_id) # Verify the window has an ending time site_data_product_1 = self.resource_registry.find_objects(site_1_id, PRED.hasOutputProduct)[0][0] self.assertEquals(site_data_product_1.category, DataProductTypeEnum.SITE) self.assertEquals(len(site_data_product_1.dataset_windows), 1) assert site_data_product_1.dataset_windows[0].dataset_id == dataset_1_id assert site_data_product_1.dataset_windows[0].bounds.start_datetime == starting assert int(site_data_product_1.dataset_windows[0].bounds.end_datetime) - calendar.timegm(datetime.utcnow().timetuple()) < 10 # Verify that data is there granule = self.data_retriever.retrieve(dataset_1_id) rdt = RecordDictionaryTool.load_from_granule(granule) 
np.testing.assert_allclose(rdt['time'], np.arange(4)) np.testing.assert_allclose(rdt['temp'], np.arange(10, 14))
ooici/coi-services
ion/services/dm/test/test_site_data_products.py
Python
bsd-2-clause
20,250
0.007407
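The deployment bounds in the tests above travel as strings of UTC epoch seconds built with calendar.timegm. As a minimal, self-contained illustration of that convention (using the same 2014-01-01 example date as the tests):

import calendar
from datetime import datetime

# TemporalBounds start/end values are strings of UTC epoch seconds.
starting = str(calendar.timegm(datetime(2014, 1, 1, 0).timetuple()))
print(starting)                                  # '1388534400'

# Converting back, e.g. to compare against "now" as the tests do:
print(datetime.utcfromtimestamp(int(starting)))  # 2014-01-01 00:00:00
age = calendar.timegm(datetime.utcnow().timetuple()) - int(starting)
print(age > 0)                                   # True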
#!/usr/bin/env python
# -*- coding: utf-8 -*-
## @copyright
# Software License Agreement (BSD License)
#
# Copyright (c) 2017, Jorge De La Cruz, Carmen Castano.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# 1. Redistributions of source code must retain the above copyright notice, this
#    list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright notice,
#    this list of conditions and the following disclaimer in the documentation
#    and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.

__author__ = 'Jorge De La Cruz, Carmen Castano'
__copyright__ = 'Copyright (c) 2017 Jorge De La Cruz, Carmen Castano'
__license__ = 'BSD'
__maintainer__ = 'Jorge De La Cruz'
__email__ = 'delacruz@igm.rwth-aachen.de'

import sys
## Path to FreeCAD library
# change this by your FreeCAD library path
sys.path.append('/usr/lib/freecad/lib')
import FreeCAD as App
import Import


class GetParameters:
    def __init__(self):
        self.filePath = '/home/jdelacruz/Downloads/KonzeptB_lang090715.stp'
        self.parts = []
        self.names = []
        self.labels = []

    def loadCAD(self):
        print('Starting to load the CAD file, please be patient!...')
        Import.open(self.filePath)
        self.handler = App.ActiveDocument
        self.parts = self.handler.Objects
        print('CAD model loaded!')

    def writeTxt(self):
        # 'w' mode so that rerunning the script does not append a second
        # header and table to an existing data.txt
        f = open('data.txt', 'w')
        f.write('Name\tLabel\n')
        for part in self.parts:
            self.names.append(part.Name)
            self.labels.append(part.Label)
            f.write(part.Name + '\t' + part.Label + '\n')
        f.close()
        print('The txt file has been created successfully!')


if __name__ == '__main__':
    data = GetParameters()
    data.loadCAD()
    data.writeTxt()
jdelacruz26/misccode
cad2xls.py
Python
bsd-3-clause
2,955
0.004061
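Since the script is named cad2xls.py but emits a tab-separated data.txt, a delimiter-safe table writer is the natural next step. This is only a sketch (Python 3, standard csv module, with a hypothetical parts list standing in for FreeCAD's document objects), not code from the repository:

import csv

# Hypothetical (Name, Label) pairs standing in for FreeCAD document objects.
parts = [('Part001', 'Bracket'), ('Part002', 'Bolt M6')]

# newline='' is required by the csv module on Python 3.
with open('data.csv', 'w', newline='') as f:
    writer = csv.writer(f, delimiter='\t')
    writer.writerow(['Name', 'Label'])
    for name, label in parts:
        writer.writerow([name, label])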
# -*- coding: utf-8 -*-
"""
    user

    Add the employee relationship to the nereid user

    :copyright: (c) 2012-2014 by Openlabs Technologies & Consulting (P) Limited
    :license: GPLv3, see LICENSE for more details.
"""
from datetime import datetime

from nereid import request, jsonify, login_required, route
from trytond.pool import Pool, PoolMeta
from trytond.model import fields

__all__ = ['NereidUser']
__metaclass__ = PoolMeta


class NereidUser:
    """
    Add employee
    """
    __name__ = "nereid.user"

    #: Allow the nereid user to be connected to an internal employee. This
    #: indicates that the user is an employee and not a regular participant
    employee = fields.Many2One('company.employee', 'Employee', select=True)

    member_of_projects = fields.One2Many(
        "project.work.member", "user", "Member of Projects"
    )

    def serialize(self, purpose=None):
        '''
        Serialize NereidUser and return a dictionary.
        '''
        result = super(NereidUser, self).serialize(purpose)
        result['image'] = {
            'url': self.get_profile_picture(size=20),
        }
        result['email'] = self.email
        result['employee'] = self.employee and self.employee.id or None
        result['permissions'] = [p.value for p in self.permissions]
        return result

    @classmethod
    @route("/me", methods=["GET", "POST"])
    @login_required
    def profile(cls):
        """
        User profile
        """
        if request.method == "GET" and request.is_xhr:
            user, = cls.browse([request.nereid_user.id])
            return jsonify(user.serialize())
        return super(NereidUser, cls).profile()

    def is_admin_of_project(self, project):
        """
        Check if the user is an admin member of the given project

        :param project: Active record of project
        """
        if request.nereid_user.has_permissions(['project.admin']):
            return True
        project = project.project
        assert project.type == 'project'
        for member in project.members:
            if member.user == self and member.role == 'admin':
                return True
        return False

    def hours_reported_today(self):
        """
        Returns the number of hours the nereid_user has done on the
        current date.
        """
        Timesheet = Pool().get('timesheet.line')
        if not self.employee:
            return 0.00
        current_date = datetime.utcnow().date()
        lines = Timesheet.search([
            ('date', '=', current_date),
            ('employee', '=', self.employee.id),
        ])
        return sum(line.hours for line in lines)
sharoonthomas/nereid-project
user.py
Python
gpl-3.0
2,674
0
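The serialize() override above follows a common extension pattern: call the parent implementation, then enrich the dict it returns. A stripped-down, self-contained sketch of just that pattern (the classes here are stand-ins, not Tryton/Nereid models):

class Base(object):
    def serialize(self, purpose=None):
        # Stand-in for the upstream serializer.
        return {'id': 1, 'display_name': 'John Doe'}


class User(Base):
    email = 'user@example.com'

    def serialize(self, purpose=None):
        # Extend, never replace, the parent's result.
        result = super(User, self).serialize(purpose)
        result['email'] = self.email
        return result


print(User().serialize())
# {'id': 1, 'display_name': 'John Doe', 'email': 'user@example.com'}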
# This Python file uses the following encoding: utf-8 """autogenerated by genpy from nasa_r2_common_msgs/ResetTableSceneRequest.msg. Do not edit.""" import sys python3 = True if sys.hexversion > 0x03000000 else False import genpy import struct class ResetTableSceneRequest(genpy.Message): _md5sum = "ba4b0b221fb425ac5eaf73f71ae34971" _type = "nasa_r2_common_msgs/ResetTableSceneRequest" _has_header = False #flag to mark the presence of a Header object _full_text = """bool reset """ __slots__ = ['reset'] _slot_types = ['bool'] def __init__(self, *args, **kwds): """ Constructor. Any message fields that are implicitly/explicitly set to None will be assigned a default value. The recommend use is keyword arguments as this is more robust to future message changes. You cannot mix in-order arguments and keyword arguments. The available fields are: reset :param args: complete set of field values, in .msg order :param kwds: use keyword arguments corresponding to message field names to set specific fields. """ if args or kwds: super(ResetTableSceneRequest, self).__init__(*args, **kwds) #message fields cannot be None, assign default values for those that are if self.reset is None: self.reset = False else: self.reset = False def _get_types(self): """ internal API method """ return self._slot_types def serialize(self, buff): """ serialize message into buffer :param buff: buffer, ``StringIO`` """ try: buff.write(_struct_B.pack(self.reset)) except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(_x)))) except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(_x)))) def deserialize(self, str): """ unpack serialized message in str into this message instance :param str: byte array of serialized message, ``str`` """ try: end = 0 start = end end += 1 (self.reset,) = _struct_B.unpack(str[start:end]) self.reset = bool(self.reset) return self except struct.error as e: raise genpy.DeserializationError(e) #most likely buffer underfill def serialize_numpy(self, buff, numpy): """ serialize message with numpy array types into buffer :param buff: buffer, ``StringIO`` :param numpy: numpy python module """ try: buff.write(_struct_B.pack(self.reset)) except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(_x)))) except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(_x)))) def deserialize_numpy(self, str, numpy): """ unpack serialized message in str into this message instance using numpy for array types :param str: byte array of serialized message, ``str`` :param numpy: numpy python module """ try: end = 0 start = end end += 1 (self.reset,) = _struct_B.unpack(str[start:end]) self.reset = bool(self.reset) return self except struct.error as e: raise genpy.DeserializationError(e) #most likely buffer underfill _struct_I = genpy.struct_I _struct_B = struct.Struct("<B") # This Python file uses the following encoding: utf-8 """autogenerated by genpy from nasa_r2_common_msgs/ResetTableSceneResponse.msg. 
Do not edit.""" import sys python3 = True if sys.hexversion > 0x03000000 else False import genpy import struct class ResetTableSceneResponse(genpy.Message): _md5sum = "eb13ac1f1354ccecb7941ee8fa2192e8" _type = "nasa_r2_common_msgs/ResetTableSceneResponse" _has_header = False #flag to mark the presence of a Header object _full_text = """bool result """ __slots__ = ['result'] _slot_types = ['bool'] def __init__(self, *args, **kwds): """ Constructor. Any message fields that are implicitly/explicitly set to None will be assigned a default value. The recommend use is keyword arguments as this is more robust to future message changes. You cannot mix in-order arguments and keyword arguments. The available fields are: result :param args: complete set of field values, in .msg order :param kwds: use keyword arguments corresponding to message field names to set specific fields. """ if args or kwds: super(ResetTableSceneResponse, self).__init__(*args, **kwds) #message fields cannot be None, assign default values for those that are if self.result is None: self.result = False else: self.result = False def _get_types(self): """ internal API method """ return self._slot_types def serialize(self, buff): """ serialize message into buffer :param buff: buffer, ``StringIO`` """ try: buff.write(_struct_B.pack(self.result)) except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(_x)))) except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(_x)))) def deserialize(self, str): """ unpack serialized message in str into this message instance :param str: byte array of serialized message, ``str`` """ try: end = 0 start = end end += 1 (self.result,) = _struct_B.unpack(str[start:end]) self.result = bool(self.result) return self except struct.error as e: raise genpy.DeserializationError(e) #most likely buffer underfill def serialize_numpy(self, buff, numpy): """ serialize message with numpy array types into buffer :param buff: buffer, ``StringIO`` :param numpy: numpy python module """ try: buff.write(_struct_B.pack(self.result)) except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(_x)))) except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(_x)))) def deserialize_numpy(self, str, numpy): """ unpack serialized message in str into this message instance using numpy for array types :param str: byte array of serialized message, ``str`` :param numpy: numpy python module """ try: end = 0 start = end end += 1 (self.result,) = _struct_B.unpack(str[start:end]) self.result = bool(self.result) return self except struct.error as e: raise genpy.DeserializationError(e) #most likely buffer underfill _struct_I = genpy.struct_I _struct_B = struct.Struct("<B") class ResetTableScene(object): _type = 'nasa_r2_common_msgs/ResetTableScene' _md5sum = 'c95f6c9db0edf7da4840d218c33352c7' _request_class = ResetTableSceneRequest _response_class = ResetTableSceneResponse
mkhuthir/catkin_ws
src/chessbot/devel/lib/python2.7/dist-packages/nasa_r2_common_msgs/srv/_ResetTableScene.py
Python
gpl-3.0
6,933
0.016443
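The generated request/response classes above ultimately reduce each bool field to a single little-endian byte via the struct module. The same round trip in isolation, as a self-contained sketch:

import struct

# Same Struct the generated code caches at module level.
_struct_B = struct.Struct("<B")

buff = _struct_B.pack(True)      # b'\x01'
(raw,) = _struct_B.unpack(buff)  # 1 (an int, not a bool)
value = bool(raw)                # True; mirrors the bool() cast in deserialize()
print(repr(buff), raw, value)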
# -*- encoding: utf-8 -*- """Test class for Foreman Discovery :Requirement: Discoveredhost :CaseAutomation: Automated :CaseLevel: Acceptance :CaseComponent: UI :TestType: Functional :CaseImportance: High :Upstream: No """ import subprocess import time from fauxfactory import gen_string from nailgun import entities from robottelo.decorators import ( run_in_one_thread, run_only_on, skip_if_not_set, stubbed, tier3, upgrade, ) from robottelo.api.utils import configure_provisioning from robottelo.libvirt_discovery import LibvirtGuest from robottelo.test import UITestCase from robottelo.ui.base import UIError from robottelo.ui.factory import ( edit_param, make_discoveryrule, ) from robottelo.ui.locators import common_locators, locators, tab_locators from robottelo.ui.session import Session from time import sleep @run_in_one_thread class DiscoveryTestCase(UITestCase): """Implements Foreman discovery tests in UI.""" def _edit_discovery_fact_column_param(self, session, param_value): """ Edit the 'discovery_fact_column' parameter from settings menu. User can populate a new column on 'Discovered Hosts' page by setting the value of 'discovery_fact_column' """ tab_locator = tab_locators['settings.tab_discovered'] param_name = 'discovery_fact_column' edit_param( session=session, tab_locator=tab_locator, param_name=param_name, value_type='input', param_value=param_value, ) saved_element = self.settings.get_saved_value( tab_locator, param_name) self.assertEqual(param_value, saved_element) def _ping_host(self, host, timeout=60): """Helper to ensure given IP/hostname is reachable after reboot. :param host: A string. The IP or hostname of host. :param int timeout: The polling timeout in seconds. """ timeup = time.time() + int(timeout) while True: command = subprocess.Popen( 'ping -c1 {0}; echo $?'.format(host), stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True ) output = command.communicate()[0] # Checking the return code of ping is 0 if time.time() > timeup: return False if int(output.split()[-1]) == 0: return True else: time.sleep(5) @classmethod @skip_if_not_set('vlan_networking') def setUpClass(cls): """Steps to Configure foreman discovery 1. Build PXE default template 2. Create Organization/Location 3. Update Global parameters to set default org and location for discovered hosts. 4. Enable auto_provision flag to perform discovery via discovery rules. 
""" super(DiscoveryTestCase, cls).setUpClass() # Build PXE default template to get default PXE file entities.ConfigTemplate().build_pxe_default() # Create Org and location cls.org = entities.Organization(name=gen_string('alpha')).create() cls.org_name = cls.org.name cls.loc = entities.Location( name=gen_string('alpha'), organization=[cls.org], ).create() # Update default org and location params to place discovered host cls.discovery_loc = entities.Setting().search( query={'search': 'name="discovery_location"'})[0] cls.discovery_loc.value = cls.loc.name cls.discovery_loc.update({'value'}) cls.discovery_org = entities.Setting().search( query={'search': 'name="discovery_organization"'})[0] cls.discovery_org.value = cls.org.name cls.discovery_org.update({'value'}) # Enable flag to auto provision discovered hosts via discovery rules cls.discovery_auto = entities.Setting().search( query={'search': 'name="discovery_auto"'})[0] cls.default_discovery_auto = str(cls.discovery_auto.value) cls.discovery_auto.value = 'True' cls.discovery_auto.update({'value'}) cls.config_env = configure_provisioning(org=cls.org, loc=cls.loc) @classmethod def tearDownClass(cls): """Restore default 'discovery_auto' global setting's value""" cls.discovery_auto.value = cls.default_discovery_auto cls.discovery_auto.update({'value'}) super(DiscoveryTestCase, cls).tearDownClass() @run_only_on('sat') @tier3 @upgrade def test_positive_pxe_based_discovery(self): """Discover a host via PXE boot by setting "proxy.type=proxy" in PXE default :id: 43a8857d-2f08-436e-97fb-ffec6a0c84dd :Setup: Provisioning should be configured :Steps: PXE boot a host/VM :expectedresults: Host should be successfully discovered :CaseLevel: System """ with Session(self) as session: session.nav.go_to_select_org(self.org_name) with LibvirtGuest() as pxe_host: hostname = pxe_host.guest_name self.assertTrue( self.discoveredhosts.waitfordiscoveredhost(hostname) ) self.assertIsNotNone(self.discoveredhosts.search(hostname)) @run_only_on('sat') @tier3 @upgrade def test_positive_pxe_less_with_dhcp_unattended(self): """Discover a host with dhcp via bootable discovery ISO by setting "proxy.type=proxy" in PXE default in unattended mode. :id: fc13167f-6fa0-4fe5-8584-7716292866ce :Setup: Provisioning should be configured :Steps: Boot a host/VM using modified discovery ISO. :expectedresults: Host should be successfully discovered :CaseLevel: System """ with Session(self) as session: session.nav.go_to_select_org(self.org_name) with LibvirtGuest(boot_iso=True) as pxe_less_host: hostname = pxe_less_host.guest_name self.assertTrue( self.discoveredhosts.waitfordiscoveredhost(hostname) ) self.assertIsNotNone(self.discoveredhosts.search(hostname)) @run_only_on('sat') @stubbed() @tier3 def test_positive_pxe_less_with_dhcp_semiauto(self): """Discover a host with dhcp via bootable discovery ISO in semi-automated mode. :id: 05c88618-6f15-4eb8-8501-3505160c5450 :Setup: Provisioning should be configured :Steps: Boot a host/VM using discovery ISO :expectedresults: Host should be successfully discovered :caseautomation: notautomated :CaseLevel: System """ @run_only_on('sat') @stubbed() @tier3 def test_positive_pxe_less_with_dhcp_interactively(self): """Discover a host with dhcp via bootable discovery ISO using interactive TUI mode. 
:id: 08780627-9ac1-4837-88eb-df673d974d05 :Setup: Provisioning should be configured :Steps: Boot a host/VM using discovery ISO :expectedresults: Host should be successfully discovered :caseautomation: notautomated :CaseLevel: System """ @run_only_on('sat') @stubbed() @tier3 def test_positive_pxe_less_without_dhcp_interactively(self): """Discover a host with single NIC on a network without DHCP and PXE using ISO image in interactive TUI interface. :id: 9703eb00-9857-4076-8b83-031a58d7c1cd :expectedresults: Host should be discovered successfully :caseautomation: notautomated :CaseLevel: System """ @run_only_on('sat') @stubbed() @tier3 def test_positive_pxe_less_without_dhcp_semiauto(self): """Discover a host with single NIC on a network without DHCP and PXE using ISO image in semi-automated mode. :id: 8254a85f-21c8-4483-b453-15126762f6e5 :expectedresults: Host should be discovered successfully :caseautomation: notautomated :CaseLevel: System """ @run_only_on('sat') @stubbed() @tier3 def test_positive_pxe_less_without_dhcp_unattended(self): """Discover a host with single NIC on a network without DHCP and PXE using ISO image in unattended mode. :id: ae75173f-8358-4886-9420-06cff3a8510e :expectedresults: Host should be discovered successfully :caseautomation: notautomated :CaseLevel: System """ @run_only_on('sat') @stubbed() @tier3 def test_positive_discover_pxe_less_with_efi_host_interatively(self): """Discover a EFI host with single NIC on a network using ISO image in interactive TUI mode. :id: f13fd843-6b39-4c5e-bb7a-b9af9e71eb7b :expectedresults: Host should be discovered successfully :caseautomation: notautomated :CaseLevel: System """ @run_only_on('sat') @stubbed() @tier3 def test_positive_discover_pxe_less_with_efi_host_unattended(self): """Discover a EFI host with single NIC on a network using ISO image in unattended mode. :id: 515d32ce-44eb-4d27-a353-699bc80fc566 :expectedresults: Host should be discovered successfully :caseautomation: notautomated :CaseLevel: System """ @run_only_on('sat') @tier3 def test_positive_pxe_less_multi_nic_with_dhcp_unattended(self): """Discover a host with multiple NIC on a network with dhcp using ISO image in unattended mode. :id: cdfebc3d-d8c1-4f82-a384-cc5cd9926c65 :expectedresults: Host should be discovered successfully :CaseLevel: System """ with Session(self) as session: session.nav.go_to_select_org(self.org_name) # To show new fact column 'Interfaces' on Discovered Hosts page self._edit_discovery_fact_column_param(session, "interfaces") with LibvirtGuest(boot_iso=True, extra_nic=True) as pxe_less_host: hostname = pxe_less_host.guest_name self.assertTrue( self.discoveredhosts.waitfordiscoveredhost(hostname) ) self.assertIsNotNone(self.discoveredhosts.search(hostname)) element = locators['discoveredhosts.fetch_interfaces'] host_interfaces = self.discoveredhosts.fetch_fact_value( hostname, element) self.assertEqual(u'eth0,eth1,lo', host_interfaces) @run_only_on('sat') @stubbed() @tier3 def test_positive_pxe_less_multi_nic_with_dhcp_interactively(self): """Discover a host with multiple NIC on a network with dhcp using ISO image in interactive TUI mode. :id: e29c7f71-096e-42ef-9bbf-77fecac86a9c :expectedresults: Host should be discovered successfully :caseautomation: notautomated :CaseLevel: System """ @run_only_on('sat') @stubbed() @tier3 def test_positive_pxe_less_multi_nic_without_dhcp_interactively(self): """Discover a host with multiple NIC on a network without dhcp using ISO image in interactive TUI mode. 
:id: 206a375c-3f42-4cc8-b338-bb85127cffc9 :expectedresults: Host should be discovered successfully :caseautomation: notautomated :CaseLevel: System """ @run_only_on('sat') @stubbed() @tier3 def test_positive_pxe_less_multi_nic_without_dhcp_unattended(self): """Discover a host with multiple NIC on a network without dhcp using ISO image in unattended mode. :id: 1e25326d-2976-4a12-8e02-c4be6705f522 :expectedresults: Host should be discovered successfully :caseautomation: notautomated :CaseLevel: System """ @run_only_on('sat') @tier3 def test_positive_pxe_multi_nic_unattended(self): """Discover a host with multiple NIC on a network with dhcp using pxe in unattended mode. :id: 0d004ed0-594f-492f-8756-33349094aa8e :expectedresults: Host should be discovered successfully :CaseLevel: System """ with Session(self) as session: session.nav.go_to_select_org(self.org_name) # To show new fact column 'Interfaces' on Discovered Hosts page self._edit_discovery_fact_column_param(session, "interfaces") with LibvirtGuest(extra_nic=True) as pxe_host: hostname = pxe_host.guest_name self.assertTrue( self.discoveredhosts.waitfordiscoveredhost(hostname) ) self.assertIsNotNone(self.discoveredhosts.search(hostname)) element = locators['discoveredhosts.fetch_interfaces'] host_interfaces = self.discoveredhosts.fetch_fact_value( hostname, element) self.assertEqual(u'eth0,eth1,lo', host_interfaces) @run_only_on('sat') @tier3 def test_custom_facts_discovery(self): """Check if defined custom facts are displayed under host's facts :id: 5492e063-72db-44b8-a34a-9c75c351b89a :Setup: Provisioning should be configured :Steps: Validate specified custom facts :expectedresults: All defined custom facts should be displayed correctly :CaseLevel: System """ param_value = 'myfact' with Session(self) as session: session.nav.go_to_select_org(self.org_name) # To show new fact column 'Interfaces' on Discovered Hosts page self._edit_discovery_fact_column_param(session, param_value) with LibvirtGuest(boot_iso=True) as pxe_less_host: hostname = pxe_less_host.guest_name self.assertTrue( self.discoveredhosts.waitfordiscoveredhost(hostname) ) element = locators['discoveredhosts.fetch_custom_fact'] custom_fact = self.discoveredhosts.fetch_fact_value( hostname, element) self.assertEqual(u'somevalue', custom_fact) @run_only_on('sat') @tier3 def test_positive_provision_from_facts(self): """Provision the selected discovered host from facts page by clicking 'provision' :id: 610bbf32-b342-44ef-8339-0201e0592260 :Setup: Host should already be discovered :expectedresults: Host should be provisioned successfully and entry from discovered host should be auto removed :CaseLevel: System """ with Session(self) as session: session.nav.go_to_select_org(self.org_name) with LibvirtGuest() as pxe_host: host_name = pxe_host.guest_name self.assertTrue( self.discoveredhosts.waitfordiscoveredhost(host_name) ) self.discoveredhosts.provision_discoveredhost( hostname=host_name, hostgroup=self.config_env['host_group'], org=self.org_name, loc=self.loc.name, facts_page=True, quick_create=True) # the provisioning take some time to finish, when done will be # redirected to the created host # wait until redirected to host page pxe_host_name = '{0}.{1}'.format( host_name, self.config_env['domain']) self.assertIsNotNone( session.hosts.wait_until_element( locators["host.host_page_title"] % pxe_host_name, timeout=160 ) ) host_properties = session.hosts.get_host_properties( pxe_host_name, ['status']) self.assertTrue(host_properties) self.assertEqual(host_properties['status'], 
'OK') # Check that provisioned host is not in the list of discovered # hosts anymore self.assertIsNone(self.discoveredhosts.search(host_name)) @run_only_on('sat') @tier3 def test_positive_delete(self): """Delete the selected discovered host :id: 25a2a3ea-9659-4bdb-8631-c4dd19766014 :Setup: Host should already be discovered :expectedresults: Selected host should be removed successfully :CaseLevel: System """ with Session(self) as session: session.nav.go_to_select_org(self.org_name) with LibvirtGuest() as pxe_host: hostname = pxe_host.guest_name self.assertTrue( self.discoveredhosts.waitfordiscoveredhost(hostname) ) self.discoveredhosts.delete(hostname, dropdown_present=True) @run_only_on('sat') @tier3 def test_positive_delete_from_facts(self): """Delete the selected discovered host from facts page :id: 892aa809-bcf0-46ae-8495-70d7a6483b75 :Setup: Host should already be discovered :expectedresults: Selected host should be removed successfully :CaseLevel: System """ with Session(self) as session: session.nav.go_to_select_org(self.org_name) with LibvirtGuest() as pxe_host: hostname = pxe_host.guest_name self.assertTrue( self.discoveredhosts.waitfordiscoveredhost(hostname) ) self.discoveredhosts.delete_from_facts(hostname) self.assertIsNone(self.discoveredhosts.search(hostname)) @run_only_on('sat') @tier3 def test_positive_delete_multiple(self): """Delete multiple discovered hosts from 'Select Action' drop down :id: 556fb306-512f-46a4-8a0f-af8013161efe :Setup: Host should already be discovered :expectedresults: Selected host should be removed successfully :CaseLevel: System """ with Session(self) as session: session.nav.go_to_select_org(self.org_name) with LibvirtGuest() as pxe_1_host: host_1_name = pxe_1_host.guest_name self.assertTrue( self.discoveredhosts.waitfordiscoveredhost(host_1_name) ) with LibvirtGuest() as pxe_2_host: host_2_name = pxe_2_host.guest_name self.assertTrue( self.discoveredhosts.waitfordiscoveredhost(host_2_name) ) hostnames = [host_1_name, host_2_name] for hostname in hostnames: host = self.discoveredhosts.search(hostname) if not host: raise UIError( 'Could not find the selected discovered host ' '"{0}"'.format(hostname) ) self.discoveredhosts.navigate_to_entity() # To delete multiple discovered hosts self.discoveredhosts.multi_delete(hostnames) for hostname in [host_1_name, host_2_name]: self.assertIsNone( self.discoveredhosts.search(hostname) ) @run_only_on('sat') @tier3 def test_positive_refresh_facts_pxe(self): """Refresh the facts of pxe-based discovered host by adding a new NIC. 
:id: cda4103c-6d1a-4f9e-bf57-e516ef1f2a37 :Setup: Host should already be discovered :expectedresults: Facts should be refreshed successfully with new NIC :CaseLevel: System """ param_value = 'interfaces' with Session(self) as session: session.nav.go_to_select_org(self.org_name) # To show new fact column 'Interfaces' on Discovered Hosts page self._edit_discovery_fact_column_param(session, param_value) with LibvirtGuest() as pxe_host: hostname = pxe_host.guest_name self.assertTrue( self.discoveredhosts.waitfordiscoveredhost(hostname) ) self.assertIsNotNone(self.discoveredhosts.search(hostname)) # To add a new network interface on discovered host pxe_host.attach_nic() # To refresh the facts of discovered host, # UI should show newly added interface on refresh_facts self.discoveredhosts.refresh_facts(hostname) element = locators['discoveredhosts.fetch_interfaces'] host_interfaces = self.discoveredhosts.fetch_fact_value( hostname, element) self.assertEqual(u'eth0,eth1,lo', host_interfaces) @run_only_on('sat') @tier3 def test_positive_refresh_facts_pxe_less(self): """Refresh the facts of pxe-less discovered host by adding a new NIC. :id: 367a5336-a0fa-491b-8153-3e39d68eb978 :Setup: Host should already be discovered :expectedresults: Facts should be refreshed successfully with new NIC :CaseLevel: System """ with Session(self) as session: session.nav.go_to_select_org(self.org_name) # To show new fact column 'Interfaces' on Discovered Hosts page self._edit_discovery_fact_column_param(session, 'interfaces') with LibvirtGuest(boot_iso=True) as pxe_less_host: hostname = pxe_less_host.guest_name self.assertTrue( self.discoveredhosts.waitfordiscoveredhost(hostname) ) self.assertIsNotNone(self.discoveredhosts.search(hostname)) # To add a new network interface on discovered host pxe_less_host.attach_nic() # To refresh the facts of discovered host, # UI should show newly added interface on refresh_facts self.discoveredhosts.refresh_facts(hostname) element = locators['discoveredhosts.fetch_interfaces'] host_interfaces = self.discoveredhosts.fetch_fact_value( hostname, element) self.assertEqual(u'eth0,eth1,lo', host_interfaces) @run_only_on('sat') @tier3 def test_positive_reboot(self): """Reboot a discovered host. :id: 5edc6831-bfc8-4e69-9029-b4c0caa3ee32 :Setup: Host should already be discovered :expectedresults: Host should be successfully rebooted. 
:CaseLevel: System """ with Session(self) as session: session.nav.go_to_select_org(self.org_name) with LibvirtGuest() as pxe_host: hostname = pxe_host.guest_name self.assertTrue( self.discoveredhosts.waitfordiscoveredhost(hostname) ) element = (locators['discoveredhosts.fetch_ip'] % hostname) # Get the IP of discovered host host_ip = self.discoveredhosts.fetch_fact_value( hostname, element) # Check if host is reachable via IP self.assertTrue(self._ping_host(host_ip)) self.discoveredhosts.reboot_host(hostname) for _ in range(12): response = self._ping_host(host_ip, timeout=5) if not response: break sleep(5) else: self.fail('Host was not stopped') @run_only_on('sat') @tier3 def test_positive_update_default_org(self): """Change the default org of more than one discovered hosts from 'Select Action' drop down :id: fe6ab6e0-c942-46c1-8ae2-4f4caf00e0d8 :Setup: Host should already be discovered :expectedresults: Default org should be successfully changed for multiple hosts :CaseLevel: System """ new_org = gen_string('alpha') entities.Organization(name=new_org).create() with Session(self) as session: session.nav.go_to_select_org(self.org_name) with LibvirtGuest() as pxe_1_host: host_1_name = pxe_1_host.guest_name self.assertTrue( self.discoveredhosts.waitfordiscoveredhost(host_1_name) ) with LibvirtGuest() as pxe_2_host: host_2_name = pxe_2_host.guest_name self.assertTrue( self.discoveredhosts.waitfordiscoveredhost(host_2_name) ) hostnames = [host_1_name, host_2_name] for hostname in hostnames: self.assertIsNotNone( self.discoveredhosts.search(hostname)) self.discoveredhosts.update_org_loc(hostnames, new_org) @run_only_on('sat') @tier3 def test_positive_update_default_location(self): """Change the default location of more than one discovered hosts from 'Select Action' drop down :id: 537bfb51-144a-44be-a087-d2437f074464 :Setup: Host should already be discovered :expectedresults: Default Location should be successfully changed for multiple hosts :CaseLevel: System """ loc = entities.Location().create() with Session(self) as session: session.nav.go_to_select_org(self.org_name) with LibvirtGuest() as pxe_1_host: host_1_name = pxe_1_host.guest_name self.assertTrue( self.discoveredhosts.waitfordiscoveredhost(host_1_name) ) with LibvirtGuest() as pxe_2_host: host_2_name = pxe_2_host.guest_name self.assertTrue( self.discoveredhosts.waitfordiscoveredhost(host_2_name) ) hostnames = [host_1_name, host_2_name] for hostname in hostnames: self.assertIsNotNone( self.discoveredhosts.search(hostname)) self.discoveredhosts.update_org_loc( hostnames, new_loc=loc.name) @run_only_on('sat') @stubbed() @tier3 def test_positive_auto_provision_host_with_rule(self): """Create a new discovery rule and provision a discovered host using that discovery rule. Set query as (e.g IP=IP_of_discovered_host) :id: 00686008-87eb-4b76-9579-ceddb578ef31 :Setup: Host should already be discovered :expectedresults: Host should reboot and provision :CaseLevel: System :caseautomation: notautomated """ @run_only_on('sat') @tier3 @upgrade def test_positive_manual_provision_host_with_rule(self): """Create a new discovery rule and manually provision a discovered host using that discovery rule. 
Set query as (e.g IP=IP_of_discovered_host) :id: 4488ab9a-d462-4a62-a1a1-e5656c8a8b99 :Setup: Host should already be discovered :expectedresults: Host should reboot and provision :CaseLevel: System """ rule_name = gen_string('alpha') with Session(self) as session: session.nav.go_to_select_org(self.org_name) with LibvirtGuest() as pxe_host: host_name = pxe_host.guest_name self.assertTrue( self.discoveredhosts.waitfordiscoveredhost(host_name) ) element = (locators['discoveredhosts.fetch_ip'] % host_name) # Get the IP of discovered host host_ip = self.discoveredhosts.fetch_fact_value( host_name, element) # Define a discovery rule with IP_address make_discoveryrule( session, name=rule_name, host_limit=1, hostgroup=self.config_env['host_group'], search_rule=host_ip, locations=[self.loc.name], ) self.assertIsNotNone(self.discoveryrules.search(rule_name)) self.discoveredhosts.auto_provision(host_name) self.assertIsNotNone(self.discoveredhosts.wait_until_element( common_locators['notif.success'])) self.assertIsNotNone(self.hosts.search( u'{0}.{1}'.format(host_name, self.config_env['domain']))) # Check that provisioned host is not in the list of discovered # hosts anymore self.assertIsNone(self.discoveredhosts.search(host_name)) @run_only_on('sat') @stubbed() @tier3 def test_positive_provision_multi_host_with_rule(self): """Create a new discovery rule with (host_limit = 0) that applies to multi hosts. Set query as cpu_count = 1 OR mem > 500 :id: d25c088f-ee7a-4a3a-9b51-8f65f545e680 :Setup: Multiple hosts should already be discovered in same subnet. :expectedresults: All Hosts of same subnet should reboot and provision :caseautomation: notautomated :CaseLevel: System """ @run_only_on('sat') @stubbed() @tier3 def test_positive_provision_with_rule_priority(self): """Create multiple discovery rules with different priority and check rule with highest priority executed first :id: 8daf0b35-912b-441d-97d3-45f48799f4ba :Setup: Multiple hosts should already be discovered :expectedresults: Host with lower count have higher priority and that rule should be executed first. 
:caseautomation: notautomated :CaseLevel: System """ @run_only_on('sat') @tier3 def test_positive_provision_without_auto_provision(self): """Create a discovery rule and execute it when "auto_provisioning" flag set to 'false' :id: 25f5112b-7bbd-4bda-8d75-c43bd6390aa8 :Setup: Host should already be discovered :expectedresults: Host should not be rebooted automatically :CaseLevel: System """ try: # Disable flag to auto provision discovery_auto = entities.Setting().search( query={'search': 'name="discovery_auto"'})[0] default_discovery_auto = discovery_auto.value discovery_auto.value = 'False' discovery_auto.update(['value']) rule_name = gen_string('alpha') with Session(self) as session: session.nav.go_to_select_org(self.org_name) # Define a discovery rule make_discoveryrule( session, name=rule_name, host_limit=1, hostgroup=self.config_env['host_group'], search_rule='cpu_count = 1', locations=[self.loc.name], ) self.assertIsNotNone(self.discoveryrules.search(rule_name)) with LibvirtGuest() as pxe_host: host_name = pxe_host.guest_name self.assertTrue( self.discoveredhosts.waitfordiscoveredhost(host_name) ) self.assertIsNotNone( self.discoveredhosts.search(host_name)) # Check that host shouldn't list under all hosts self.assertIsNone(self.hosts.search( u'{0}.{1}'.format(host_name, self.config_env['domain']) )) # Check that host still listed under discovered hosts self.assertIsNotNone( self.discoveredhosts.search(host_name)) finally: # Revert the discovery_auto flag to default value discovery_auto.value = default_discovery_auto discovery_auto.update(['value']) @run_only_on('sat') @stubbed() @tier3 def test_negative_create_discovery_rule(self): """Create a discovery rule with invalid query e.g. BIOS = xyz :id: 89014adf-6346-4681-9107-6d92e14b6a3e :Setup: Host should already be discovered :expectedresults: Rule should automatically be skipped on clicking 'Auto provision'. UI Should raise 'No matching rule found' :CaseLevel: System """ @run_only_on('sat') @stubbed() @tier3 def test_positive_multi_provision_with_rule_limit(self): """Create a discovery rule (CPU_COUNT = 2) with host limit 1 and provision more than one host with same rule :id: ab14c56d-331f-466b-aeb0-41fb19f7b3aa :Setup: Host with two CPUs should already be discovered :expectedresults: Rule should only be applied to one discovered host and for other rule should already be skipped. 
        :caseautomation: notautomated

        :CaseLevel: System
        """

    @run_only_on('sat')
    @stubbed()
    @tier3
    def test_positive_update_discovery_rule(self):
        """Update an existing rule and execute it

        :id: 0969cf6f-215d-44c5-96b5-91cb1d865ad0

        :Setup: Host should already be discovered

        :expectedresults: User should be able to update the rule and it
            should be executed on discovered host

        :caseautomation: notautomated

        :CaseLevel: System
        """

    @run_only_on('sat')
    @stubbed('unstub once os/browser/env combination is changed')
    @tier3
    def test_positive_update_name(self):
        """Update the discovered host name and provision it

        :id: 3770b007-5006-4815-ae03-fbd330aad304

        :Setup: Host should already be discovered

        :expectedresults: The hostname should be updated and host should be
            provisioned

        :CaseLevel: System
        """
        name = gen_string('alpha').lower()
        with Session(self) as session:
            session.nav.go_to_select_org(self.org_name)
            with LibvirtGuest() as pxe_host:
                host_name = pxe_host.guest_name
                self.assertTrue(
                    self.discoveredhosts.waitfordiscoveredhost(host_name)
                )
                self.discoveredhosts.provision_discoveredhost(
                    hostname=host_name,
                    hostgroup=self.config_env['host_group'],
                    org=self.org_name,
                    loc=self.loc.name,
                    new_name=name)
                new_host_name = (
                    u'{0}.{1}'.format(name, self.config_env['domain']))
                self.assertIsNotNone(self.hosts.search(new_host_name))
                # Check that provisioned host is not in the list of discovered
                # hosts anymore
                self.assertIsNone(self.discoveredhosts.search(host_name))

    @run_only_on('sat')
    @tier3
    def test_positive_auto_provision_all(self):
        """Discover a bunch of hosts and auto-provision all

        :id: e26129b5-16fa-418c-b768-21670e9f0b74

        :expectedresults: All hosts should be successfully rebooted and
            provisioned

        :CaseLevel: System
        """
        rule_name = gen_string('alpha')
        with Session(self) as session:
            session.nav.go_to_select_org(self.org_name)
            with LibvirtGuest() as pxe_1_host, LibvirtGuest() as pxe_2_host:
                host_1_name = pxe_1_host.guest_name
                self.assertTrue(
                    self.discoveredhosts.waitfordiscoveredhost(host_1_name)
                )
                host_2_name = pxe_2_host.guest_name
                self.assertTrue(
                    self.discoveredhosts.waitfordiscoveredhost(host_2_name)
                )
                # Define a discovery rule
                make_discoveryrule(
                    session,
                    name=rule_name,
                    host_limit=2,
                    hostgroup=self.config_env['host_group'],
                    search_rule='cpu_count = 1',
                    locations=[self.loc.name],
                )
                self.assertIsNotNone(self.discoveryrules.search(rule_name))
                self.discoveredhosts.auto_provision_all()
                hostnames = [host_1_name, host_2_name]
                for hostname in hostnames:
                    self.assertIsNotNone(self.hosts.search(
                        u'{0}.{1}'.format(
                            hostname, self.config_env['domain'])))
                    # Check that provisioned host is not in the list of
                    # discovered hosts anymore
                    self.assertIsNotNone(
                        self.discoveredhosts.search(
                            hostname, expecting_results=False))

    @run_only_on('sat')
    @tier3
    def test_positive_add_fact_column(self):
        """Add a new fact column to display on discovered host page

        :id: 914bd47f-b2a6-459e-b166-70dbc9ce1bc6

        :Steps:

            1. Go to settings -> Discovered tab -> discovery_fact_column
            2. Edit discovery_fact_column
            3. Add bios_vendor

        :expectedresults: The added fact should be displayed on
            'discovered_host' page after successful discovery

        :CaseLevel: System
        """
        param_value = 'bios_vendor'
        with Session(self) as session:
            session.nav.go_to_select_org(self.org_name)
            # To show the new fact column 'bios_vendor' on the Discovered
            # Hosts page
            self._edit_discovery_fact_column_param(session, param_value)
            with LibvirtGuest() as pxe_host:
                hostname = pxe_host.guest_name
                self.assertTrue(
                    self.discoveredhosts.waitfordiscoveredhost(hostname)
                )
                element = locators['discoveredhosts.fetch_bios']
                host_bios = self.discoveredhosts.fetch_fact_value(
                    hostname, element)
                self.assertEqual(u'Seabios', host_bios)

    @run_only_on('sat')
    @tier3
    def test_negative_add_fact(self):
        """Add a new fact column with invalid fact to display on
        discovered host page

        :id: 4e9bc843-4ba2-40d4-a1b3-2d7be117664f

        :Steps:

            1. Go to settings -> Discovered tab -> discovery_fact_column
            2. Edit discovery_fact_column
            3. Add 'test'

        :expectedresults: The added fact should be displayed on
            'discovered_host' page after successful discovery and shows 'N/A'

        :CaseLevel: System
        """
        param_value = 'test'
        expected_value = u'N/A'
        with Session(self) as session:
            session.nav.go_to_select_org(self.org_name)
            # To show the new fact column 'test' on the Discovered Hosts page
            self._edit_discovery_fact_column_param(session, param_value)
            with LibvirtGuest() as pxe_host:
                hostname = pxe_host.guest_name
                self.assertTrue(
                    self.discoveredhosts.waitfordiscoveredhost(hostname)
                )
                element = (
                    locators['discoveredhosts.fetch_fact'] % expected_value
                )
                fact_value = self.discoveredhosts.fetch_fact_value(
                    hostname, element)
                self.assertEqual(expected_value, fact_value)

    @run_only_on('sat')
    @stubbed()
    @tier3
    def test_positive_discovery_manager_role(self):
        """Assign 'Discovery_Manager' role to a normal user

        :id: c219c877-e785-41a3-9abe-803a9b26bcad

        :expectedresults: User should be able to view, provision, edit and
            destroy one or more discovered host as well as view, create_new,
            edit, execute and delete discovery rules.

        :caseautomation: notautomated

        :CaseLevel: System
        """

    @run_only_on('sat')
    @stubbed()
    @tier3
    def test_positive_discovery_reader_role(self):
        """Assign 'Discovery Reader' role to a normal user

        :id: 075bd559-a3bb-42ca-86a4-60581c650a1d

        :expectedresults: User should be able to view existing discovered host
            and rule

        :caseautomation: notautomated

        :CaseLevel: System
        """

    @run_only_on('sat')
    @stubbed()
    @tier3
    def test_positive_validate_pxe_less_discovery_status_screen(self):
        """Validate all the buttons from "Discovery Status" TUI screen of a
        pxe-less discovered host

        :id: a18694ad-7642-472f-8e7c-c911c892a763

        :expectedresults: All buttons should work

        :caseautomation: notautomated

        :CaseLevel: System
        """

    @run_only_on('sat')
    @stubbed()
    @tier3
    def test_negative_validate_network_config_screen(self):
        """Validate network configuration screen by specifying invalid
        IP/gateway/DNS address notation.

        :id: b1d24367-9a7e-4d8e-85b6-989d8c520498

        :expectedresults: User should get an error message

        :caseautomation: notautomated

        :CaseLevel: System
        """

    @run_only_on('sat')
    @stubbed()
    @tier3
    def test_negative_pxe_less_discovery_without_dhcp(self):
        """Discover a host via pxe-less and select "Discover using DHCP"
        interactively when no dhcp is available.

        :id: adef940c-8948-4cd9-88b3-f0b307134536

        :expectedresults: User should get an error message "Unable to bring
            network via DHCP" and click on 'OK' should open the 'Network
            configuration screen' to manually specify the IP/GW/DNS.
:caseautomation: notautomated :CaseLevel: System """ @run_only_on('sat') @stubbed() @tier3 def test_positive_provision_with_org_loc_from_new_model_window(self): """Provision a discovered host manually by associating org & loc from host properties model window and select create host button. :id: 8c6a7d3f-e34e-4888-9b1c-58e71ee584a3 :expectedresults: Provisioned host is associated with selected org & location :caseautomation: notautomated :CaseLevel: System """ @run_only_on('sat') @tier3 def test_positive_provision_with_hostgroup_from_new_model_window(self): """Provision a discovered host manually by associating hostgroup from host properties model window and select create host button. :id: f17fb8c9-f9cb-4547-80bc-3b40c6691bb1 :expectedresults: Provisioned host is created with selected host-group and entry from discovered host should be auto removed. :CaseLevel: System """ with Session(self) as session: session.nav.go_to_select_org(self.org_name) with LibvirtGuest() as pxe_host: host_name = pxe_host.guest_name self.assertTrue( self.discoveredhosts.waitfordiscoveredhost(host_name) ) self.discoveredhosts.provision_discoveredhost( hostname=host_name, hostgroup=self.config_env['host_group'], org=self.org_name, loc=self.loc.name) self.assertIsNotNone(self.hosts.search( u'{0}.{1}'.format(host_name, self.config_env['domain']))) # Check that provisioned host is not in the list of discovered # hosts anymore self.assertIsNone(self.discoveredhosts.search(host_name)) @run_only_on('sat') @tier3 @upgrade def test_positive_provision_using_quick_host_button(self): """Associate hostgroup while provisioning a discovered host from host properties model window and select quick host. :id: 34c1e9ea-f210-4a1e-aead-421eb962643b :Setup: 1. Host should already be discovered 2. Hostgroup should already be created with all required entities. :expectedresults: Host should be quickly provisioned and entry from discovered host should be auto removed. :CaseLevel: System """ with Session(self) as session: session.nav.go_to_select_org(self.org_name) with LibvirtGuest() as pxe_host: host_name = pxe_host.guest_name self.assertTrue( self.discoveredhosts.waitfordiscoveredhost(host_name) ) self.assertIsNotNone(self.discoveredhosts.search(host_name)) self.discoveredhosts.provision_discoveredhost( hostname=host_name, hostgroup=self.config_env['host_group'], org=self.org_name, loc=self.loc.name, quick_create=True) self.assertIsNotNone(self.hosts.search( u'{0}.{1}'.format(host_name, self.config_env['domain']))) # Check that provisioned host is not in the list of discovered # hosts anymore self.assertIsNone(self.discoveredhosts.search(host_name)) @run_only_on('sat') @stubbed() @tier3 def test_positive_provision_with_facts_set_by_user(self): """Provision a discovered host with clear_all_facts setting's default value 'No' :id: 5dbb9a9f-117d-41aa-8f15-d4da6163b244 :Setup: 1. Host should already be discovered 2. Go to setting -> clear_all_facts -> No :expectedresults: After successful provisioning, all facts set by user should be visible, including the one started with discovery keyword. :caseautomation: notautomated :CaseLevel: System """ @run_only_on('sat') @stubbed() @tier3 def test_positive_provision_with_clear_facts_set_by_user(self): """Provision a discovered host by setting clear_all_facts value to 'Yes' :id: 9f153b3a-4c21-41a2-b2a0-a0b1bee262d3 :Setup: 1. Host should already be discovered 2. 
Go to setting -> clear_all_facts -> Yes :expectedresults: After successful provisioning, all facts set by user should be deleted execpt the one started with discovery keyword. :caseautomation: notautomated :CaseLevel: System """ @run_only_on('sat') @stubbed() @tier3 def test_positive_lock_discovered_host_into_discovery(self): """Lock host into discovery via PXE configuration :id: 4ba9f923-0b8f-40ee-8bcb-90ff496587c4 :Steps: 1. Go to setting -> discovery_lock -> true 2. Go to setting -> discovery_lock_template -> template to be locked with :expectedresults: Host should boot into discovery mode and should be discovered. :caseautomation: notautomated :CaseLevel: System """ @run_only_on('sat') @stubbed() @tier3 def test_positive_populate_puppet_params_using_hostgroup(self): """On provisioning a host associate hostgroup and see if PuppetCA and Puppetmaster are being populated. :id: 21e55ffa-02bc-4f96-b463-887da30fb1c4 :Steps: 1. Discover a host 2. Create a hostgroup with puppetCA and puppetmaster :expectedresults: Parameters like PuppetCA/Puppetmaster should be populated on associating hostgroup to discovered host :caseautomation: notautomated :CaseLevel: System """ @run_only_on('sat') @stubbed() @tier3 def test_positive_update_default_org_from_settings(self): """Update the default 'Discovery Organization' settings to place the discovered hosts in. :id: 596a98ad-90f6-42ff-b8ef-47f02dc5d595 :Steps: 1. Go to setting -> Discovered -> Discovery organization 2. Update default org from dropdown :expectedresults: Discovered host should automatically be placed in selected default org :caseautomation: notautomated :CaseLevel: System """ @run_only_on('sat') @stubbed() @tier3 def test_positive_update_default_location_from_settings(self): """Update the default 'Discovery Location' settings to place the discovered hosts in. :id: 4bba9899-a53e-4521-b212-aee893f7a726 :Steps: 1. Go to setting -> Discovered -> Discovery Location 2. Update default location from dropdown :expectedresults: Discovered host should automatically be placed in selected default location :caseautomation: notautomated :CaseLevel: System """ @run_only_on('sat') @stubbed() @tier3 def test_positive_check_network_facts(self): """Check if network facts ending with _eth0 are correctly displayed under discovered host page :id: 5a06236c-05dc-4a98-b1b5-9586c95203f9 :expectedresults: Network facts like below should be displayed on discovered host page: 1. facts ending with _eth0 2. auto_negotiation_XXX 3. 
LLDP facts like lldp_neighbor_portid_XXX

        :caseautomation: notautomated

        :CaseLevel: System
        """

    @run_only_on('sat')
    @stubbed()
    @tier3
    def test_positive_rebuild_dns_on_provisioning(self):
        """Force DNS rebuild when provisioning discovered host

        :id: 87aa3279-7c29-40e8-a4d2-0aab43f0972f

        :Setup: Make sure the 'discovery_always_rebuild_dns' setting is set
            to true

        :expectedresults: DNS record should be recreated on provisioning
            discovered host

        :caseautomation: notautomated

        :CaseLevel: System
        """


class DiscoveryPrefixTestCase(UITestCase):
    """Tests around updating the Discovery Prefix"""

    @classmethod
    def setUpClass(cls):
        """Update discovery prefix with some string other than the default
        'mac'"""
        super(DiscoveryPrefixTestCase, cls).setUpClass()
        cls.org = entities.Organization(name=gen_string('alpha')).create()
        cls.org_name = cls.org.name
        # Update hostname_prefix with some string other than default 'mac'
        cls.prefix = 'dhost'
        cls.discovery_prefix = entities.Setting().search(
            query={'search': 'name="discovery_prefix"'})[0]
        cls.default_prefix = str(cls.discovery_prefix.value)
        cls.discovery_prefix.value = cls.prefix
        cls.discovery_prefix.update(['value'])
        cls.discovery_org = entities.Setting().search(
            query={'search': 'name="discovery_organization"'})[0]
        cls.discovery_org.value = cls.org.name
        cls.discovery_org.update(['value'])

    @classmethod
    def tearDownClass(cls):
        """Restore the default 'hostname_prefix' global setting's value"""
        cls.discovery_prefix.value = cls.default_prefix
        cls.discovery_prefix.update(['value'])
        super(DiscoveryPrefixTestCase, cls).tearDownClass()

    @run_only_on('sat')
    @tier3
    def test_positive_update_discovery_prefix(self):
        """Update the discovery_prefix parameter to something other than
        'mac'

        :id: 08f1d852-e9a0-430e-b73a-e2a7a144ac10

        :Steps:

            1. Goto settings -> Discovered tab -> discovery_prefix
            2. Edit discovery_prefix using any text that must start with a
               letter

        :Setup: Host should already be discovered

        :expectedresults: Host should be discovered with the updated prefix.

        :CaseLevel: System
        """
        with Session(self) as session:
            session.nav.go_to_select_org(self.org_name)
            with LibvirtGuest() as pxe_host:
                host_mac = pxe_host.mac
                host_name = '{0}{1}'.format(
                    self.prefix, host_mac.replace(':', "")
                )
                self.assertTrue(
                    self.discoveredhosts.waitfordiscoveredhost(host_name)
                )
                self.assertIsNotNone(self.discoveredhosts.search(host_name))

    @run_only_on('sat')
    @stubbed()
    @tier3
    def test_positive_update_hostname_settings(self):
        """Update the hostname_prefix and Hostname_facts settings and
        discover a host

        :id: e53fa8a9-baf9-4019-83f3-64949461b892

        :Steps:

            1. Goto settings -> Discovered tab -> Hostname_prefix
            2. Edit hostname_prefix using any text that must start with a
               letter, e.g. 'discover'
            3. Goto settings -> Discovered tab -> Hostname_facts
            4. Edit hostname_facts using 'discovery_bootif'

        :expectedresults: Host should be discovered with the name
            'Hostname_prefix + hostname_facts'.

        :caseautomation: notautomated

        :CaseLevel: System
        """

    @run_only_on('sat')
    @stubbed()
    @tier3
    def test_positive_update_hostname_with_empty_prefix(self):
        """Update the hostname_prefix to empty and the Hostname_facts
        setting to 'UUID'

        :id: 537a616f-3b29-4fae-98a2-e27114bea14a

        :Steps:

            1. Goto settings -> Discovered tab -> Hostname_prefix
            2. Set an empty hostname_prefix
            3. Goto settings -> Discovered tab -> Hostname_facts
            4. Edit hostname_facts using 'UUID'

        :expectedresults: Host should be discovered with the UUID in its
            name

        :caseautomation: notautomated

        :CaseLevel: System
        """

    @run_only_on('sat')
    @stubbed()
    @tier3
    def test_positive_update_hostname_with_facts_list(self):
        """Update the Hostname_facts settings with a list of hostname_facts

        :id: e0d2791d-d106-491a-a8fd-bf42c8f411d4

        :Steps:

            1. Goto settings -> Discovered tab -> Hostname_facts
            2. Edit hostname_facts using 'bios_vendor', 'uuid'

        :expectedresults: Host should be discovered with the first available
            fact, bios_vendor

        :caseautomation: notautomated

        :CaseLevel: System
        """

    @run_only_on('sat')
    @stubbed()
    @tier3
    def test_positive_update_hostname_with_unknown_fact(self):
        """Update the Hostname_facts settings with a list of hostname_facts
        where the first listed fact doesn't exist

        :id: a7f64c8a-42dc-459d-8de0-f4d6b878c731

        :Steps:

            1. Goto settings -> Discovered tab -> Hostname_facts
            2. Edit hostname_facts using 'bios', 'uuid'

        :expectedresults: Host should be discovered with the second fact,
            uuid, as the bios fact doesn't exist

        :caseautomation: notautomated

        :CaseLevel: System
        """

    @run_only_on('sat')
    @stubbed()
    @tier3
    def test_negative_discover_host_with_existing_hostname(self):
        """Discover a host with a hostname similar to one that already
        exists

        :id: ea9a9a11-a02a-408e-86fe-ab5d0b0a94fe

        :Steps:

            1. Goto settings -> Discovered tab -> Hostname_prefix
            2. Set an empty hostname_prefix (without any value)
            3. Goto settings -> Discovered tab -> Hostname_facts
            4. Update hostname_facts with 'bios_vendor'
            5. Discover two hosts

        :expectedresults: An error like 'Name has already been taken' should
            be raised on discovering the second host

        :caseautomation: notautomated

        :CaseLevel: System
        """
sghai/robottelo
tests/foreman/ui/test_discoveredhost.py
Python
gpl-3.0
56,249
0.000018
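For reference, a minimal standalone sketch of the host-naming convention exercised by test_positive_update_discovery_prefix above (the helper name is hypothetical; the rule itself comes from the test's expectation):

def discovered_host_name(prefix, mac):
    # A discovered host is named <discovery_prefix> followed by its MAC
    # address with the ':' separators stripped, mirroring the test's
    # '{0}{1}'.format(self.prefix, host_mac.replace(':', "")) expectation.
    return '{0}{1}'.format(prefix, mac.replace(':', ''))


assert discovered_host_name('dhost', '52:54:00:ab:cd:ef') == 'dhost525400abcdef'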
import sys from beaker.container import NamespaceManager, Container from beaker.exceptions import InvalidCacheBackendError, MissingCacheParameter from beaker.synchronization import _threading, Synchronizer from beaker.util import verify_directory, SyncDict try: import cmemcache as memcache except ImportError: try: import memcache except ImportError: raise InvalidCacheBackendError("Memcached cache backend requires either the 'memcache' or 'cmemcache' library") class MemcachedNamespaceManager(NamespaceManager): clients = SyncDict(_threading.Lock(), {}) def __init__(self, namespace, url, data_dir=None, lock_dir=None, **params): NamespaceManager.__init__(self, namespace, **params) if lock_dir is not None: self.lock_dir = lock_dir elif data_dir is None: raise MissingCacheParameter("data_dir or lock_dir is required") else: self.lock_dir = data_dir + "/container_mcd_lock" verify_directory(self.lock_dir) self.mc = MemcachedNamespaceManager.clients.get(url, lambda: memcache.Client(url.split(';'), debug=0)) # memcached does its own locking. override our own stuff def do_acquire_read_lock(self): pass def do_release_read_lock(self): pass def do_acquire_write_lock(self, wait = True): return True def do_release_write_lock(self): pass # override open/close to do nothing, keep memcache connection open as long # as possible def open(self, *args, **params):pass def close(self, *args, **params):pass def __getitem__(self, key): cache_key = key.replace(' ', '\302\267') keys = [self.namespace + '_' + cache_key, self.namespace + ':keys'] key_dict = self.mc.get_multi(keys) if cache_key not in key_dict.get(self.namespace+':keys', {}): raise KeyError(key) return key_dict[self.namespace + '_' + cache_key] def __contains__(self, key): return self.has_key(key) def has_key(self, key): key = key.replace(' ', '\302\267') keys = self.mc.get(self.namespace + ':keys') or {} return key in keys def __setitem__(self, key, value): key = key.replace(' ', '\302\267') keys = self.mc.get(self.namespace + ':keys') if keys is None: keys = {} keys[key] = True self.mc.set(self.namespace + ':keys', keys) self.mc.set(self.namespace + "_" + key, value) def __delitem__(self, key): key = key.replace(' ', '\302\267') keys = self.mc.get(self.namespace + ':keys') try: del keys[key] self.mc.delete(self.namespace + "_" + key) self.mc.set(self.namespace + ':keys', keys) except KeyError: raise def do_remove(self): keys = self.mc.get(self.namespace + ':keys') if keys is not None: delete_keys = [self.namespace + '_' + x for x in keys] delete_keys.append(self.namespace + ':keys') self.mc.delete_multi(delete_keys) def keys(self): keys = self.mc.get(self.namespace + ':keys') if keys is None: return [] else: return [x.replace('\302\267', ' ') for x in keys.keys()] class MemcachedContainer(Container): def do_init(self, data_dir=None, lock_dir=None, **params): self.funclock = None def create_namespace(self, namespace, url, **params): return MemcachedNamespaceManager(namespace, url, **params) create_namespace = classmethod(create_namespace) def lock_createfunc(self, wait = True): if self.funclock is None: self.funclock = Synchronizer(identifier = "memcachedcontainer/funclock/%s" % self.namespacemanager.namespace, use_files = True, lock_dir = self.namespacemanager.lock_dir) return self.funclock.acquire_write_lock(wait) def unlock_createfunc(self): self.funclock.release_write_lock()
appcelerator/entourage
components/services/appengine/stub/beaker/ext/memcached.py
Python
apache-2.0
4,037
0.006688
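A minimal sketch of the key-index pattern used by MemcachedNamespaceManager above, with a plain dict standing in for the memcache client (memcached cannot enumerate keys, so a '<namespace>:keys' index entry is maintained alongside the values; all names here are illustrative):

class TinyNamespace(object):
    def __init__(self, namespace, store):
        self.namespace = namespace
        self.store = store  # stand-in for memcache.Client

    def __setitem__(self, key, value):
        # Record the key in the index entry, then store the value itself.
        keys = self.store.get(self.namespace + ':keys') or {}
        keys[key] = True
        self.store[self.namespace + ':keys'] = keys
        self.store[self.namespace + '_' + key] = value

    def __getitem__(self, key):
        # Membership is decided by the index, as in the real manager.
        if key not in (self.store.get(self.namespace + ':keys') or {}):
            raise KeyError(key)
        return self.store[self.namespace + '_' + key]


ns = TinyNamespace('demo', {})
ns['color'] = 'blue'
assert ns['color'] == 'blue'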
""" Forms for use with User objects """ from django import forms from django.contrib.auth.models import User class UserForm(forms.ModelForm): """ Form for django.contrib.auth.models.User """ class Meta: """ Meta data for User Form """ model = User fields = ('username', 'email', 'password') def __init__(self, *args, **kwargs): super(UserForm, self).__init__(*args, **kwargs) self.fields['username'].required = True self.fields['email'].required = True self.fields['password'].required = True def save(self, commit=True): """ Override save so creates a user using create_user method on User model :param commit: Commit to DB or not :return: Instance of UserForm """ instance = super(UserForm, self).save(commit=False) User.objects.create_user( username=self.cleaned_data.get('username'), password=self.cleaned_data.get('password'), email=self.cleaned_data.get('email') ) return instance
Gimpneek/jobseek
jobseekr/cv/forms/user.py
Python
agpl-3.0
1,096
0
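A hedged usage sketch for the UserForm above (the field values are placeholders, and this assumes a configured Django project with django.contrib.auth installed):

form = UserForm(data={
    'username': 'alice',                # placeholder
    'email': 'alice@example.com',       # placeholder
    'password': 'not-a-real-password',  # placeholder
})
if form.is_valid():
    # save() delegates to User.objects.create_user(), so the password is
    # hashed rather than stored verbatim on the created user.
    form.save()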
from django import forms from django.contrib.auth.models import User from .models import Message from tinymce.widgets import TinyMCE class MessageForm(forms.Form): recipient = forms.CharField(widget=forms.TextInput(attrs={'class':'form-control'})) subject = forms.CharField(widget=forms.TextInput(attrs={'class':'form-control'})) content = forms.CharField(widget=TinyMCE(attrs={'cols': 80, 'rows': 30})) encrypted = forms.BooleanField(required=False) #class Meta: # model = Message # fields = ('recipient', 'subject', 'content', 'encrypted',) class KeyForm(forms.Form): pem_file = forms.FileField()
j-windsor/cs3240-f15-team21-v2
post/forms.py
Python
mit
640
0.015625
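A brief validation sketch for MessageForm above (values are illustrative; 'encrypted' may be omitted because it is declared with required=False):

form = MessageForm(data={
    'recipient': 'bob',      # placeholder
    'subject': 'hello',      # placeholder
    'content': '<p>hi</p>',  # TinyMCE only changes the rendered widget
})
print(form.is_valid())  # True: all required fields are present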
#!/usr/bin/env python2.7 import os import sys this_dir = os.path.dirname(os.path.abspath(__file__)) trunk_dir = os.path.split(this_dir)[0] sys.path.insert(0,trunk_dir) from ikol.dbregister import DataBase from ikol import var if os.path.exists(var.DB_PATH): os.remove(var.DB_PATH) DB = DataBase(var.DB_PATH) DB.insertPlaylist("loLWOCl7nlk","test") DB.insertPlaylist("loLWO357nlk","testb") DB.insertVideo("KDk2341oEQQ","loLWOCl7nlk","test") DB.insertVideo("KDktIWeoE23","loLWOCl7nlk","testb") print DB.getAllVideosByPlaylist("loLWOCl7nlk") print DB.getVideoById("KDk2341oEQQ")
lokiteitor/ikol
test/DBtest.py
Python
gpl-2.0
589
0.018676
# encoding: utf-8 # # This Source Code Form is subject to the terms of the Mozilla Public # License, v. 2.0. If a copy of the MPL was not distributed with this file, # You can obtain one at http://mozilla.org/MPL/2.0/. # # Author: Kyle Lahnakoski (kyle@lahnakoski.com) # from __future__ import unicode_literals from __future__ import division from __future__ import absolute_import import datetime from socket import timeout as socket_timeout from kombu import Connection, Producer, Exchange from pytz import timezone from mozillapulse.utils import time_to_string from mo_logs import constants from pyLibrary import jsons from mo_logs.exceptions import Except, suppress_exception from mo_logs import Log from mo_dots import wrap, coalesce, Data, set_default from mo_kwargs import override from mo_threads import Thread, Lock from mozillapulse.consumers import GenericConsumer count_locker=Lock() count=0 class Consumer(Thread): @override def __init__( self, exchange, # name of the Pulse exchange topic, # message name pattern to subscribe to ('#' is wildcard) target=None, # WILL BE CALLED WITH PULSE PAYLOADS AND ack() IF COMPLETE$ED WITHOUT EXCEPTION target_queue=None, # (aka self.queue) WILL BE FILLED WITH PULSE PAYLOADS host='pulse.mozilla.org', # url to connect, port=5671, # tcp port user=None, password=None, vhost="/", start=0, # USED AS STARTING POINT FOR ASSIGNING THE _meta.count ATTRIBUTE ssl=True, applabel=None, heartbeat=False, # True to also get the Pulse heartbeat message durable=False, # True to keep queue after shutdown serializer='json', broker_timezone='GMT', kwargs=None ): global count count = coalesce(start, 0) self.target_queue = target_queue self.pulse_target = target if (target_queue == None and target == None) or (target_queue != None and target != None): Log.error("Expecting a queue (for fast digesters) or a target (for slow digesters)") Thread.__init__(self, name="Pulse consumer for " + kwargs.exchange, target=self._worker) self.settings = kwargs kwargs.callback = self._got_result kwargs.user = coalesce(kwargs.user, kwargs.username) kwargs.applabel = coalesce(kwargs.applable, kwargs.queue, kwargs.queue_name) kwargs.topic = topic self.pulse = ModifiedGenericConsumer(kwargs, connect=True, **kwargs) self.start() def _got_result(self, data, message): global count data = wrap(data) with count_locker: Log.note("{{count}} from {{exchange}}", count=count, exchange=self.pulse.exchange) data._meta.count = count data._meta.exchange = self.pulse.exchange count += 1 if self.settings.debug: Log.note("{{data}}", data= data) if self.target_queue != None: try: self.target_queue.add(data) message.ack() except Exception as e: e = Except.wrap(e) if not self.target_queue.closed: # EXPECTED TO HAPPEN, THIS THREAD MAY HAVE BEEN AWAY FOR A WHILE raise e else: try: self.pulse_target(data) message.ack() except Exception as e: Log.warning("Problem processing pulse (see `data` in structured log)", data=data, cause=e) def _worker(self, please_stop): def disconnect(): with suppress_exception: self.target_queue.close() Log.note("stop put into queue") self.pulse.disconnect() Log.note("pulse listener was given a disconnect()") please_stop.on_go(disconnect) while not please_stop: try: self.pulse.listen() except Exception as e: if not please_stop: Log.warning("Pulse had problem (Have you set your Pulse permissions correctly?", e) Log.note("pulse listener is done") def __exit__(self, exc_type, exc_val, exc_tb): Log.note("clean pulse exit") self.please_stop.go() with suppress_exception: self.target_queue.close() 
Log.note("stop put into queue") try: self.pulse.disconnect() except Exception as e: Log.warning("Can not disconnect during pulse exit, ignoring", e) Thread.__exit__(self, exc_type, exc_val, exc_tb) class Publisher(object): """ Mimic GenericPublisher https://github.com/bhearsum/mozillapulse/blob/master/mozillapulse/publishers.py """ @override def __init__( self, exchange, # name of the Pulse exchange host='pulse.mozilla.org', # url to connect, port=5671, # tcp port user=None, password=None, vhost="/", start=0, # USED AS STARTING POINT FOR ASSIGNING THE _meta.count ATTRIBUTE ssl=True, applabel=None, heartbeat=False, # True to also get the Pulse heartbeat message durable=False, # True to keep queue after shutdown serializer='json', broker_timezone='GMT', kwargs=None ): self.settings = kwargs self.connection = None self.count = 0 def connect(self): if not self.connection: self.connection = Connection( hostname=self.settings.host, port=self.settings.port, userid=self.settings.user, password=self.settings.password, virtual_host=self.settings.vhost, ssl=self.settings.ssl ) def disconnect(self): if self.connection: self.connection.release() self.connection = None def send(self, topic, message): """Publishes a pulse message to the proper exchange.""" if not message: Log.error("Expecting a message") message._prepare() if not self.connection: self.connect() producer = Producer( channel=self.connection, exchange=Exchange(self.settings.exchange, type='topic'), routing_key=topic ) # The message is actually a simple envelope format with a payload and # some metadata. final_data = Data( payload=message.data, _meta=set_default({ 'exchange': self.settings.exchange, 'routing_key': message.routing_key, 'serializer': self.settings.serializer, 'sent': time_to_string(datetime.datetime.now(timezone(self.settings.broker_timezone))), 'count': self.count }, message.metadata) ) producer.publish(jsons.scrub(final_data), serializer=self.settings.serializer) self.count += 1 class ModifiedGenericConsumer(GenericConsumer): def _drain_events_loop(self): while True: try: self.connection.drain_events(timeout=self.timeout) except socket_timeout, e: Log.warning("timeout! Restarting {{name}} pulse consumer.", name=self.exchange, cause=e) try: self.disconnect() except Exception as f: Log.warning("Problem with disconnect()", cause=f) break
klahnakoski/MySQL-to-S3
vendor/pyLibrary/env/pulse.py
Python
mpl-2.0
7,554
0.00331
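A hedged construction sketch for the Consumer above (exchange, topic and credentials are placeholders; as __init__ enforces, exactly one of target / target_queue must be supplied):

def handle(payload):
    # Called per pulse message; returning without raising lets
    # _got_result() ack() the message.
    print(payload._meta.count)

consumer = Consumer(
    exchange='exchange/build/',  # placeholder exchange name
    topic='#',                   # '#' is the wildcard pattern
    target=handle,               # "slow digester" mode, no queue
    user='pulse-user',           # placeholder credentials
    password='pulse-password',
)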
# -*- coding: UTF-8 -*- # /* # * Copyright (C) 2013 Libor Zoubek + jondas # * # * # * This Program is free software; you can redistribute it and/or modify # * it under the terms of the GNU General Public License as published by # * the Free Software Foundation; either version 2, or (at your option) # * any later version. # * # * This Program is distributed in the hope that it will be useful, # * but WITHOUT ANY WARRANTY; without even the implied warranty of # * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # * GNU General Public License for more details. # * # * You should have received a copy of the GNU General Public License # * along with this program; see the file COPYING. If not, write to # * the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. # * http://www.gnu.org/copyleft/gpl.html # * # */ import sys import xbmcaddon import xbmcutil import util from resources.lib.sosac import SosacContentProvider from resources.lib.sutils import XBMCSosac __scriptid__ = 'plugin.video.sosac.ph' __scriptname__ = 'sosac.ph' __addon__ = xbmcaddon.Addon(id=__scriptid__) __language__ = __addon__.getLocalizedString __set__ = __addon__.getSetting settings = {'downloads': __set__('downloads'), 'quality': __set__('quality'), 'subs': __set__('subs') == 'true', 'add_subscribe': __set__('add_subscribe'), 'force-ch': __set__('force-ch') == 'true', 'force-sort': __set__('force-sort')} reverse_eps = __set__('order-episodes') == '0' force_english = __set__('force-english') == 'true' use_memory_cache = __set__('use-memory-cache') == 'true' util.info("URL: " + sys.argv[2]) params = util.params() if params == {}: xbmcutil.init_usage_reporting(__scriptid__) util.info("Running sosac provider with params: " + str(params)) XBMCSosac(SosacContentProvider(reverse_eps=reverse_eps, force_english=force_english, use_memory_cache=use_memory_cache), settings, __addon__).run(params)
zbyna/plugin.video.sosac.ph
default.py
Python
gpl-2.0
2,056
0.000486
# -*- coding: utf-8 -*- # ----------------------------------------------------------------------------- # Getting Things GNOME! - a personal organizer for the GNOME desktop # Copyright (c) 2008-2013 - Lionel Dricot & Bertrand Rousseau # # This program is free software: you can redistribute it and/or modify it under # the terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program. If not, see <http://www.gnu.org/licenses/>. # ----------------------------------------------------------------------------- from gi.repository import Gtk from GTG import _ class PasswordUI(Gtk.Box): '''Widget displaying a gtk.Label and a textbox to input a password''' def __init__(self, req, backend, width): '''Creates the gtk widgets and loads the current password in the text field @param req: a Requester @param backend: a backend object @param width: the width of the Gtk.Label object ''' super(PasswordUI, self).__init__() self.backend = backend self.req = req self._populate_gtk(width) self._load_password() self._connect_signals() def _populate_gtk(self, width): '''Creates the text box and the related label @param width: the width of the Gtk.Label object ''' password_label = Gtk.Label(label=_("Password:")) password_label.set_alignment(xalign=0, yalign=0.5) password_label.set_size_request(width=width, height=-1) self.pack_start(password_label, False, True, 0) align = Gtk.Alignment.new(0, 0.5, 1, 0) align.set_padding(0, 0, 10, 0) self.pack_start(align, True, True, 0) self.password_textbox = Gtk.Entry() align.add(self.password_textbox) def _load_password(self): '''Loads the password from the backend''' password = self.backend.get_parameters()['password'] self.password_textbox.set_invisible_char('*') self.password_textbox.set_visibility(False) self.password_textbox.set_text(password) def _connect_signals(self): '''Connects the gtk signals''' self.password_textbox.connect('changed', self.on_password_modified) def commit_changes(self): '''Saves the changes to the backend parameter ('password')''' password = self.password_textbox.get_text() self.backend.set_parameter('password', password) def on_password_modified(self, sender): ''' Signal callback, executed when the user edits the password. Disables the backend. The user will re-enable it to confirm the changes (s)he made. @param sender: not used, only here for signal compatibility ''' if self.backend.is_enabled() and not self.backend.is_default(): self.req.set_backend_enabled(self.backend.get_id(), False)
partp/gtg-services
GTG/gtk/backends_dialog/parameters_ui/passwordui.py
Python
gpl-3.0
3,286
0
''' Copyright 2013 Cosnita Radu Viorel Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. .. codeauthor:: Radu Viorel Cosnita <radu.cosnita@gmail.com> .. py:module:: fantastico.samples.simple_component.simple_urls ''' from fantastico.mvc.base_controller import BaseController from fantastico.mvc.controller_decorators import ControllerProvider, Controller from webob.response import Response @ControllerProvider() class SampleUrlsController(BaseController): '''This class provides some urls with limited functionality in order to enrich the samples from fantastico framework.''' @Controller(url="/simple-component/foreign-component-reusage") def refence_external_component(self, request): '''This method showcase external component reusage with template overriding. Take a look at the template of this controller.''' print(request.content_type) content = self.load_template("/foreign_component_reusage.html") return Response(content)
rcosnita/fantastico
fantastico/samples/simple_component/simple_urls.py
Python
mit
1,965
0.006107
#### NOTICE: THIS FILE IS AUTOGENERATED #### MODIFICATIONS MAY BE LOST IF DONE IMPROPERLY #### PLEASE SEE THE ONLINE DOCUMENTATION FOR EXAMPLES from swgpy.object import * def create(kernel): result = Intangible() result.template = "object/intangible/pet/shared_3po_protocol_droid_silver.iff" result.attribute_template_id = -1 result.stfName("","") #### BEGIN MODIFICATIONS #### #### END MODIFICATIONS #### return result
anhstudios/swganh
data/scripts/templates/object/intangible/pet/shared_3po_protocol_droid_silver.py
Python
mit
438
0.047945
""" Write a program that will help you play poker by telling you what kind of hand you have. Input: The first line of input contains the number of test cases (no more than 20). Each test case consists of one line - five space separated cards. Each card is represented by a two-letter (or digit) word. The first character is the rank (A,K,Q,J,T,9,8,7,6,5,4,3 or 2), the second character is the suit (S,H,D,C standing for spades, hearts, diamonds and clubs). The cards can be in any order (but they will not repeat). Output: For each test case output one line describing the type of a hand, exactly like in the list above. """ rank = ['A', 'K', 'Q', 'J', 'T', '9', '8', '7', '6', '5', '4', '3', '2'] suit = ['S', 'H', 'D', 'C'] def validate(val): if len(val) != 5: return False for v in val: if v[0] not in rank or v[1] not in suit: return False return True def deck_sort(inp): d = {'A': 0, 'K': 1, 'Q': 2, 'J': 3, 'T': 4, '9': 5, '8': 6, '7': 7, '6': 8, '5': 9, '4': 10, '3': 11, '2': 12} return sorted(inp, key=lambda x: d[x[0]]) def same_suit(inp): for i in inp: if not i[1] == inp[0][1]: return False return True def same_rank(inp): for i in inp: if i[0] != inp[0][0]: return False return True def consecutive(inp): nxt = '' for i in inp: if not nxt: nxt = rank.index(i[0]) + 1 elif rank[nxt] == i[0]: nxt = rank.index(i[0]) + 1 else: return False return True def test(inp): if royal_flush(inp): print('Royal Flush') elif straight_flush(inp): print('Straight Flush') elif four_of_a_kind(inp): print('Four of a Kind') elif full_house(inp): print('Full House') elif flush(inp): print('Flush') elif straight(inp): print('Straight') elif three_of_a_kind(inp): print('Three of a Kind') elif two_pair(inp): print('Two Pair') elif one_pair(inp): print('One Pair') else: print('"High" Card') def straight_flush(inp): return same_suit(inp) and consecutive(inp) def royal_flush(inp): return straight_flush(inp) and inp[0][0] == 'A' def four_of_a_kind(inp): return (same_rank(inp[:4])) or \ (same_rank(inp[1:])) def full_house(inp): return (same_rank(inp[:3]) and same_rank(inp[3:])) or \ (same_rank(inp[:2]) and same_rank(inp[2:])) def flush(inp): return same_suit(inp) def straight(inp): return consecutive(inp) def three_of_a_kind(inp): return (same_rank(inp[0:3])) or \ (same_rank(inp[1:4])) or \ (same_rank(inp[2:5])) def two_pair(inp): return (same_rank(inp[0:2]) and same_rank(inp[2:4])) or \ (same_rank(inp[0:2]) and same_rank(inp[3:5])) or \ (same_rank(inp[1:3]) and same_rank(inp[3:5])) def one_pair(inp): return (same_rank(inp[0:2])) or \ (same_rank(inp[1:3])) or \ (same_rank(inp[2:4])) or \ (same_rank(inp[3:5])) if __name__ == '__main__': number = int(input('Number of inputs: ')) print('Please enter combinations: ') for i in range(number): cards = input('> ').upper().split() if validate(cards): test(deck_sort(cards)) else: print('invalid input')
DayGitH/Python-Challenges
DailyProgrammer/20120330B.py
Python
mit
3,384
0.001773
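A few illustrative calls against the classifier above (hands use the documented two-character card format and are passed through deck_sort() first, exactly as the __main__ block does):

test(deck_sort('AS KS QS JS TS'.split()))  # prints: Royal Flush
test(deck_sort('9C 9D 2H 2D 2S'.split()))  # prints: Full House
test(deck_sort('AH KD 8C 5S 2H'.split()))  # prints: "High" Card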
#!/usr/bin/env python
import os
import shutil

# def main(src, dst, symlinks=False, ignore=None):
#     """Main entry point for the script."""
#     copytree(src, dst, symlinks=False, ignore=None)


def copytree(src, dst, symlinks=False, ignore=None):
    """Copy the children of src into the existing directory dst.

    Unlike shutil.copytree, dst must already exist: subdirectories are
    copied recursively with shutil.copytree, files with shutil.copy2.
    """
    for item in os.listdir(src):
        s = os.path.join(src, item)
        d = os.path.join(dst, item)
        if os.path.isdir(s):
            shutil.copytree(s, d, symlinks, ignore)
        else:
            shutil.copy2(s, d)

# # calling main
# if __name__ == '__main__':
#     import sys
#     src = sys.argv[1]
#     dst = sys.argv[2]
#     symlinks = sys.argv[3]
#     ignore = sys.argv[4]
#     main(src, dst, symlinks, ignore)
joelfiddes/toposubv2
topoMAPP/utils/copytree.py
Python
gpl-3.0
724
0.002762
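A quick usage sketch (temporary directories stand in for real paths; note that the destination must already exist, since only the children of src are copied into it):

import os
import tempfile

src = tempfile.mkdtemp()
dst = tempfile.mkdtemp()
with open(os.path.join(src, 'a.txt'), 'w') as f:
    f.write('hello')
copytree(src, dst)
assert os.path.exists(os.path.join(dst, 'a.txt'))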
import os import re import sys import logging from coalib.collecting.Collectors import ( collect_all_bears_from_sections, filter_section_bears_by_languages) from coalib.misc import Constants from coalib.output.ConfWriter import ConfWriter from coalib.output.printers.LOG_LEVEL import LOG_LEVEL from coalib.parsing.CliParsing import parse_cli, check_conflicts from coalib.parsing.ConfParser import ConfParser from coalib.settings.Section import Section, extract_aspects_from_section from coalib.settings.SectionFilling import fill_settings from coalib.settings.Setting import Setting, path from string import Template COAFILE_OUTPUT = Template('$type \'$file\' $found!\n' 'Here\'s what you can do:\n' '* add `--save` to generate a config file with ' 'your current options\n' '* add `-I` to suppress any use of config files\n') def aspectize_sections(sections): """ Search for aspects related setting in a section, initialize it, and then embed the aspects information as AspectList object into the section itself. :param sections: List of section that potentially contain aspects setting. :return: The new sections. """ for section_name, section in sections.items(): section.aspects = extract_aspects_from_section(section) if section.aspects is not None and len(section.get('bears')): logging.warning("'aspects' and 'bears' configuration is detected " "in section '{}'. Aspect-based configuration will " 'takes priority and will overwrite any ' 'explicitly listed bears'.format(section_name)) return sections def merge_section_dicts(lower, higher): """ Merges the section dictionaries. The values of higher will take precedence over the ones of lower. Lower will hold the modified dict in the end. :param lower: A section. :param higher: A section which values will take precedence over the ones from the other. :return: The merged dict. """ for name in higher: if name in lower: lower[name].update(higher[name], ignore_defaults=True) else: # no deep copy needed lower[name] = higher[name] return lower def load_config_file(filename, log_printer, silent=False): """ Loads sections from a config file. Prints an appropriate warning if it doesn't exist and returns a section dict containing an empty default section in that case. It assumes that the cli_sections are available. :param filename: The file to load settings from. :param log_printer: The log printer to log the warning/error to (in case). :param silent: Whether or not to warn the user/exit if the file doesn't exist. :raises SystemExit: Exits when the given filename is invalid and is not the default coafile. Only raised when ``silent`` is ``False``. """ filename = os.path.abspath(filename) try: return ConfParser().parse(filename) except FileNotFoundError: if not silent: if os.path.basename(filename) == Constants.default_coafile: log_printer.warn(COAFILE_OUTPUT .substitute(type='Default coafile', file=Constants.default_coafile, found='not found')) else: log_printer.err(COAFILE_OUTPUT .substitute(type='Requested coafile', file=filename, found='does not exist')) sys.exit(2) return {'default': Section('default')} def save_sections(sections): """ Saves the given sections if they are to be saved. :param sections: A section dict. 
""" default_section = sections['cli'] try: if bool(default_section.get('save', 'false')): conf_writer = ConfWriter( str(default_section.get('config', Constants.default_coafile))) else: return except ValueError: conf_writer = ConfWriter(str(default_section.get('save', '.coafile'))) conf_writer.write_sections(sections) conf_writer.close() def warn_nonexistent_targets(targets, sections, log_printer): """ Prints out a warning on the given log printer for all targets that are not existent within the given sections. :param targets: The targets to check. :param sections: The sections to search. (Dict.) :param log_printer: The log printer to warn to. """ for target in targets: if target not in sections: log_printer.warn( "The requested section '{section}' is not existent. " 'Thus it cannot be executed.'.format(section=target)) # Can't be summarized as python will evaluate conditions lazily, those # functions have intended side effects though. files_config_absent = warn_config_absent(sections, 'files', log_printer) bears_config_absent = warn_config_absent(sections, 'bears', log_printer) if files_config_absent or bears_config_absent: raise SystemExit(2) # Invalid CLI options provided def warn_config_absent(sections, argument, log_printer): """ Checks if the given argument is present somewhere in the sections and emits a warning that code analysis can not be run without it. :param sections: A dictionary of sections. :param argument: The argument to check for, e.g. "files". :param log_printer: A log printer to emit the warning to. :return: Returns a boolean True if the given argument is present in the sections, else returns False. """ if all(argument not in section for section in sections.values()): log_printer.warn('coala will not run any analysis. Did you forget ' 'to give the `--{}` argument?'.format(argument)) return True return False def load_configuration(arg_list, log_printer, arg_parser=None, args=None): """ Parses the CLI args and loads the config file accordingly, taking default_coafile and the users .coarc into account. :param arg_list: The list of CLI arguments. :param log_printer: The LogPrinter object for logging. :param arg_parser: An ``argparse.ArgumentParser`` instance used for parsing the CLI arguments. :param args: Alternative pre-parsed CLI arguments. :return: A tuple holding (log_printer: LogPrinter, sections: dict(str, Section), targets: list(str)). (Types indicated after colon.) 
""" cli_sections = parse_cli(arg_list=arg_list, arg_parser=arg_parser, args=args) check_conflicts(cli_sections) if ( bool(cli_sections['cli'].get('find_config', 'False')) and str(cli_sections['cli'].get('config')) == ''): cli_sections['cli'].add_or_create_setting( Setting('config', re.escape(find_user_config(os.getcwd())))) targets = [] # We don't want to store targets argument back to file, thus remove it for item in list(cli_sections['cli'].contents.pop('targets', '')): targets.append(item.lower()) if bool(cli_sections['cli'].get('no_config', 'False')): sections = cli_sections else: base_sections = load_config_file(Constants.system_coafile, log_printer) user_sections = load_config_file( Constants.user_coafile, log_printer, silent=True) default_config = str(base_sections['default'].get('config', '.coafile')) user_config = str(user_sections['default'].get( 'config', default_config)) config = os.path.abspath( str(cli_sections['cli'].get('config', user_config))) try: save = bool(cli_sections['cli'].get('save', 'False')) except ValueError: # A file is deposited for the save parameter, means we want to save # but to a specific file. save = True coafile_sections = load_config_file(config, log_printer, silent=save) sections = merge_section_dicts(base_sections, user_sections) sections = merge_section_dicts(sections, coafile_sections) if 'cli' in sections: logging.warning('\'cli\' is an internally reserved section name. ' 'It may have been generated into your coafile ' 'while running coala with `--save`. The settings ' 'in that section will inherit implicitly to all ' 'sections as defaults just like CLI args do.' 'Please change the name of that section in your ' 'coafile to avoid any unexpected behavior.') sections = merge_section_dicts(sections, cli_sections) for name, section in list(sections.items()): section.set_default_section(sections) if name == 'default': if section.contents: logging.warning('Implicit \'Default\' section inheritance is ' 'deprecated. It will be removed soon. To ' 'silence this warning remove settings in the ' '\'Default\' section from your coafile. You ' 'can use dots to specify inheritance: the ' 'section \'all.python\' will inherit all ' 'settings from \'all\'.') sections['default'].update(sections['cli']) sections['default'].name = 'cli' sections['cli'] = sections['default'] del sections['default'] str_log_level = str(sections['cli'].get('log_level', '')).upper() log_printer.log_level = LOG_LEVEL.str_dict.get(str_log_level, LOG_LEVEL.INFO) return sections, targets def find_user_config(file_path, max_trials=10): """ Uses the filepath to find the most suitable user config file for the file by going down one directory at a time and finding config files there. :param file_path: The path of the file whose user config needs to be found :param max_trials: The maximum number of directories to go down to. :return: The config file's path, empty string if none was found """ file_path = os.path.normpath(os.path.abspath(os.path.expanduser( file_path))) old_dir = None base_dir = (file_path if os.path.isdir(file_path) else os.path.dirname(file_path)) home_dir = os.path.expanduser('~') while base_dir != old_dir and old_dir != home_dir and max_trials != 0: config_file = os.path.join(base_dir, '.coafile') if os.path.isfile(config_file): return config_file old_dir = base_dir base_dir = os.path.dirname(old_dir) max_trials = max_trials - 1 return '' def get_config_directory(section): """ Retrieves the configuration directory for the given section. 
Given an empty section: >>> section = Section("name") The configuration directory is not defined and will therefore fallback to the current directory: >>> get_config_directory(section) == os.path.abspath(".") True If the ``files`` setting is given with an originating coafile, the directory of the coafile will be assumed the configuration directory: >>> section.append(Setting("files", "**", origin="/tmp/.coafile")) >>> get_config_directory(section) == os.path.abspath('/tmp/') True However if its origin is already a directory this will be preserved: >>> files = section['files'] >>> files.origin = os.path.abspath('/tmp/dir/') >>> section.append(files) >>> os.makedirs(section['files'].origin, exist_ok=True) >>> get_config_directory(section) == section['files'].origin True The user can manually set a project directory with the ``project_dir`` setting: >>> section.append(Setting('project_dir', os.path.abspath('/tmp'), '/')) >>> get_config_directory(section) == os.path.abspath('/tmp') True If no section is given, the current directory is returned: >>> get_config_directory(None) == os.path.abspath(".") True To summarize, the config directory will be chosen by the following priorities if possible in that order: - the ``project_dir`` setting - the origin of the ``files`` setting, if it's a directory - the directory of the origin of the ``files`` setting - the current directory :param section: The section to inspect. :return: The directory where the project is lying. """ if section is None: return os.getcwd() if 'project_dir' in section: return path(section.get('project_dir')) config = os.path.abspath(section.get('files', '').origin) return config if os.path.isdir(config) else os.path.dirname(config) def get_all_bears(log_printer, arg_parser=None): """ :param log_printer: The log_printer to handle logging. :param arg_parser: An ``ArgParser`` object. :return: Tuple containing dictionaries of local bears and global bears. """ sections, _ = load_configuration(arg_list=None, log_printer=log_printer, arg_parser=arg_parser) local_bears, global_bears = collect_all_bears_from_sections( sections, log_printer) return local_bears, global_bears def get_filtered_bears(languages, log_printer, arg_parser=None): """ :param languages: List of languages. :param log_printer: The log_printer to handle logging. :param arg_parser: An ``ArgParser`` object. :return: Tuple containing dictionaries of local bears and global bears. """ local_bears, global_bears = get_all_bears(log_printer, arg_parser) if languages: local_bears = filter_section_bears_by_languages( local_bears, languages) global_bears = filter_section_bears_by_languages( global_bears, languages) return local_bears, global_bears def gather_configuration(acquire_settings, log_printer, arg_list=None, arg_parser=None, args=None): """ Loads all configuration files, retrieves bears and all needed settings, saves back if needed and warns about non-existent targets. This function: - Reads and merges all settings in sections from - Default config - User config - Configuration file - CLI - Collects all the bears - Fills up all needed settings - Writes back the new sections to the configuration file if needed - Gives all information back to caller :param acquire_settings: The method to use for requesting settings. It will get a parameter which is a dictionary with the settings name as key and a list containing a description in [0] and the names of the bears who need this setting in all following indexes. :param log_printer: The log printer to use for logging. 
The log level will be adjusted to the one given by the section. :param arg_list: CLI args to use :param arg_parser: Instance of ArgParser that is used to parse none-setting arguments. :param args: Alernative pre-parsed CLI arguments. :return: A tuple with the following contents: - A dictionary with the sections - Dictionary of list of local bears for each section - Dictionary of list of global bears for each section - The targets list """ if args is None: # Note: arg_list can also be []. Hence we cannot use # `arg_list = arg_list or default_list` arg_list = sys.argv[1:] if arg_list is None else arg_list sections, targets = load_configuration(arg_list, log_printer, arg_parser, args=args) aspectize_sections(sections) local_bears, global_bears = fill_settings(sections, acquire_settings, log_printer) save_sections(sections) warn_nonexistent_targets(targets, sections, log_printer) return (sections, local_bears, global_bears, targets)
damngamerz/coala
coalib/settings/ConfigurationGathering.py
Python
agpl-3.0
17,309
0.000116
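A standalone sketch of the upward '.coafile' walk performed by find_user_config() above, using only os and the same stopping rules (home directory, filesystem root, or the trial limit); the function name here is illustrative:

import os

def find_coafile_upwards(start, max_trials=10):
    base_dir = os.path.abspath(start)
    old_dir = None
    home_dir = os.path.expanduser('~')
    while base_dir != old_dir and old_dir != home_dir and max_trials != 0:
        candidate = os.path.join(base_dir, '.coafile')
        if os.path.isfile(candidate):
            return candidate
        # Step one directory up, exactly as find_user_config() does.
        old_dir, base_dir = base_dir, os.path.dirname(base_dir)
        max_trials -= 1
    return ''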
import elasticsearch

import migrates
from .test_utils import callmigrates, iterate_test_data, remove_test_data

document_count = 1000

def insert_test_data(connection):
    with migrates.Batch(connection, migrates.Logger()) as batch:
        for i in range(0, document_count):
            batch.add({
                '_op_type': 'index',
                '_index': 'migrates_test_reindex',
                '_type': 'test_' + str(i % 3),
                '_id': str(i),
                '_source': {'x': i}
            })

def validate_test_data(connection, index):
    docs = set()
    for document in iterate_test_data(connection, index=index):
        docs.add(document['_source']['x'])
    assert len(docs) == document_count

def __main__():
    logger = migrates.Logger()
    connection = elasticsearch.Elasticsearch()
    logger.log('Removing old test data.')
    remove_test_data(connection)
    try:
        logger.log('Inserting new test data.')
        insert_test_data(connection)
        logger.log('Reindexing data back into the same index.')
        callmigrates('reindex migrates_test_reindex -y')
        logger.log('Validating resulting data.')
        validate_test_data(connection, index='migrates_test_reindex')
        logger.log('Reindexing data into a different index.')
        callmigrates('reindex "migrates_test_reindex=>migrates_test_reindex_2" -y')
        logger.log('Validating resulting data.')
        assert not connection.indices.exists('migrates_test_reindex')
        validate_test_data(connection, index='migrates_test_reindex_2')
    finally:
        logger.log('Cleaning up test data.')
        remove_test_data(connection)

if __name__ == '__main__':
    __main__()
pineapplemachine/migrates
test/test_reindex.py
Python
gpl-3.0
1,758
0.006826
#### NOTICE: THIS FILE IS AUTOGENERATED #### MODIFICATIONS MAY BE LOST IF DONE IMPROPERLY #### PLEASE SEE THE ONLINE DOCUMENTATION FOR EXAMPLES from swgpy.object import * def create(kernel): result = Tangible() result.template = "object/tangible/ship/crafted/droid_interface/shared_base_droid_interface_subcomponent_mk3.iff" result.attribute_template_id = 8 result.stfName("space_crafting_n","base_droid_interface_subcomponent_mk3") #### BEGIN MODIFICATIONS #### #### END MODIFICATIONS #### return result
anhstudios/swganh
data/scripts/templates/object/tangible/ship/crafted/droid_interface/shared_base_droid_interface_subcomponent_mk3.py
Python
mit
523
0.042065
DB_CONFIG = "postgresql://user:password@localhost/db" ROOT_DIR = "/path/to/project" ADMIN_USER = "username" ADMIN_PW = "password"
miketung168/survey-dashboard
example_config.py
Python
mit
130
0
############################################################################## # Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC. # Produced at the Lawrence Livermore National Laboratory. # # This file is part of Spack. # Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. # LLNL-CODE-647188 # # For details, see https://github.com/llnl/spack # Please also see the NOTICE and LICENSE files for our notice and the LGPL. # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU Lesser General Public License (as # published by the Free Software Foundation) version 2.1, February 1999. # # This program is distributed in the hope that it will be useful, but # WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and # conditions of the GNU Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with this program; if not, write to the Free Software # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## from spack import * class LuaLuafilesystem(Package): """LuaFileSystem is a Lua library developed to complement the set of functions related to file systems offered by the standard Lua distribution. LuaFileSystem offers a portable way to access the underlying directory structure and file attributes. LuaFileSystem is free software and uses the same license as Lua 5.1 """ homepage = 'http://keplerproject.github.io/luafilesystem' url = 'https://github.com/keplerproject/luafilesystem/archive/v1_6_3.tar.gz' version('1_6_3', 'bed11874cfded8b4beed7dd054127b24') # The version constraint here comes from this post: # # https://www.perforce.com/blog/git-beyond-basics-using-shallow-clones # # where it is claimed that full shallow clone support was added @1.9 depends_on('git@1.9.0:', type='build') extends('lua') def install(self, spec, prefix): rockspec_fmt = join_path(self.stage.path, 'luafilesystem-{version.underscored}', 'rockspecs', 'luafilesystem-{version.dotted}-1.rockspec') luarocks('--tree=' + prefix, 'install', rockspec_fmt.format(version=self.spec.version))
lgarren/spack
var/spack/repos/builtin/packages/lua-luafilesystem/package.py
Python
lgpl-2.1
2,512
0.000398
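A small sketch of what the rockspec_fmt template in the package above expands to (the Version object is faked here; spack's real Version exposes the underscored and dotted spellings used by the template):

class FakeVersion(object):
    underscored = '1_6_3'  # what spack renders for {version.underscored}
    dotted = '1.6.3'       # what spack renders for {version.dotted}

fmt = ('luafilesystem-{version.underscored}/rockspecs/'
       'luafilesystem-{version.dotted}-1.rockspec')
print(fmt.format(version=FakeVersion()))
# luafilesystem-1_6_3/rockspecs/luafilesystem-1.6.3-1.rockspec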
#!/usr/bin/env python """interpret a comapct grid specification using regex""" import re # use a compact regular expression with nested OR expressions, # and hence many groups, but name the outer (main) groups: real_short1 = \ r'\s*(?P<lower>-?(\d+(\.\d*)?|\d*\.\d+)([eE][+\-]?\d+)?)\s*' real_short2 = \ r'\s*(?P<upper>-?(\d+(\.\d*)?|\d*\.\d+)([eE][+\-]?\d+)?)\s*' # regex for real interval [a,b] : domain = r'\[' + real_short1 + ',' + real_short2 + r'\]' # regex for integer interval [a:b] : indices = r'\[\s*(-?\d+)\s*:\s*(-?\d+)\s*\]' # test: examples = ('domain=[0,10] indices=[0:11]', 'domain=[0.1,1.1]x[0,2E+00] indices=[1:21]x[1:101]', '[0,1]x[0,2]x[-1,1.5] [1:21]x[1:11]x[-10:15]') for ex in examples: print re.findall(indices, ex) # a nested list is returned; requires nested group counting print re.findall(domain, ex) print # work with compiled expressions and the groupindex dictionary to # extract the named groups easily from the nested list that is # returned from re.findall: print 'work with groupindex:' for ex in examples: print re.findall(indices, ex) c = re.compile(domain) groups = c.findall(ex) intervals = [] for i in range(len(groups)): intervals.append( (groups[i][c.groupindex['lower']-1], groups[i][c.groupindex['upper']-1])) print intervals print # work with non-capturing parenthesis of the form (?:pattern) real_short1 = \ r'\s*(?P<lower>-?(?:\d+(?:\.\d*)?|\d*\.\d+)(?:[eE][+\-]?\d+)?)\s*' real_short2 = \ r'\s*(?P<upper>-?(?:\d+(?:\.\d*)?|\d*\.\d+)(?:[eE][+\-]?\d+)?)\s*' # regex for real interval [a,b] : domain = r'\[' + real_short1 + ',' + real_short2 + r'\]' print 'non-capturing groups:' for ex in examples: print re.findall(domain, ex) print # avoid parenthesis, i.e., nested OR expressions: real_sn = r'-?\d\.?\d*[Ee][+\-][0-9]+' real_dn = r'-?\d*\.\d*' real_in = r'-?\d+' real1 = \ r'\s*(?P<lower>' + real_sn + '|' + real_dn + '|' + real_in + ')\s*' real2 = \ r'\s*(?P<upper>' + real_sn + '|' + real_dn + '|' + real_in + ')\s*' # regex for real interval [a,b] : domain = r'\[' + real1 + ',' + real2 + r'\]' # regex for integer interval [a:b] : indices = r'\[\s*(-?\d+)\s*:\s*(-?\d+)\s*\]' print '\navoid so many parenthesis (just two groups now for each interval):' for ex in examples: print re.findall(indices, ex) print re.findall(domain, ex) print # much simpler _working_ versions: domain = r'\[([^,]*),([^\]]*)\]' indices = r'\[([^:,]*):([^\]]*)\]' print '\nsimpler regular expressions:\n', domain, indices for ex in examples: print re.findall(indices, ex) print re.findall(domain, ex) print # these give wrong results domain = r'\[(.*?),(.*?)\]' indices = r'\[(.*?):(.*?)\]' print '\nalternative; simpler regular expressions:\n', domain, indices for ex in examples: print re.findall(indices, ex) print re.findall(domain, ex) print
sniemi/SamPy
sandbox/src1/TCSE3-3rd-examples/src/py/regex/fdmgrid.py
Python
bsd-2-clause
2,941
0.00136
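A short, self-contained demonstration of the point the file above makes about non-capturing groups: with nested capturing groups, re.findall returns a tuple per match carrying every group, while (?:...) keeps only the group you actually want:

import re

s = 'domain=[0.1,1.1]x[0,2E+00]'
capturing = r'\[\s*(-?(\d+(\.\d*)?|\d*\.\d+)([eE][+\-]?\d+)?)\s*,'
noncapturing = r'\[\s*(-?(?:\d+(?:\.\d*)?|\d*\.\d+)(?:[eE][+\-]?\d+)?)\s*,'

print(re.findall(capturing, s))     # tuples carrying all nested groups
print(re.findall(noncapturing, s))  # just the lower bounds: ['0.1', '0']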
#!/usr/bin/python # -*- coding: utf-8 -*- # ----------------------------------------------- # ----> Computer Aided Optical Analysis <---- # ----------------------------------------------- # (c) 2015 by Swissatest Testmaterialien AG # http://www.swissatest.ch # ----------------------------------------------- # Developeded 2015 by __author__ = 'Raoul René Melcer' # raoul.rene.melcer@webservices-consulting.ch # http://webservices-consulting.ch # ----------------------------------------------- # License: Apache 2 # http://www.apache.org/licenses/LICENSE-2.0 # ----------------------------------------------- # File: credits.py __date__ = '$03.09.2015 11:30:42$' # https://github.com/rrmelcer/swissatest-analysis # ----------------------------------------------- # Descripion: # Show the application credits in GUI # ----------------------------------------------- import logging from PyQt4 import QtCore, QtGui try: _fromUtf8 = QtCore.QString.fromUtf8 except AttributeError: def _fromUtf8(s): return s try: _encoding = QtGui.QApplication.UnicodeUTF8 def _translate(context, text, disambig): return QtGui.QApplication.translate(context, text, disambig, _encoding) except AttributeError: def _translate(context, text, disambig): return QtGui.QApplication.translate(context, text, disambig) class Ui_Credits(QtGui.QWidget): def __init__(self): QtGui.QWidget.__init__(self) self.setupUi(self) logging.debug('{0} module init'.format(self.__class__)) def __del__(self): logging.debug('{0} module del'.format(self.__class__)) def setupUi(self, dia_credits): dia_credits.setObjectName(_fromUtf8("dia_credits")) dia_credits.resize(420, 800) dia_credits.setMinimumSize(QtCore.QSize(420, 700)) icon = QtGui.QIcon() icon.addPixmap(QtGui.QPixmap(_fromUtf8(":/icons/doc/img/icons/icon.ico")), QtGui.QIcon.Normal, QtGui.QIcon.Off) dia_credits.setWindowIcon(icon) dia_credits.setLocale(QtCore.QLocale(QtCore.QLocale.English, QtCore.QLocale.UnitedStates)) self.gridLayout = QtGui.QGridLayout(dia_credits) self.gridLayout.setObjectName(_fromUtf8("gridLayout")) self.label_4 = QtGui.QLabel(dia_credits) font = QtGui.QFont() font.setBold(True) font.setWeight(75) self.label_4.setFont(font) self.label_4.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignTop) self.label_4.setOpenExternalLinks(True) self.label_4.setTextInteractionFlags(QtCore.Qt.LinksAccessibleByKeyboard|QtCore.Qt.LinksAccessibleByMouse|QtCore.Qt.TextBrowserInteraction|QtCore.Qt.TextSelectableByKeyboard|QtCore.Qt.TextSelectableByMouse) self.label_4.setObjectName(_fromUtf8("label_4")) self.gridLayout.addWidget(self.label_4, 12, 1, 1, 1) self.line_4 = QtGui.QFrame(dia_credits) self.line_4.setFrameShape(QtGui.QFrame.HLine) self.line_4.setFrameShadow(QtGui.QFrame.Sunken) self.line_4.setObjectName(_fromUtf8("line_4")) self.gridLayout.addWidget(self.line_4, 5, 0, 1, 2) self.label_3 = QtGui.QLabel(dia_credits) self.label_3.setOpenExternalLinks(True) self.label_3.setTextInteractionFlags(QtCore.Qt.LinksAccessibleByKeyboard|QtCore.Qt.LinksAccessibleByMouse|QtCore.Qt.TextBrowserInteraction|QtCore.Qt.TextSelectableByKeyboard|QtCore.Qt.TextSelectableByMouse) self.label_3.setObjectName(_fromUtf8("label_3")) self.gridLayout.addWidget(self.label_3, 3, 1, 1, 1) self.label_2 = QtGui.QLabel(dia_credits) self.label_2.setMaximumSize(QtCore.QSize(50, 50)) self.label_2.setText(_fromUtf8("")) self.label_2.setPixmap(QtGui.QPixmap(_fromUtf8(":/logos/doc/img/logos/users.png"))) self.label_2.setScaledContents(True) 
self.label_2.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignTop) self.label_2.setObjectName(_fromUtf8("label_2")) self.gridLayout.addWidget(self.label_2, 3, 0, 1, 1) self.line = QtGui.QFrame(dia_credits) self.line.setFrameShape(QtGui.QFrame.HLine) self.line.setFrameShadow(QtGui.QFrame.Sunken) self.line.setObjectName(_fromUtf8("line")) self.gridLayout.addWidget(self.line, 2, 0, 1, 2) self.gridLayout_2 = QtGui.QGridLayout() self.gridLayout_2.setObjectName(_fromUtf8("gridLayout_2")) spacerItem = QtGui.QSpacerItem(458, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum) self.gridLayout_2.addItem(spacerItem, 0, 0, 1, 1) self.btn_close_credits = QtGui.QPushButton(dia_credits) icon = QtGui.QIcon.fromTheme(_fromUtf8("window-close")) self.btn_close_credits.setIcon(icon) self.btn_close_credits.setFlat(False) self.btn_close_credits.setObjectName(_fromUtf8("btn_close_credits")) self.gridLayout_2.addWidget(self.btn_close_credits, 0, 1, 1, 1) self.gridLayout.addLayout(self.gridLayout_2, 15, 0, 1, 2) self.line_2 = QtGui.QFrame(dia_credits) self.line_2.setFrameShape(QtGui.QFrame.HLine) self.line_2.setFrameShadow(QtGui.QFrame.Sunken) self.line_2.setObjectName(_fromUtf8("line_2")) self.gridLayout.addWidget(self.line_2, 8, 0, 1, 2) self.label_8 = QtGui.QLabel(dia_credits) self.label_8.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignTop) self.label_8.setOpenExternalLinks(True) self.label_8.setTextInteractionFlags(QtCore.Qt.LinksAccessibleByKeyboard|QtCore.Qt.LinksAccessibleByMouse|QtCore.Qt.TextBrowserInteraction|QtCore.Qt.TextSelectableByKeyboard|QtCore.Qt.TextSelectableByMouse) self.label_8.setObjectName(_fromUtf8("label_8")) self.gridLayout.addWidget(self.label_8, 0, 1, 1, 1) self.label_7 = QtGui.QLabel(dia_credits) self.label_7.setMaximumSize(QtCore.QSize(50, 50)) self.label_7.setText(_fromUtf8("")) self.label_7.setPixmap(QtGui.QPixmap(_fromUtf8(":/logos/doc/img/logos/licence.png"))) self.label_7.setScaledContents(True) self.label_7.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignTop) self.label_7.setObjectName(_fromUtf8("label_7")) self.gridLayout.addWidget(self.label_7, 9, 0, 1, 1) self.label = QtGui.QLabel(dia_credits) self.label.setFrameShadow(QtGui.QFrame.Plain) self.label.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignTop) self.label.setOpenExternalLinks(True) self.label.setTextInteractionFlags(QtCore.Qt.LinksAccessibleByKeyboard|QtCore.Qt.LinksAccessibleByMouse|QtCore.Qt.TextBrowserInteraction|QtCore.Qt.TextSelectableByKeyboard|QtCore.Qt.TextSelectableByMouse) self.label.setObjectName(_fromUtf8("label")) self.gridLayout.addWidget(self.label, 6, 1, 1, 1) self.label_9 = QtGui.QLabel(dia_credits) self.label_9.setEnabled(True) self.label_9.setMaximumSize(QtCore.QSize(50, 50)) self.label_9.setText(_fromUtf8("")) self.label_9.setPixmap(QtGui.QPixmap(_fromUtf8(":/logos/doc/img/logos/applications.png"))) self.label_9.setScaledContents(True) self.label_9.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignTop) self.label_9.setObjectName(_fromUtf8("label_9")) self.gridLayout.addWidget(self.label_9, 0, 0, 1, 1) self.line_3 = QtGui.QFrame(dia_credits) self.line_3.setFrameShape(QtGui.QFrame.HLine) self.line_3.setFrameShadow(QtGui.QFrame.Sunken) self.line_3.setObjectName(_fromUtf8("line_3")) self.gridLayout.addWidget(self.line_3, 11, 0, 1, 2) self.label_5 = QtGui.QLabel(dia_credits) self.label_5.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignTop) 
self.label_5.setObjectName(_fromUtf8("label_5")) self.gridLayout.addWidget(self.label_5, 9, 1, 1, 1) self.label_10 = QtGui.QLabel(dia_credits) self.label_10.setMaximumSize(QtCore.QSize(50, 50)) self.label_10.setText(_fromUtf8("")) self.label_10.setPixmap(QtGui.QPixmap(_fromUtf8(":/logos/doc/img/logos/python-logo.png"))) self.label_10.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignTop) self.label_10.setObjectName(_fromUtf8("label_10")) self.gridLayout.addWidget(self.label_10, 12, 0, 1, 1) self.table_libs = QtGui.QTableWidget(dia_credits) self.table_libs.setHorizontalScrollBarPolicy(QtCore.Qt.ScrollBarAlwaysOff) self.table_libs.setEditTriggers(QtGui.QAbstractItemView.NoEditTriggers) self.table_libs.setAlternatingRowColors(True) self.table_libs.setObjectName(_fromUtf8("table_libs")) self.table_libs.setColumnCount(3) self.table_libs.setRowCount(8) item = QtGui.QTableWidgetItem() self.table_libs.setVerticalHeaderItem(0, item) item = QtGui.QTableWidgetItem() self.table_libs.setVerticalHeaderItem(1, item) item = QtGui.QTableWidgetItem() self.table_libs.setVerticalHeaderItem(2, item) item = QtGui.QTableWidgetItem() self.table_libs.setVerticalHeaderItem(3, item) item = QtGui.QTableWidgetItem() self.table_libs.setVerticalHeaderItem(4, item) item = QtGui.QTableWidgetItem() self.table_libs.setVerticalHeaderItem(5, item) item = QtGui.QTableWidgetItem() self.table_libs.setVerticalHeaderItem(6, item) item = QtGui.QTableWidgetItem() self.table_libs.setVerticalHeaderItem(7, item) item = QtGui.QTableWidgetItem() icon = QtGui.QIcon.fromTheme(_fromUtf8("application-x-executable")) item.setIcon(icon) self.table_libs.setHorizontalHeaderItem(0, item) item = QtGui.QTableWidgetItem() icon = QtGui.QIcon.fromTheme(_fromUtf8("applications-development")) item.setIcon(icon) self.table_libs.setHorizontalHeaderItem(1, item) item = QtGui.QTableWidgetItem() icon = QtGui.QIcon.fromTheme(_fromUtf8("applications-internet")) item.setIcon(icon) self.table_libs.setHorizontalHeaderItem(2, item) item = QtGui.QTableWidgetItem() self.table_libs.setItem(0, 0, item) item = QtGui.QTableWidgetItem() self.table_libs.setItem(0, 1, item) item = QtGui.QTableWidgetItem() self.table_libs.setItem(0, 2, item) item = QtGui.QTableWidgetItem() self.table_libs.setItem(1, 0, item) item = QtGui.QTableWidgetItem() self.table_libs.setItem(1, 1, item) item = QtGui.QTableWidgetItem() self.table_libs.setItem(1, 2, item) item = QtGui.QTableWidgetItem() self.table_libs.setItem(2, 0, item) item = QtGui.QTableWidgetItem() self.table_libs.setItem(2, 1, item) item = QtGui.QTableWidgetItem() self.table_libs.setItem(2, 2, item) item = QtGui.QTableWidgetItem() self.table_libs.setItem(3, 0, item) item = QtGui.QTableWidgetItem() self.table_libs.setItem(3, 1, item) item = QtGui.QTableWidgetItem() self.table_libs.setItem(3, 2, item) item = QtGui.QTableWidgetItem() self.table_libs.setItem(4, 0, item) item = QtGui.QTableWidgetItem() self.table_libs.setItem(4, 1, item) item = QtGui.QTableWidgetItem() self.table_libs.setItem(4, 2, item) item = QtGui.QTableWidgetItem() self.table_libs.setItem(5, 0, item) item = QtGui.QTableWidgetItem() self.table_libs.setItem(5, 1, item) item = QtGui.QTableWidgetItem() self.table_libs.setItem(5, 2, item) item = QtGui.QTableWidgetItem() self.table_libs.setItem(6, 0, item) item = QtGui.QTableWidgetItem() self.table_libs.setItem(6, 1, item) item = QtGui.QTableWidgetItem() self.table_libs.setItem(6, 2, item) item = QtGui.QTableWidgetItem() self.table_libs.setItem(7, 0, item) item = QtGui.QTableWidgetItem() 
self.table_libs.setItem(7, 1, item) item = QtGui.QTableWidgetItem() self.table_libs.setItem(7, 2, item) self.table_libs.horizontalHeader().setStretchLastSection(True) self.table_libs.verticalHeader().setVisible(False) self.table_libs.verticalHeader().setStretchLastSection(False) self.gridLayout.addWidget(self.table_libs, 14, 0, 1, 2) self.label_6 = QtGui.QLabel(dia_credits) self.label_6.setMaximumSize(QtCore.QSize(50, 50)) self.label_6.setText(_fromUtf8("")) self.label_6.setPixmap(QtGui.QPixmap(_fromUtf8(":/logos/doc/img/logos/preferences.png"))) self.label_6.setScaledContents(True) self.label_6.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignTop) self.label_6.setObjectName(_fromUtf8("label_6")) self.gridLayout.addWidget(self.label_6, 6, 0, 1, 1) self.retranslateUi(dia_credits) QtCore.QObject.connect(self.btn_close_credits, QtCore.SIGNAL(_fromUtf8("clicked()")), dia_credits.close) QtCore.QMetaObject.connectSlotsByName(dia_credits) def retranslateUi(self, dia_credits): dia_credits.setWindowTitle(_translate("dia_credits", "Credits", None)) self.label_4.setText(_translate("dia_credits", "Developet With This Libraries", None)) self.label_3.setText(_translate("dia_credits", "<html><head/><body><p><span style=\" font-weight:600;\">Copyright © 2015 by Swissatest Testmaterialien AG</span></p><p>Swissatest Testmaterialen AG<br/>Mövenstrase 12<br/>CH - 9015 St. Gallen<br/>Switzerland</p><p><a href=\"http://swisatest.ch\"><span style=\" text-decoration: underline; color:#0000ff;\">http://swissatest.ch</span></a><br/><a href=\"mailto:analysis@swissatest.ch\"><span style=\" text-decoration: underline; color:#0000ff;\">analysis@swissatest.ch</span></a></p></body></html>", None)) self.btn_close_credits.setToolTip(_translate("dia_credits", "Close Window", None)) self.btn_close_credits.setText(_translate("dia_credits", "Close", None)) self.label_8.setText(_translate("dia_credits", "<html><head/><body><p><span style=\" font-weight:600;\">CAOA - Computer Aided Crafical Analysis</span></p><p>Version 0.1<br/>Prototype GUI - Look And Feel Test<br><a href=\"https://github.com/rrmelcer/swissatest-analysis\"><span style=\" text-decoration: underline; color:#0000ff;\">https://github.com/rrmelcer/swissatest-analysis</span></a></p></body></html>", None)) self.label.setText(_translate("dia_credits", "<html><head/><body><p><span style=\" font-weight:600;\">Developed 2015 by Raoul René Melcer</span></p><p>Web Services Consulting Melcer<br/>Raoul René Melcer<br/>Gotthelfstrasse 85<br/>CH - 8472 Seuzach<br/>Switzerland</p><p><a href=\"http://webservices-consulting.ch\"><span style=\" text-decoration: underline; color:#0000ff;\">http://webservices-consulting.ch</span></a><br/><a href=\"mailto: raoul.rene.melcer@webservices-consulting.ch\"><span style=\" text-decoration: underline; color:#0000ff;\">raoul.rene.melcer@webservices-consulting.ch</span></a></p></body></html>", None)) self.label_5.setText(_translate("dia_credits", "<html><head/><body><p><span style=\" font-weight:600;\">Apache License, Version 2.0</span></p><p><a href=\"http://www.apache.org/licenses/LICENSE-2.0\"><span style=\" text-decoration: underline; color:#0000ff;\">http://www.apache.org/licenses/LICENSE-2.0</span></a></p></body></html>", None)) item = self.table_libs.verticalHeaderItem(0) item.setText(_translate("dia_credits", "01", None)) item = self.table_libs.verticalHeaderItem(1) item.setText(_translate("dia_credits", "02", None)) item = self.table_libs.verticalHeaderItem(2) item.setText(_translate("dia_credits", "03", None)) 
item = self.table_libs.verticalHeaderItem(3) item.setText(_translate("dia_credits", "04", None)) item = self.table_libs.verticalHeaderItem(4) item.setText(_translate("dia_credits", "05", None)) item = self.table_libs.verticalHeaderItem(5) item.setText(_translate("dia_credits", "06", None)) item = self.table_libs.verticalHeaderItem(6) item.setText(_translate("dia_credits", "07", None)) item = self.table_libs.verticalHeaderItem(7) item.setText(_translate("dia_credits", "08", None)) item = self.table_libs.horizontalHeaderItem(0) item.setText(_translate("dia_credits", "Name", None)) item = self.table_libs.horizontalHeaderItem(1) item.setText(_translate("dia_credits", "Version", None)) item = self.table_libs.horizontalHeaderItem(2) item.setText(_translate("dia_credits", "Website", None)) __sortingEnabled = self.table_libs.isSortingEnabled() self.table_libs.setSortingEnabled(False) item = self.table_libs.item(0, 0) item.setText(_translate("dia_credits", "Python", None)) item = self.table_libs.item(0, 1) item.setText(_translate("dia_credits", "2.7.9", None)) item = self.table_libs.item(0, 2) item.setText(_translate("dia_credits", "https://www.python.org/", None)) item = self.table_libs.item(1, 0) item.setText(_translate("dia_credits", "Qt", None)) item = self.table_libs.item(1, 1) item.setText(_translate("dia_credits", "4", None)) item = self.table_libs.item(1, 2) item.setText(_translate("dia_credits", "http://qt-project.org/", None)) item = self.table_libs.item(2, 0) item.setText(_translate("dia_credits", "PyQt", None)) item = self.table_libs.item(2, 1) item.setText(_translate("dia_credits", "4", None)) item = self.table_libs.item(2, 2) item.setText(_translate("dia_credits", "http://www.riverbankcomputing.com/software/pyqt/intro", None)) item = self.table_libs.item(3, 0) item.setText(_translate("dia_credits", "PyYAML", None)) item = self.table_libs.item(3, 1) item.setText(_translate("dia_credits", "3.11", None)) item = self.table_libs.item(3, 2) item.setText(_translate("dia_credits", "http://pyyaml.org/", None)) item = self.table_libs.item(4, 0) item.setText(_translate("dia_credits", "YAPSY", None)) item = self.table_libs.item(4, 1) item.setText(_translate("dia_credits", "1.10", None)) item = self.table_libs.item(4, 2) item.setText(_translate("dia_credits", "http://yapsy.sourceforge.net/", None)) item = self.table_libs.item(5, 0) item.setText(_translate("dia_credits", "SQLAlchemy", None)) item = self.table_libs.item(5, 1) item.setText(_translate("dia_credits", "0.9", None)) item = self.table_libs.item(5, 2) item.setText(_translate("dia_credits", "http://www.sqlalchemy.org/", None)) item = self.table_libs.item(6, 0) item.setText(_translate("dia_credits", "SQLSoup", None)) item = self.table_libs.item(6, 1) item.setText(_translate("dia_credits", "0.9", None)) item = self.table_libs.item(6, 2) item.setText(_translate("dia_credits", "https://sqlsoup.readthedocs.org/en/latest/", None)) item = self.table_libs.item(7, 0) item.setText(_translate("dia_credits", "NumPy", None)) item = self.table_libs.item(7, 1) item.setText(_translate("dia_credits", "1.9", None)) item = self.table_libs.item(7, 2) item.setText(_translate("dia_credits", "http://www.numpy.org/", None)) self.table_libs.setSortingEnabled(__sortingEnabled) import guiqtimglib_rc def main(): logging.info('single run: {0}'.format(__name__)) if __name__ == '__main__': main()
rrmelcer/swissatest-analysis
gui/qt4/credits.py
Python
apache-2.0
19,884
0.004024
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
#

from uitest.framework import UITestCase
import time


class tdf92611(UITestCase):

    def test_launch_and_close_bibliography(self):

        self.ui_test.create_doc_in_start_center("writer")

        self.xUITest.executeCommand(".uno:BibliographyComponent")

        time.sleep(2)

        self.xUITest.executeCommand(".uno:CloseWin")

        time.sleep(2)

        self.ui_test.close_doc()

# vim: set shiftwidth=4 softtabstop=4 expandtab:
beppec56/core
uitest/writer_tests/tdf92611.py
Python
gpl-3.0
647
0.003091
""" pip.vendor is for vendoring dependencies of pip to prevent needing pip to depend on something external. Files inside of pip.vendor should be considered immutable and should only be updated to versions from upstream. """ from __future__ import absolute_import
piyush82/icclab-rcb-web
virtualenv/lib/python2.7/site-packages/pip/vendor/__init__.py
Python
apache-2.0
264
0
#!/usr/bin/env python3
# -*- coding: utf-8 -*-

import uuid
import socket
import time

__appname__ = "pymessage"
__author__ = "Marco Sirabella, Owen Davies"
__copyright__ = ""
__credits__ = "Marco Sirabella, Owen Davies"
__license__ = "new BSD 3-Clause"
__version__ = "0.0.3"
__maintainers__ = "Marco Sirabella, Owen Davies"
__email__ = "msirabel@gmail.com, dabmancer@dread.life"
__status__ = "Prototype"
__module__ = ""

address = ('localhost', 5350)
lguid = '0'


def connect():
    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    sock.connect(address)
    # bytes(False) == bytes(0) == b'', so this appends nothing to the uuid line
    sock.send((hex(uuid.getnode()) + '\n').encode() + bytes(False))  # ik this is such BAD CODE
    print("sent")
    sock.send(lguid.encode())
    print('sent latest guid: {}'.format(lguid))
    # contents = "latest guid +5: {}".format(lguid + '5')
    msg = True
    fullmsg = ''
    while msg:
        msg = sock.recv(16).decode()  # low byte count for whatever reason
        #print('mes rec: {}'.format(msg))
        fullmsg += msg
    print('received message: {}'.format(fullmsg))
    sock.close()

connect()
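# A hedged sketch of a server loop this client could talk to; the framing
# (a uuid line, then the latest guid, then a reply streamed until the socket
# closes) is inferred from connect() above.  serve_once() and the reply text
# are illustrative assumptions, not part of pymessage.
import socket

def serve_once(reply=b'hello from pymessage server'):
    srv = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    srv.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    srv.bind(('localhost', 5350))
    srv.listen(1)
    conn, _ = srv.accept()
    node_id = b''
    while not node_id.endswith(b'\n'):  # the uuid line the client sends first
        chunk = conn.recv(1)
        if not chunk:
            break
        node_id += chunk
    lguid = conn.recv(16)               # the client's latest guid ('0' at first)
    print('client {!r} last saw {!r}'.format(node_id.strip(), lguid))
    conn.sendall(reply)                 # the client reads until we close
    conn.close()
    srv.close()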
mjsir911/pymessage
client.py
Python
bsd-3-clause
1,124
0.010676
#!/usr/bin/python
# This file is part of tcollector.
# Copyright (C) 2011  The tcollector Authors.
#
# This program is free software: you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or (at your
# option) any later version.  This program is distributed in the hope that it
# will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty
# of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU Lesser
# General Public License for more details.  You should have received a copy
# of the GNU Lesser General Public License along with this program.  If not,
# see <http://www.gnu.org/licenses/>.

# Note: I spent many hours reading the Linux kernel's source code to infer the
# exact meaning of some of the obscure but useful metrics it exposes.  The
# descriptions of the metrics are correct to the best of my knowledge, but it's
# not always easy to make sense of the Linux kernel's code.  Please report any
# inaccuracy you find.  -- tsuna.

"""Socket allocation and network statistics for TSDB.

Metrics from /proc/net/sockstat:
  - net.sockstat.num_sockets: Number of sockets allocated (only TCP).
  - net.sockstat.num_timewait: Number of TCP sockets currently in
    TIME_WAIT state.
  - net.sockstat.sockets_inuse: Number of sockets in use (TCP/UDP/raw).
  - net.sockstat.num_orphans: Number of orphan TCP sockets (not attached
    to any file descriptor).
  - net.sockstat.memory: Memory allocated for this socket type (in bytes).
  - net.sockstat.ipfragqueues: Number of IP flows for which there are
    currently fragments queued for reassembly.

Metrics from /proc/net/netstat (`netstat -s' command):
  - net.stat.tcp.abort: Number of connections that the kernel had to abort.
    type=memory is especially bad, the kernel had to drop a connection due to
    having too many orphaned sockets.  Other types are normal (e.g. timeout).
  - net.stat.tcp.abort.failed: Number of times the kernel failed to abort a
    connection because it didn't even have enough memory to reset it (bad).
  - net.stat.tcp.congestion.recovery: Number of times the kernel detected
    spurious retransmits and was able to recover part or all of the CWND.
  - net.stat.tcp.delayedack: Number of delayed ACKs sent of different types.
  - net.stat.tcp.failed_accept: Number of times a connection had to be
    dropped after the 3WHS.  reason=full_acceptq indicates that the
    application isn't accepting connections fast enough.  You should see
    SYN cookies too.
  - net.stat.tcp.invalid_sack: Number of invalid SACKs we saw of diff types.
    (requires Linux v2.6.24-rc1 or newer)
  - net.stat.tcp.memory.pressure: Number of times a socket entered the
    "memory pressure" mode (not great).
  - net.stat.tcp.memory.prune: Number of times a socket had to discard
    received data due to low memory conditions (bad).
  - net.stat.tcp.packetloss.recovery: Number of times we recovered from packet
    loss by type of recovery (e.g. fast retransmit vs SACK).
  - net.stat.tcp.receive.queue.full: Number of times a received packet had to
    be dropped because the socket's receive queue was full.
    (requires Linux v2.6.34-rc2 or newer)
  - net.stat.tcp.reording: Number of times we detected re-ordering and how.
  - net.stat.tcp.syncookies: SYN cookies (both sent & received).
"""

import re
import resource
import sys
import time

from collectors.lib import utils


def main():
    """Main loop"""
    sys.stdin.close()

    interval = 15
    page_size = resource.getpagesize()

    try:
        sockstat = open("/proc/net/sockstat")
        netstat = open("/proc/net/netstat")
        snmp = open("/proc/net/snmp")
    except IOError, e:
        print >>sys.stderr, "open failed: %s" % e
        return 13  # Ask tcollector to not re-start us.
    utils.drop_privileges()

    # Note: up until v2.6.37-rc2 most of the values were 32 bits.
    # The first value is pretty useless since it accounts for some
    # socket types but not others.  So we don't report it because it's
    # more confusing than anything else and it's not well documented
    # what type of sockets are or aren't included in this count.
    regexp = re.compile("sockets: used \d+\n"
                        "TCP: inuse (?P<tcp_inuse>\d+) orphan (?P<orphans>\d+)"
                        " tw (?P<tw_count>\d+) alloc (?P<tcp_sockets>\d+)"
                        " mem (?P<tcp_pages>\d+)\n"
                        "UDP: inuse (?P<udp_inuse>\d+)"
                        # UDP memory accounting was added in v2.6.25-rc1
                        "(?: mem (?P<udp_pages>\d+))?\n"
                        # UDP-Lite (RFC 3828) was added in v2.6.20-rc2
                        "(?:UDPLITE: inuse (?P<udplite_inuse>\d+)\n)?"
                        "RAW: inuse (?P<raw_inuse>\d+)\n"
                        "FRAG: inuse (?P<ip_frag_nqueues>\d+)"
                        " memory (?P<ip_frag_mem>\d+)\n")

    def print_sockstat(metric, value, tags=""):  # Note: tags must start with ' '
        if value is not None:
            print "net.sockstat.%s %d %s%s" % (metric, ts, value, tags)

    # If a line in /proc/net/{netstat,snmp} doesn't start with a word in that
    # dict, we'll ignore it.  We use the value to build the metric name.
    known_statstypes = {
        "TcpExt:": "tcp",
        "IpExt:": "ip",          # We don't collect anything from here for now.
        "Ip:": "ip",             # We don't collect anything from here for now.
        "Icmp:": "icmp",         # We don't collect anything from here for now.
        "IcmpMsg:": "icmpmsg",   # We don't collect anything from here for now.
        "Tcp:": "tcp",           # We don't collect anything from here for now.
        "Udp:": "udp",
        "UdpLite:": "udplite",   # We don't collect anything from here for now.
    }

    # Any stat in /proc/net/{netstat,snmp} that doesn't appear in this dict will
    # be ignored.  If we find a match, we'll use the (metricname, tags).
    tcp_stats = {
        # An application wasn't able to accept a connection fast enough, so
        # the kernel couldn't store an entry in the queue for this connection.
        # Instead of dropping it, it sent a cookie to the client.
        "SyncookiesSent": ("syncookies", "type=sent"),
        # After sending a cookie, it came back to us and passed the check.
        "SyncookiesRecv": ("syncookies", "type=received"),
        # After sending a cookie, it came back to us but looked invalid.
        "SyncookiesFailed": ("syncookies", "type=failed"),
        # When a socket is using too much memory (rmem), the kernel will first
        # discard any out-of-order packet that has been queued (with SACK).
        "OfoPruned": ("memory.prune", "type=drop_ofo_queue"),
        # If the kernel is really really desperate and cannot give more memory
        # to this socket even after dropping the ofo queue, it will simply
        # discard the packet it received.  This is Really Bad.
        "RcvPruned": ("memory.prune", "type=drop_received"),
        # We waited for another packet to send an ACK, but didn't see any, so
        # a timer ended up sending a delayed ACK.
        "DelayedACKs": ("delayedack", "type=sent"),
        # We wanted to send a delayed ACK but failed because the socket was
        # locked.  So the timer was reset.
        "DelayedACKLocked": ("delayedack", "type=locked"),
        # We sent a delayed and duplicated ACK because the remote peer
        # retransmitted a packet, thinking that it didn't get to us.
        "DelayedACKLost": ("delayedack", "type=lost"),
        # We completed a 3WHS but couldn't put the socket on the accept queue,
        # so we had to discard the connection.
        "ListenOverflows": ("failed_accept", "reason=full_acceptq"),
        # We couldn't accept a connection because one of: we had no route to
        # the destination, we failed to allocate a socket, we failed to
        # allocate a new local port bind bucket.  Note: this counter
        # also include all the increments made to ListenOverflows...
        "ListenDrops": ("failed_accept", "reason=other"),
        # A packet was lost and we used Forward RTO-Recovery to retransmit.
        "TCPForwardRetrans": ("retransmit", "type=forward"),
        # A packet was lost and we fast-retransmitted it.
        "TCPFastRetrans": ("retransmit", "type=fast"),
        # A packet was lost and we retransmitted after a slow start.
        "TCPSlowStartRetrans": ("retransmit", "type=slowstart"),
        # A packet was lost and we recovered after a fast retransmit.
        "TCPRenoRecovery": ("packetloss.recovery", "type=fast_retransmit"),
        # A packet was lost and we recovered by using selective
        # acknowledgements.
        "TCPSackRecovery": ("packetloss.recovery", "type=sack"),
        # We detected re-ordering using FACK (Forward ACK -- the highest
        # sequence number known to have been received by the peer when using
        # SACK -- FACK is used during congestion control).
        "TCPFACKReorder": ("reording", "detectedby=fack"),
        # We detected re-ordering using SACK.
        "TCPSACKReorder": ("reording", "detectedby=sack"),
        # We detected re-ordering using fast retransmit.
        "TCPRenoReorder": ("reording", "detectedby=fast_retransmit"),
        # We detected re-ordering using the timestamp option.
        "TCPTSReorder": ("reording", "detectedby=timestamp"),
        # We detected some erroneous retransmits and undid our CWND reduction.
        "TCPFullUndo": ("congestion.recovery", "type=full_undo"),
        # We detected some erroneous retransmits, a partial ACK arrived while
        # we were fast retransmitting, so we were able to partially undo some
        # of our CWND reduction.
        "TCPPartialUndo": ("congestion.recovery", "type=hoe_heuristic"),
        # We detected some erroneous retransmits, a D-SACK arrived and ACK'ed
        # all the retransmitted data, so we undid our CWND reduction.
        "TCPDSACKUndo": ("congestion.recovery", "type=sack"),
        # We detected some erroneous retransmits, a partial ACK arrived, so we
        # undid our CWND reduction.
        "TCPLossUndo": ("congestion.recovery", "type=ack"),
        # We received an unexpected SYN so we sent a RST to the peer.
        "TCPAbortOnSyn": ("abort", "type=unexpected_syn"),
        # We were in FIN_WAIT1 yet we received a data packet with a sequence
        # number that's beyond the last one for this connection, so we RST'ed.
        "TCPAbortOnData": ("abort", "type=data_after_fin_wait1"),
        # We received data but the user has closed the socket, so we have no
        # way of handing it to them, so we RST'ed.
        "TCPAbortOnClose": ("abort", "type=data_after_close"),
        # This is Really Bad.  It happens when there are too many orphaned
        # sockets (not attached a FD) and the kernel has to drop a connection.
        # Sometimes it will send a reset to the peer, sometimes it wont.
        "TCPAbortOnMemory": ("abort", "type=out_of_memory"),
        # The connection timed out really hard.
        "TCPAbortOnTimeout": ("abort", "type=timeout"),
        # We killed a socket that was closed by the application and lingered
        # around for long enough.
        "TCPAbortOnLinger": ("abort", "type=linger"),
        # We tried to send a reset, probably during one of the TCPAbort*
        # situations above, but we failed e.g. because we couldn't allocate
        # enough memory (very bad).
        "TCPAbortFailed": ("abort.failed", None),
        # Number of times a socket was put in "memory pressure" due to a non
        # fatal memory allocation failure (reduces the send buffer size etc).
        "TCPMemoryPressures": ("memory.pressure", None),
        # We got a completely invalid SACK block and discarded it.
        "TCPSACKDiscard": ("invalid_sack", "type=invalid"),
        # We got a duplicate SACK while retransmitting so we discarded it.
        "TCPDSACKIgnoredOld": ("invalid_sack", "type=retransmit"),
        # We got a duplicate SACK and discarded it.
        "TCPDSACKIgnoredNoUndo": ("invalid_sack", "type=olddup"),
        # We received something but had to drop it because the socket's
        # receive queue was full.
        "TCPBacklogDrop": ("receive.queue.full", None),
    }
    known_stats = {
        "tcp": tcp_stats,
        "ip": {
        },
        "icmp": {
        },
        "icmpmsg": {
        },
        "udp": {
            # Total UDP datagrams received by this host
            "InDatagrams": ("datagrams", "direction=in"),
            # UDP datagrams received on a port with no listener
            "NoPorts": ("errors", "direction=in reason=noport"),
            # Total UDP datagrams that could not be delivered to an application
            # Note: this counter also increments for RcvbufErrors
            "InErrors": ("errors", "direction=in reason=other"),
            # Total UDP datagrams sent from this host
            "OutDatagrams": ("datagrams", "direction=out"),
            # Datagrams for which there was not enough socket buffer memory to receive
            "RcvbufErrors": ("errors", "direction=in reason=nomem"),
            # Datagrams for which there was not enough socket buffer memory to transmit
            "SndbufErrors": ("errors", "direction=out reason=nomem"),
        },
        "udplite": {
        },
    }

    def print_netstat(statstype, metric, value, tags=""):
        if tags:
            space = " "
        else:
            tags = space = ""
        print "net.stat.%s.%s %d %s%s%s" % (statstype, metric, ts, value,
                                            space, tags)

    def parse_stats(stats, filename):
        statsdikt = {}
        # /proc/net/{netstat,snmp} have a retarded column-oriented format.  It
        # looks like this:
        #   Header: SomeMetric OtherMetric
        #   Header: 1 2
        #   OtherHeader: ThirdMetric FooBar
        #   OtherHeader: 42 51
        # We first group all the lines for each header together:
        #   {"Header:": [["SomeMetric", "OtherHeader"], ["1", "2"]],
        #    "OtherHeader:": [["ThirdMetric", "FooBar"], ["42", "51"]]}
        # Then we'll create a dict for each type:
        #   {"SomeMetric": "1", "OtherHeader": "2"}
        for line in stats.splitlines():
            line = line.split()
            if line[0] not in known_statstypes:
                print >>sys.stderr, ("Unrecognized line in %s:"
                                     " %r (file=%r)" % (filename, line, stats))
                continue
            statstype = line.pop(0)
            statsdikt.setdefault(known_statstypes[statstype], []).append(line)
        for statstype, stats in statsdikt.iteritems():
            # stats is now:
            # [["SyncookiesSent", "SyncookiesRecv", ...], ["1", "2", ....]]
            assert len(stats) == 2, repr(statsdikt)
            stats = dict(zip(*stats))
            # Undo the kernel's double counting
            if "ListenDrops" in stats:
                stats["ListenDrops"] = int(stats["ListenDrops"]) - int(stats.get("ListenOverflows", 0))
            elif "RcvbufErrors" in stats:
                stats["InErrors"] = int(stats.get("InErrors", 0)) - int(stats["RcvbufErrors"])
            for stat, (metric, tags) in known_stats[statstype].iteritems():
                value = stats.get(stat)
                if value is not None:
                    print_netstat(statstype, metric, value, tags)

    while True:
        ts = int(time.time())
        sockstat.seek(0)
        netstat.seek(0)
        snmp.seek(0)
        data = sockstat.read()
        netstats = netstat.read()
        snmpstats = snmp.read()
        m = re.match(regexp, data)
        if not m:
            print >>sys.stderr, "Cannot parse sockstat: %r" % data
            return 13

        # The difference between the first two values is the number of
        # sockets allocated vs the number of sockets actually in use.
        print_sockstat("num_sockets",   m.group("tcp_sockets"),   " type=tcp")
        print_sockstat("num_timewait",  m.group("tw_count"))
        print_sockstat("sockets_inuse", m.group("tcp_inuse"),     " type=tcp")
        print_sockstat("sockets_inuse", m.group("udp_inuse"),     " type=udp")
        print_sockstat("sockets_inuse", m.group("udplite_inuse"), " type=udplite")
        print_sockstat("sockets_inuse", m.group("raw_inuse"),     " type=raw")

        print_sockstat("num_orphans", m.group("orphans"))
        print_sockstat("memory", int(m.group("tcp_pages")) * page_size,
                       " type=tcp")
        if m.group("udp_pages") is not None:
            print_sockstat("memory", int(m.group("udp_pages")) * page_size,
                           " type=udp")
        print_sockstat("memory", m.group("ip_frag_mem"), " type=ipfrag")
        print_sockstat("ipfragqueues", m.group("ip_frag_nqueues"))

        parse_stats(netstats, netstat.name)
        parse_stats(snmpstats, snmp.name)

        sys.stdout.flush()
        time.sleep(interval)


if __name__ == "__main__":
    sys.exit(main())
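# A hedged, self-contained demo of the sockstat parsing above; the pattern is
# copied from main() and the sample text is a made-up but format-correct
# /proc/net/sockstat snapshot, not real host data.
import re

sockstat_re = re.compile("sockets: used \d+\n"
                         "TCP: inuse (?P<tcp_inuse>\d+) orphan (?P<orphans>\d+)"
                         " tw (?P<tw_count>\d+) alloc (?P<tcp_sockets>\d+)"
                         " mem (?P<tcp_pages>\d+)\n"
                         "UDP: inuse (?P<udp_inuse>\d+)"
                         "(?: mem (?P<udp_pages>\d+))?\n"
                         "(?:UDPLITE: inuse (?P<udplite_inuse>\d+)\n)?"
                         "RAW: inuse (?P<raw_inuse>\d+)\n"
                         "FRAG: inuse (?P<ip_frag_nqueues>\d+)"
                         " memory (?P<ip_frag_mem>\d+)\n")

sample = ("sockets: used 120\n"
          "TCP: inuse 15 orphan 0 tw 3 alloc 17 mem 2\n"
          "UDP: inuse 4 mem 1\n"
          "UDPLITE: inuse 0\n"
          "RAW: inuse 0\n"
          "FRAG: inuse 0 memory 0\n")

m = sockstat_re.match(sample)
print m.group("tcp_inuse"), m.group("tw_count"), m.group("udp_pages")
# -> 15 3 1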
HubSpot/tcollector
collectors/0/netstat.py
Python
gpl-3.0
17,157
0.001224
#ue.exec('pip2.py')
import subprocess
import sys
import os
import unreal_engine as ue
import _thread as thread

#ue.log(sys.path)

_problemPaths = ['']

def NormalizePaths():
    problemPaths = _problemPaths

    #replace '/' to '\\'
    for i in range(len(sys.path)):
        currentPath = sys.path[i]
        sys.path[i] = currentPath.replace('\\','/')

        #find additional problem paths such as engine bin
        currentPath = sys.path[i]
        if('Engine' in currentPath and 'Epic Games' in currentPath):
            _problemPaths.append(currentPath)

    #cleanup
    for path in problemPaths:
        if path in sys.path:
            sys.path.remove(path)

#define some convenience paths
def PythonHomePath():
    for path in sys.path:
        normalizedPath = AsAbsPath(path)
        if ('UnrealEnginePython' in normalizedPath and
                normalizedPath.endswith('Binaries/Win64')):
            return path
    #return sys.path[1]
    return "not found"

def PythonHomeScriptsPath():
    return AsAbsPath(PythonHomePath() + "/Scripts")

def PythonPluginScriptPath():
    for path in sys.path:
        normalizedPath = AsAbsPath(path)
        if ('UnrealEnginePython' in normalizedPath and
                normalizedPath.endswith('Content/Scripts')):
            return path
    return "not found"

def PythonProjectScriptPath():
    relativePath = PythonPluginScriptPath() + "/../../../../Content/Scripts"
    return AsAbsPath(relativePath)

def AsAbsPath(path):
    return os.path.abspath(path).replace('\\','/')

_PythonHomePath = PythonHomePath()

def FolderCommand(folder):
    #replace backslashes
    folder = folder.replace('/','\\')
    changefolder = "cd /d \"" + folder + "\" & "
    return changefolder

#main public function
def run(process, path=_PythonHomePath, verbose=True):
    #todo: change folder
    fullcommand = FolderCommand(path) + process
    if verbose:
        ue.log("Started cmd <" + fullcommand + ">")
    stdoutdata = subprocess.getstatusoutput(fullcommand)
    if verbose:
        ue.log("cmd Result: ")
        ue.log(stdoutdata[1])
    return stdoutdata[1]    #return the data for dependent functions

def runStreaming(process, callback=None, path=_PythonHomePath, verbose=True):
    #todo: change folder
    fullcommand = FolderCommand(path) + process
    if verbose:
        print("Started cmd <" + fullcommand + ">")

    #streaming version
    #bug fixes: the loop used to read from 'process' (the command string)
    #instead of the Popen object, and 'cd /d ... &' needs a shell;
    #universal_newlines=True makes readline() return str so the '' sentinel
    #actually terminates the loop
    popenobj = subprocess.Popen(fullcommand, stdout=subprocess.PIPE,
                                shell=True, universal_newlines=True)
    output = ''
    for line in iter(popenobj.stdout.readline, ''):
        #sys.stdout.write(line)
        print(line)
        output += line
    if verbose:
        print("cmd Result: ")
        print(output)
    return output   #return the data for dependent functions

#convenience override
def runLogOutput(process, path=_PythonHomePath):
    fullcommand = FolderCommand(path) + process
    stdoutdata = subprocess.getstatusoutput(fullcommand)
    ue.log(stdoutdata[1])
    return stdoutdata[1]

#convenience wrappers
def dir(path=_PythonHomePath):
    run('dir', path)

def ls(path=_PythonHomePath):
    dir(path)

def md(folder, path=_PythonHomePath):
    run('md ' + folder, path)

def mkdir(folder, path=_PythonHomePath):
    md(folder, path)
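# A hedged usage sketch for the helpers above; it only makes sense inside an
# UnrealEnginePython session (ue must be importable), and 'python -m pip list'
# is an illustrative command, not something upycmd itself requires.
import upycmd

upycmd.NormalizePaths()                # drop engine-bin paths that confuse pip
print(upycmd.PythonHomePath())         # .../UnrealEnginePython/Binaries/Win64
upycmd.runLogOutput('python -m pip list', upycmd.PythonHomeScriptsPath())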
getnamo/UnrealEnginePython
Content/Scripts/upycmd.py
Python
mit
2,925
0.037949
class Node:
    def __init__(self, value):
        self.value = value
        self.next = None
        self.prev = None


class OldestUnique:
    def __init__(self):
        self.uniq = {}
        self.seen = set()
        self.head = None
        self.tail = None

    def feed(self, value):
        if value in self.uniq:
            # unlink from list; also drop from uniq so a third occurrence
            # cannot re-run the unlink with stale prev/next pointers
            # (self.seen still catches all later duplicates)
            node = self.uniq.pop(value)
            if node.prev is not None:
                node.prev.next = node.next
            else:
                self.head = node.next
            if node.next is not None:
                node.next.prev = node.prev
            else:
                self.tail = node.prev
        elif value not in self.seen:
            node = Node(value)
            if self.head is None:
                self.tail = node
            else:
                node.next = self.head
                self.head.prev = node
            self.head = node
            self.uniq[value] = node
            self.seen.add(value)

    def query(self):
        if self.tail is not None:
            return self.tail.value
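# A small hypothetical walk-through of the structure above: feed a stream and
# query the oldest value that has, so far, appeared exactly once.
ou = OldestUnique()
for v in [3, 1, 4, 1, 5]:
    ou.feed(v)
print(ou.query())   # -> 3  (1 occurred twice, so 3 is the oldest unique)
ou.feed(3)
print(ou.query())   # -> 4  (3 is no longer unique)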
frasertweedale/drill
py/oldest_unique.py
Python
mit
1,106
0
# -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2018-08-31 12:06
from __future__ import unicode_literals

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('basicviz', '0073_auto_20180831_1203'),
    ]

    operations = [
        migrations.AddField(
            model_name='experiment',
            name='csv_id_column',
            field=models.CharField(blank=True, max_length=128, null=True),
        ),
        migrations.AddField(
            model_name='experiment',
            name='ms2_id_field',
            field=models.CharField(blank=True, max_length=128, null=True),
        ),
    ]
sdrogers/ms2ldaviz
ms2ldaviz/basicviz/migrations/0074_auto_20180831_1206.py
Python
mit
670
0
""" Goal: set the environment for tests Docs: https://pythonhosted.org/Flask-SQLAlchemy/quickstart.html The only things you need to know compared to plain SQLAlchemy are: SQLAlchemy gives you access to the following things: - all the functions and classes from sqlalchemy and sqlalchemy.orm - a preconfigured scoped session called session - the metadata - the engine - a SQLAlchemy.create_all() and SQLAlchemy.drop_all() methods to create and drop tables according to the models - a Model baseclass that is a configured declarative base - The Model declarative base class behaves like a regular Python class but has a query attribute attached that can be used to query the model - You have to commit the session, but you don't have to remove it at the end of the request, Flask-SQLAlchemy does that for you. """ from flask_testing import TestCase from app.main import app, db, mail from app import initializer from config import MODE_TEST class BaseTestCase(TestCase): """ Base class for all tests""" def create_app(self): """ override the default config with the test config """ initializer.do_init(app, MODE_TEST) mail.init_app(app) return app def setUp(self): """ create all tables """ db.create_all() def tearDown(self): """ remove all tables """ db.session.remove() db.drop_all()
indera/barebones-flask-app
tests/base_test.py
Python
bsd-3-clause
1,403
0.000713
from .lconfiguration import LocalConfiguration
from .printing import Printing
from .word import Word

from xml.etree import ElementTree


class LinearisationRule:
    @classmethod
    def deserialise(cls, grammars, def_rule_etree):
        probability = float(def_rule_etree.get('p'))
        head_node_etree = def_rule_etree.find('NODE')
        linearisation_rule = {}

        for node_etree in head_node_etree.findall('NODE'):
            linearisation_rule[int(node_etree.get('ord'))] = (
                node_etree.get('si'), Word.deserialise(node_etree))

        head_node = (head_node_etree.get('si'),
                     Word.deserialise(head_node_etree))
        dependents = list(linearisation_rule.values())
        dependents.sort()
        local_configuration = LocalConfiguration(head_node[0], head_node[1],
                                                 tuple(dependents))
        head_node_ord = int(head_node_etree.get('ord'))
        linearisation_rule[head_node_ord] = head_node
        linearisation_rule = list(linearisation_rule.items())
        linearisation_rule.sort()
        head_node_index = linearisation_rule.index((head_node_ord, head_node))
        linearisation_rule = LinearisationRule(
            [value for key, value in linearisation_rule[:head_node_index]],
            [value for key, value in linearisation_rule[head_node_index + 1:]])

        try:
            grammars.get_grammars()[local_configuration][
                probability] = linearisation_rule
        except (KeyError):
            grammars.get_grammars()[local_configuration] = {}
            grammars.get_grammars()[local_configuration][
                probability] = linearisation_rule

    def __init__(self, insert, append):
        self.insert = insert
        self.append = append

    def get_insert(self):
        return self.insert

    def get_append(self):
        return self.append

    def __str__(self):
        return Printing.get_module_qualname(self) + ' = {\n' + \
            ' insert = ' + Printing.shift_str(Printing.print_list(
                self.get_insert(), print_item=self.print_edge)) + '\n' + \
            ' append = ' + Printing.shift_str(Printing.print_list(
                self.get_append(), print_item=self.print_edge)) + '\n' + \
            '}'

    @classmethod
    def print_edge(cls, edge):
        return Printing.print_tuple(edge, print_item=[repr, str])
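# For orientation, a hedged sketch of the XML shape deserialise() walks: an
# outer rule element carrying p, whose head NODE carries ord/si and nests one
# NODE per dependent.  Tag and attribute names are taken from the code above;
# the name of the surrounding rule element is an assumption.
#
#   <RULE p="0.75">
#     <NODE ord="2" si="head-label">      <!-- head word attributes -->
#       <NODE ord="1" si="det"/>          <!-- linearised before the head -->
#       <NODE ord="3" si="obj"/>          <!-- linearised after the head -->
#     </NODE>
#   </RULE>
#
# Dependents whose ord is below the head's become the rule's `insert` list
# and the rest its `append` list.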
m5w/matxin-lineariser
matxin_lineariser/utlgrammars/lrule.py
Python
gpl-3.0
2,392
0.000836
#
# This file is protected by Copyright. Please refer to the COPYRIGHT file
# distributed with this source distribution.
#
# This file is part of GNUHAWK.
#
# GNUHAWK is free software: you can redistribute it and/or modify it under the
# terms of the GNU General Public License as published by the Free Software
# Foundation, either version 3 of the License, or (at your option) any later
# version.
#
# GNUHAWK is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
# You should have received a copy of the GNU General Public License along with
# this program. If not, see http://www.gnu.org/licenses/.
#

from ossie.utils import sb
from ossie.utils.sandbox import local
import gnuradioStubs
import sources
import commands
import time


def _uuidgen():
    return commands.getoutput('uuidgen')


def _isStubClass(comp):
    return (isinstance(comp, gnuradioStubs.stream_to_vector) or
            isinstance(comp, gnuradioStubs.vector_to_stream) or
            str(comp.__class__).find("stream_to_streams") >= 0)


class top_block(object):
    def __init__(self, fanOut=True):
        self.sources = []
        self.fanOut = fanOut

    def __del__(self):
        sb.domainless._cleanUpLaunchedComponents()
        # clear out prop_helpers _enums dictionary between test runs
        sb.domainless.prop_helpers._enums = {}

    def stop(self):
        # TODO: consider removing this
        sb.stop()

    def connect(self, src, dest, *next):
        # If the source is not a real object, skip this connection
        if _isStubClass(src):
            if len(next) > 0:
                self.connect(dest, *next)
            return

        # If the destination is not a real object, skip both possible
        # connections involving it
        if _isStubClass(dest):
            if len(next) > 0:
                self.connect(src, *next)
            return

        src_port_name = None
        if type(src) == tuple:
            src, index = src
        else:
            # Default to the first port
            index = 0
        if isinstance(src, local.LocalComponent):
            # Get just the uses ports from the source
            uses_ports = filter(lambda x: x._direction == 'Uses', src._ports)
            if len(uses_ports) > index:
                src_port_name = uses_ports[index]._name
        if isinstance(src, gnuradioStubs.head):
            src = src.comp

        if type(dest) == tuple:
            dest, index = dest
        else:
            # Default to the first port
            index = 0
        if isinstance(dest, local.LocalComponent):
            # Get just the provides ports from the destination
            provides_ports = filter(lambda x: x._direction == 'Provides', dest._ports)
            # -1 inputs; connect everybody to the same input port until
            # existing components are modified to have 1 port per allowed input
            if len(provides_ports) == 1:
                index = 0
            dest_port_name = provides_ports[index]._name
        else:
            dest_port_name = None

        # Connect directly to the destination Component object for the stub
        # head class.
        if isinstance(dest, gnuradioStubs.head):
            dest_obj = dest.comp
        else:
            dest_obj = dest

        if isinstance(src, sources.sbSource):
            # Generate a unique ID to disambiguate multiple streams into the
            # same component.
            stream_id = _uuidgen()
            src.connect((dest_obj, stream_id))
            if self.fanOut == True:
                if src not in self.sources:
                    self.sources.append(src)
            else:
                self.sources.append(src)
        else:
            src.connect(dest_obj,
                        usesPortName=src_port_name,
                        providesPortName=dest_port_name)

        # Make next pair of connections
        if len(next) > 0:
            self.connect(dest, *next)

    def run(self):
        sb.start()
        for source in self.sources:
            # TODO: only do this if this is an sbSource
            # try statement is a little sloppy
            try:
                source.push()
            except AttributeError:
                pass
        # Give blocks time to get setup
        time.sleep(.1)
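# A hedged usage sketch of the flowgraph wrapper above, mirroring GNU Radio's
# top_block idiom inside the REDHAWK sandbox.  The component names passed to
# sb.launch() are illustrative assumptions; any launched components with
# matching uses/provides ports would do.
from ossie.utils import sb

tb = top_block(fanOut=True)
src = sb.launch('sig_source')         # hypothetical source component
proc = sb.launch('multiply_const')    # hypothetical processing component
snk = sb.launch('file_sink')          # hypothetical sink component
tb.connect(src, proc, snk)            # pairs up as src->proc, then proc->snk
tb.run()                              # sb.start(), push sources, brief settle
tb.stop()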
RedhawkSDR/integration-gnuhawk
qa/gnuradio/gr/top_block.py
Python
gpl-3.0
4,399
0.006365
#!/usr/bin/env python3

"""
DNSSEC Single-Type Signing Scheme, RFC 6781
"""

from dnstest.utils import *
from dnstest.test import Test

t = Test()

knot = t.server("knot")
zones = t.zone_rnd(5, dnssec=False, records=10)
t.link(zones, knot)

t.start()

# one KSK
knot.gen_key(zones[0], ksk=True, zsk=True, alg="ECDSAP256SHA256", key_len="256")
# multiple KSKs
knot.gen_key(zones[1], ksk=True, zsk=True, alg="ECDSAP384SHA384", key_len="384")
knot.gen_key(zones[1], ksk=True, zsk=True, alg="ECDSAP256SHA256", key_len="256")
# different algorithms: KSK+ZSK pair, one KSK
knot.gen_key(zones[2], ksk=True, alg="ECDSAP256SHA256", key_len="256")
knot.gen_key(zones[2], ksk=False, alg="ECDSAP256SHA256", key_len="256")
knot.gen_key(zones[2], ksk=True, zsk=True, alg="ECDSAP384SHA384", key_len="384")
# one ZSK
knot.gen_key(zones[3], ksk=False, alg="ECDSAP256SHA256", key_len="256").change_role(ksk=True, zsk=True)

for zone in zones[:-1]:
    knot.dnssec(zone).enable = True
    knot.dnssec(zone).single_type_signing = True

# enable automatic Single-Type signing scheme with NSEC3 on the last zone
knot.dnssec(zones[-1]).enable = True
knot.dnssec(zones[-1]).nsec3 = True
knot.dnssec(zones[-1]).single_type_signing = True

knot.gen_confile()
knot.reload()
t.sleep(7)
knot.flush(wait=True)
knot.stop()

for zone in zones:
    knot.zone_verify(zone)

t.end()
CZ-NIC/knot
tests-extra/tests/dnssec/single_type_signing/test.py
Python
gpl-3.0
1,348
0.003709
# -*- coding: utf-8 -*-
from __future__ import unicode_literals, division

from django.shortcuts import redirect, get_object_or_404
from django.db import transaction

from xue.common.decorators import quickview, limit_role
from xue.tutor.forms import StudentApplicationForm, ProjectSelectionForm
from xue.tutor.models import StudentProject, StudentApplication, TutorProject

# expiration...
PRELIMINARY_EXPIRED, SECONDARY_EXPIRED = False, False


@limit_role([0])
@quickview('tutor/stud_apply_expired.html')
def apply_expired_view(request):
    return {}


@limit_role([0])
@quickview('tutor/stud_apply.html')
def apply_view(request):
    is_repeat = False
    try:
        StudentApplication.objects.get(student=request.user)
        is_repeat = True
    except StudentApplication.DoesNotExist:
        pass

    if request.method == 'POST':
        # form data
        frm = StudentApplicationForm(request.POST)
        if frm.is_valid():
            # valid data, store it if no previous application exists
            if not is_repeat:
                with transaction.commit_on_success():
                    entry = frm.save(commit=False)
                    entry.student = request.user
                    entry.save()
                return redirect('xue.tutor.views.mainpage_view')
    else:
        frm = StudentApplicationForm()

    return {
        'form': frm,
        'is_repeat': is_repeat,
    }


@limit_role([0])
@quickview('tutor/stud_selectproj.html')
def selectproj_view(request):
    # protect against rejected applicants and other random people
    dummy = get_object_or_404(
        StudentApplication,
        student=request.user,
        status=1,
    )

    # verify max count
    projs = list(StudentProject.objects.filter(student=request.user))
    if len(projs) >= 2:
        return {
            'is_exceeded': True,
            'projects': projs,
        }

    year = request.user.central_info.klass.date.year
    if request.method == 'POST':
        # form data
        frm = ProjectSelectionForm(year, request.POST)
        if frm.is_valid():
            # valid data, store it
            with transaction.commit_on_success():
                entry = frm.save(commit=False)
                entry.student = request.user
                entry.save()
            return redirect('xue.tutor.views.mainpage_view')
    else:
        frm = ProjectSelectionForm(year)

    return {
        'is_exceeded': False,
        'projects': projs,
        'form': frm,
    }


# expiration
if PRELIMINARY_EXPIRED:
    apply_view = apply_expired_view
if SECONDARY_EXPIRED:
    selectproj_view = apply_expired_view


# vim:set ai et ts=4 sw=4 sts=4 fenc=utf-8:
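# A hedged sketch of how these views could be wired up in an old-style Django
# urls.py (string view paths match the era of transaction.commit_on_success
# used above); the URL patterns and the tutor/ prefix are assumptions.
from django.conf.urls.defaults import patterns, url

urlpatterns = patterns('xue.tutor.studentviews',
    url(r'^tutor/apply/$', 'apply_view'),
    url(r'^tutor/select-project/$', 'selectproj_view'),
)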
team-xue/xue
xue/tutor/studentviews.py
Python
bsd-3-clause
2,757
0
# -*- coding: utf-8 -*-

# MORFEO Project
# http://morfeo-project.org
#
# Component: EzForge
#
# (C) Copyright 2004 Telefónica Investigación y Desarrollo
#     S.A.Unipersonal (Telefónica I+D)
#
# Info about members and contributors of the MORFEO project
# is available at:
#
#   http://morfeo-project.org/
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
# If you want to use this software and plan to distribute a
# proprietary application in any way, and you are not licensing and
# distributing your source code under GPL, you probably need to
# purchase a commercial license of the product.  More info about
# licensing options is available at:
#
#   http://morfeo-project.org/
#

import types
from decimal import *

from django.db import models
from django.utils import simplejson
from django.core.serializers.json import DateTimeAwareJSONEncoder

from xml.dom.minidom import getDOMImplementation


def json_encode(data, ensure_ascii=False):
    """
    The main issues with django's default json serializer is that properties
    that had been added to a object dynamically are being ignored (and it
    also has problems with some models).
    """

    def _any(data):
        ret = None
        if type(data) is types.ListType:
            ret = _list(data)
        elif type(data) is types.DictType:
            ret = _dict(data)
        elif isinstance(data, Decimal):
            # json.dumps() cant handle Decimal
            ret = str(data)
        elif isinstance(data, models.query.QuerySet):
            # Actually its the same as a list ...
            ret = _list(data)
        elif isinstance(data, models.Model):
            ret = _model(data)
        else:
            ret = data
        return ret

    def _model(data):
        ret = {}
        # If we only have a model, we only want to encode the fields.
        for f in data._meta.fields:
            ret[f.attname] = _any(getattr(data, f.attname))
        # And additionally encode arbitrary properties that had been added.
        fields = dir(data.__class__) + ret.keys()
        add_ons = [k for k in dir(data) if k not in fields]
        for k in add_ons:
            ret[k] = _any(getattr(data, k))
        return ret

    def _list(data):
        ret = []
        for v in data:
            ret.append(_any(v))
        return ret

    def _dict(data):
        ret = {}
        for k, v in data.items():
            ret[k] = _any(v)
        return ret

    ret = _any(data)

    return simplejson.dumps(ret, cls=DateTimeAwareJSONEncoder,
                            ensure_ascii=ensure_ascii)


def get_xml_error(value):
    dom = getDOMImplementation()
    doc = dom.createDocument(None, "error", None)
    rootelement = doc.documentElement
    text = doc.createTextNode(value)
    rootelement.appendChild(text)
    errormsg = doc.toxml()
    doc.unlink()
    return errormsg


def getInnerText(domNode, tag=None):
    try:
        if tag == None:
            return domNode.childNodes[0].nodeValue
        else:
            return domNode.getElementsByTagName(tag)[0].childNodes[0].nodeValue
    except:
        return None
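# A hedged usage sketch for the helpers above (Python 2 / old Django, like the
# module itself); the Decimal payload is an illustrative assumption.
from decimal import Decimal
from commons.utils import json_encode, get_xml_error

print json_encode({'price': Decimal('9.90'), 'tags': ['a', 'b']})
# -> {"price": "9.90", "tags": ["a", "b"]}  (Decimal is serialised via str)
print get_xml_error(u'zone not found')
# -> <?xml version="1.0" ?><error>zone not found</error>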
kgblll/libresoft-gymkhana
commons/utils.py
Python
gpl-2.0
3,773
0.015119
########################################################################
# File name: __init__.py
# This file is part of: aioxmpp
#
# LICENSE
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program. If not, see
# <http://www.gnu.org/licenses/>.
#
########################################################################
from aioxmpp.e2etest import (  # NOQA
    setup_package as e2etest_setup_package,
    teardown_package,
)

import warnings


def setup_package():
    e2etest_setup_package()
    warnings.filterwarnings(
        "error",
        message=".+(Stream)?ErrorCondition",
        category=DeprecationWarning,
    )
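# An editorial, self-contained demonstration of what the filter installed by
# setup_package() does (it is not part of the aioxmpp test suite): once
# active, a DeprecationWarning whose message matches
# ".+(Stream)?ErrorCondition" is raised as an error instead of being printed.
import warnings

with warnings.catch_warnings():
    warnings.filterwarnings(
        "error",
        message=".+(Stream)?ErrorCondition",
        category=DeprecationWarning,
    )
    try:
        warnings.warn("use of StreamErrorCondition is deprecated",
                      DeprecationWarning)
    except DeprecationWarning as exc:
        print("caught:", exc)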
horazont/aioxmpp
tests/__init__.py
Python
lgpl-3.0
1,184
0
import datetime
import os
import shutil
import time

from files_by_date.utils.logging_wrapper import get_logger, log_message
from files_by_date.validators.argument_validator import ArgumentValidator

logger = get_logger(name='files_service')


class FilesService:
    def __init__(self):
        raise NotImplementedError

    @classmethod
    def gather_files(cls, parent_directory, files):
        # os.walk already descends into subdirectories, so no manual
        # recursion is needed; the old per-subdir recursive call re-walked
        # bare directory names relative to the CWD and at best duplicated
        # entries.
        for dir_name, subdir_list, file_list in os.walk(parent_directory):
            if file_list:
                files.extend(
                    ['{dir_name}{os_sep}{file_name}'.format(dir_name=dir_name, os_sep=os.sep, file_name=file)
                     for file in file_list])  # [f'{dir_name}{os.sep}{file}' for file in file_list]  # 3.6
        return files

    @classmethod
    def group_files_by_modified_date(cls, files):
        grouped_files = {}
        for file in files:
            directory_tag = cls._get_directory_tag_for_file(file)
            file_group = grouped_files.get(directory_tag, list())
            file_group.append(file)
            grouped_files[directory_tag] = file_group
        return grouped_files

    @classmethod
    def copy_files(cls, file_groups, target_dir, force_overwrite):
        if not os.path.exists(target_dir):
            os.makedirs(target_dir)  # TODO: not covered

        total_count = Count()
        for group in file_groups:
            group_count = Count()
            # group_dir = f'{target_dir}{os.sep}{group}'  # 3.6
            group_dir = '{target_dir}{os_sep}{group}'.format(target_dir=target_dir, os_sep=os.sep, group=group)
            ArgumentValidator.validate_target_dir(group_dir)
            if not os.path.exists(group_dir):
                os.makedirs(group_dir)
                # log_message(f'Created directory: {group_dir}')  # 3.6
                log_message('Created directory: {group_dir}'.format(group_dir=group_dir))

            # log_message(f'Copying {len(file_groups[group])} files to {group_dir}')  # 3.6
            log_message('Copying {group_size} files to {group_dir}'.format(group_size=len(file_groups[group]),
                                                                           group_dir=group_dir))
            for file in file_groups[group]:
                # file_path = f'{group_dir}{os.sep}{os.path.basename(file)}'  # 3.6
                file_path = '{group_dir}{os_sep}{file_name}'.format(group_dir=group_dir, os_sep=os.sep,
                                                                    file_name=os.path.basename(file))
                if force_overwrite and os.path.exists(file_path):
                    os.remove(file_path)

                if not os.path.exists(file_path):
                    shutil.copy2(file, group_dir)
                    group_count.add_copied(count=1)
                else:
                    group_count.add_skipped(count=1)  # TODO: not covered

            total_count.add_files(count=len(file_groups[group]))
            total_count.add_copied(count=group_count.copied)
            total_count.add_skipped(count=group_count.skipped)

            # log_message(f'Copied {group_count.copied}, skipped {group_count.skipped}')  # 3.6
            log_message('Copied {local_copied_count}, skipped {local_skipped_count}'.format(
                local_copied_count=group_count.copied, local_skipped_count=group_count.skipped))

        log_message(
            # f'Total files count {total_count.files}, total copied {total_count.copied}, total skipped {total_count.skipped}'  # 3.6
            'Total files count {total_files_count}, total copied {total_copied_count}, total skipped {total_skipped_count}'.format(
                total_files_count=total_count.files, total_copied_count=total_count.copied,
                total_skipped_count=total_count.skipped))

        return total_count

    @staticmethod
    def _get_directory_tag_for_file(file):
        return datetime.datetime.strptime(time.ctime(os.path.getmtime(file)), "%a %b %d %H:%M:%S %Y").strftime('%Y%m')


class Count:
    def __init__(self, *, files=0, copied=0, skipped=0):
        self.files = files
        self.copied = copied
        self.skipped = skipped

    def __str__(self):
        # return f'files={self.files}, copied={self.copied}, skipped={self.skipped}'  # 3.6
        return 'files={files}, copied={copied}, skipped={skipped}'.format(files=self.files, copied=self.copied,
                                                                          skipped=self.skipped)

    def add_files(self, *, count=1):
        self.files += count

    def add_copied(self, *, count=0):
        self.copied += count

    def add_skipped(self, *, count=0):
        self.skipped += count
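# A hedged end-to-end sketch of using FilesService; 'incoming' and 'sorted'
# are illustrative paths, not project fixtures.
from files_by_date.service.files_service import FilesService

files = FilesService.gather_files('incoming', [])            # recursive listing
groups = FilesService.group_files_by_modified_date(files)    # e.g. {'201508': [...]}
totals = FilesService.copy_files(groups, 'sorted', force_overwrite=False)
print(totals)                                                # files=..., copied=..., skipped=...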
DEV3L/python-files-by-date
files_by_date/service/files_service.py
Python
mit
4,825
0.003731
"""Python interface to GenoLogics LIMS via its REST API. Entities and their descriptors for the LIMS interface. Per Kraulis, Science for Life Laboratory, Stockholm, Sweden. Copyright (C) 2012 Per Kraulis """ from genologics.constants import nsmap try: from urllib.parse import urlsplit, urlparse, parse_qs, urlunparse except ImportError: from urlparse import urlsplit, urlparse, parse_qs, urlunparse import datetime import time from xml.etree import ElementTree import logging logger = logging.getLogger(__name__) class BaseDescriptor(object): "Abstract base descriptor for an instance attribute." def __get__(self, instance, cls): raise NotImplementedError class TagDescriptor(BaseDescriptor): """Abstract base descriptor for an instance attribute represented by an XML element. """ def __init__(self, tag): self.tag = tag def get_node(self, instance): if self.tag: return instance.root.find(self.tag) else: return instance.root class StringDescriptor(TagDescriptor): """An instance attribute containing a string value represented by an XML element. """ def __get__(self, instance, cls): instance.get() node = self.get_node(instance) if node is None: return None else: return node.text def __set__(self, instance, value): instance.get() node = self.get_node(instance) if node is None: # create the new tag node = ElementTree.Element(self.tag) instance.root.append(node) node.text = str(value) class StringAttributeDescriptor(TagDescriptor): """An instance attribute containing a string value represented by an XML attribute. """ def __get__(self, instance, cls): instance.get() return instance.root.attrib[self.tag] def __set__(self, instance, value): instance.get() instance.root.attrib[self.tag] = value class StringListDescriptor(TagDescriptor): """An instance attribute containing a list of strings represented by multiple XML elements. """ def __get__(self, instance, cls): instance.get() result = [] for node in instance.root.findall(self.tag): result.append(node.text) return result class StringDictionaryDescriptor(TagDescriptor): """An instance attribute containing a dictionary of string key/values represented by a hierarchical XML element. """ def __get__(self, instance, cls): instance.get() result = dict() node = instance.root.find(self.tag) if node is not None: for node2 in node.getchildren(): result[node2.tag] = node2.text return result class IntegerDescriptor(StringDescriptor): """An instance attribute containing an integer value represented by an XMl element. """ def __get__(self, instance, cls): text = super(IntegerDescriptor, self).__get__(instance, cls) if text is not None: return int(text) class IntegerAttributeDescriptor(TagDescriptor): """An instance attribute containing a integer value represented by an XML attribute. """ def __get__(self, instance, cls): instance.get() return int(instance.root.attrib[self.tag]) class BooleanDescriptor(StringDescriptor): """An instance attribute containing a boolean value represented by an XMl element. """ def __get__(self, instance, cls): text = super(BooleanDescriptor, self).__get__(instance, cls) if text is not None: return text.lower() == 'true' def __set__(self, instance, value): super(BooleanDescriptor, self).__set__(instance, str(value).lower()) class UdfDictionary(object): "Dictionary-like container of UDFs, optionally within a UDT." 
def _is_string(self, value): try: return isinstance(value, basestring) except: return isinstance(value, str) def __init__(self, instance, *args, **kwargs): self.instance = instance self._udt = kwargs.pop('udt', False) self.rootkeys = args self._rootnode = None self._update_elems() self._prepare_lookup() self.location = 0 @property def rootnode(self): if not self._rootnode: self._rootnode = self.instance.root for rootkey in self.rootkeys: self._rootnode = self._rootnode.find(rootkey) return self._rootnode def get_udt(self): if self._udt == True: return None else: return self._udt def set_udt(self, name): assert isinstance(name, str) if not self._udt: raise AttributeError('cannot set name for a UDF dictionary') self._udt = name elem = self.rootnode.find(nsmap('udf:type')) assert elem is not None elem.set('name', name) udt = property(get_udt, set_udt) def _update_elems(self): self._elems = [] if self._udt: elem = self.rootnode.find(nsmap('udf:type')) if elem is not None: self._udt = elem.attrib['name'] self._elems = elem.findall(nsmap('udf:field')) else: tag = nsmap('udf:field') for elem in self.rootnode.getchildren(): if elem.tag == tag: self._elems.append(elem) def _prepare_lookup(self): self._lookup = dict() for elem in self._elems: type = elem.attrib['type'].lower() value = elem.text if not value: value = None elif type == 'numeric': try: value = int(value) except ValueError: value = float(value) elif type == 'boolean': value = value == 'true' elif type == 'date': value = datetime.date(*time.strptime(value, "%Y-%m-%d")[:3]) self._lookup[elem.attrib['name']] = value def __contains__(self, key): try: self._lookup[key] except KeyError: return False return True def __getitem__(self, key): return self._lookup[key] def __setitem__(self, key, value): self._lookup[key] = value for node in self._elems: if node.attrib['name'] != key: continue vtype = node.attrib['type'].lower() if value is None: pass elif vtype == 'string': if not self._is_string(value): raise TypeError('String UDF requires str or unicode value') elif vtype == 'str': if not self._is_string(value): raise TypeError('String UDF requires str or unicode value') elif vtype == 'text': if not self._is_string(value): raise TypeError('Text UDF requires str or unicode value') elif vtype == 'numeric': if not isinstance(value, (int, float)): raise TypeError('Numeric UDF requires int or float value') value = str(value) elif vtype == 'boolean': if not isinstance(value, bool): raise TypeError('Boolean UDF requires bool value') value = value and 'true' or 'false' elif vtype == 'date': if not isinstance(value, datetime.date): # Too restrictive? 
raise TypeError('Date UDF requires datetime.date value') value = str(value) elif vtype == 'uri': if not self._is_string(value): raise TypeError('URI UDF requires str or punycode (unicode) value') value = str(value) else: raise NotImplemented("UDF type '%s'" % vtype) if not isinstance(value, str): if not self._is_string(value): value = str(value).encode('UTF-8') node.text = value break else: # Create new entry; heuristics for type if self._is_string(value): vtype = '\n' in value and 'Text' or 'String' elif isinstance(value, bool): vtype = 'Boolean' value = value and 'true' or 'false' elif isinstance(value, (int, float)): vtype = 'Numeric' value = str(value) elif isinstance(value, datetime.date): vtype = 'Date' value = str(value) else: raise NotImplementedError("Cannot handle value of type '%s'" " for UDF" % type(value)) if self._udt: root = self.rootnode.find(nsmap('udf:type')) else: root = self.rootnode elem = ElementTree.SubElement(root, nsmap('udf:field'), type=vtype, name=key) if not isinstance(value, str): if not self._is_string(value): value = str(value) elem.text = value #update the internal elements and lookup with new values self._update_elems() self._prepare_lookup() def __delitem__(self, key): del self._lookup[key] for node in self._elems: if node.attrib['name'] == key: self.rootnode.remove(node) break def items(self): return list(self._lookup.items()) def clear(self): for elem in self._elems: self.rootnode.remove(elem) self._update_elems() def __iter__(self): return self def __next__(self): try: ret = list(self._lookup.keys())[self.location] except IndexError: raise StopIteration() self.location = self.location + 1 return ret def get(self, key, default=None): return self._lookup.get(key, default) class UdfDictionaryDescriptor(BaseDescriptor): """An instance attribute containing a dictionary of UDF values represented by multiple XML elements. """ _UDT = False def __init__(self, *args): super(BaseDescriptor, self).__init__() self.rootkeys = args def __get__(self, instance, cls): instance.get() self.value = UdfDictionary(instance, *self.rootkeys, udt=self._UDT) return self.value def __set__(self, instance, dict_value): instance.get() udf_dict = UdfDictionary(instance, *self.rootkeys, udt=self._UDT) udf_dict.clear() for k in dict_value: udf_dict[k] = dict_value[k] class UdtDictionaryDescriptor(UdfDictionaryDescriptor): """An instance attribute containing a dictionary of UDF values in a UDT represented by multiple XML elements. """ _UDT = True class PlacementDictionaryDescriptor(TagDescriptor): """An instance attribute containing a dictionary of locations keys and artifact values represented by multiple XML elements. """ def __get__(self, instance, cls): from genologics.entities import Artifact instance.get() self.value = dict() for node in instance.root.findall(self.tag): key = node.find('value').text self.value[key] = Artifact(instance.lims, uri=node.attrib['uri']) return self.value class ExternalidListDescriptor(BaseDescriptor): """An instance attribute yielding a list of tuples (id, uri) for external identifiers represented by multiple XML elements. """ def __get__(self, instance, cls): instance.get() result = [] for node in instance.root.findall(nsmap('ri:externalid')): result.append((node.attrib.get('id'), node.attrib.get('uri'))) return result class EntityDescriptor(TagDescriptor): "An instance attribute referencing another entity instance." 
def __init__(self, tag, klass): super(EntityDescriptor, self).__init__(tag) self.klass = klass def __get__(self, instance, cls): instance.get() node = instance.root.find(self.tag) if node is None: return None else: return self.klass(instance.lims, uri=node.attrib['uri']) def __set__(self, instance, value): instance.get() node = self.get_node(instance) if node is None: # create the new tag node = ElementTree.Element(self.tag) instance.root.append(node) node.attrib['uri'] = value.uri class EntityListDescriptor(EntityDescriptor): """An instance attribute yielding a list of entity instances represented by multiple XML elements. """ def __get__(self, instance, cls): instance.get() result = [] for node in instance.root.findall(self.tag): result.append(self.klass(instance.lims, uri=node.attrib['uri'])) return result class NestedAttributeListDescriptor(StringAttributeDescriptor): """An instance yielding a list of dictionnaries of attributes for a nested xml list of XML elements""" def __init__(self, tag, *args): super(StringAttributeDescriptor, self).__init__(tag) self.tag = tag self.rootkeys = args def __get__(self, instance, cls): instance.get() result = [] rootnode = instance.root for rootkey in self.rootkeys: rootnode = rootnode.find(rootkey) for node in rootnode.findall(self.tag): result.append(node.attrib) return result class NestedStringListDescriptor(StringListDescriptor): """An instance yielding a list of strings for a nested list of xml elements""" def __init__(self, tag, *args): super(StringListDescriptor, self).__init__(tag) self.tag = tag self.rootkeys = args def __get__(self, instance, cls): instance.get() result = [] rootnode = instance.root for rootkey in self.rootkeys: rootnode = rootnode.find(rootkey) for node in rootnode.findall(self.tag): result.append(node.text) return result class NestedEntityListDescriptor(EntityListDescriptor): """same as EntityListDescriptor, but works on nested elements""" def __init__(self, tag, klass, *args): super(EntityListDescriptor, self).__init__(tag, klass) self.klass = klass self.tag = tag self.rootkeys = args def __get__(self, instance, cls): instance.get() result = [] rootnode = instance.root for rootkey in self.rootkeys: rootnode = rootnode.find(rootkey) for node in rootnode.findall(self.tag): result.append(self.klass(instance.lims, uri=node.attrib['uri'])) return result class DimensionDescriptor(TagDescriptor): """An instance attribute containing a dictionary specifying the properties of a dimension of a container type. """ def __get__(self, instance, cls): instance.get() node = instance.root.find(self.tag) return dict(is_alpha=node.find('is-alpha').text.lower() == 'true', offset=int(node.find('offset').text), size=int(node.find('size').text)) class LocationDescriptor(TagDescriptor): """An instance attribute containing a tuple (container, value) specifying the location of an analyte in a container. 
""" def __get__(self, instance, cls): from genologics.entities import Container instance.get() node = instance.root.find(self.tag) uri = node.find('container').attrib['uri'] return Container(instance.lims, uri=uri), node.find('value').text class ReagentLabelList(BaseDescriptor): """An instance attribute yielding a list of reagent labels""" def __get__(self, instance, cls): instance.get() self.value = [] for node in instance.root.findall('reagent-label'): try: self.value.append(node.attrib['name']) except: pass return self.value class InputOutputMapList(BaseDescriptor): """An instance attribute yielding a list of tuples (input, output) where each item is a dictionary, representing the input/output maps of a Process instance. """ def __init__(self, *args): super(BaseDescriptor, self).__init__() self.rootkeys = args def __get__(self, instance, cls): instance.get() self.value = [] rootnode = instance.root for rootkey in self.rootkeys: rootnode = rootnode.find(rootkey) for node in rootnode.findall('input-output-map'): input = self.get_dict(instance.lims, node.find('input')) output = self.get_dict(instance.lims, node.find('output')) self.value.append((input, output)) return self.value def get_dict(self, lims, node): from genologics.entities import Artifact, Process if node is None: return None result = dict() for key in ['limsid', 'output-type', 'output-generation-type']: try: result[key] = node.attrib[key] except KeyError: pass for uri in ['uri', 'post-process-uri']: try: result[uri] = Artifact(lims, uri=node.attrib[uri]) except KeyError: pass node = node.find('parent-process') if node is not None: result['parent-process'] = Process(lims, node.attrib['uri']) return result
BigelowLab/genologics
genologics/descriptors.py
Python
mit
18,060
0.000388
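The descriptors in the record above are meant to be attached as class attributes of LIMS entity objects that expose a root ElementTree node and a get() method. A minimal, self-contained sketch of that contract; the DummyEntity class is invented here for illustration and assumes the module above is importable as genologics.descriptors:

from xml.etree import ElementTree

from genologics.descriptors import StringDescriptor  # module shown above

class DummyEntity(object):
    # the descriptor reads and writes the <name> child element of self.root
    name = StringDescriptor('name')

    def __init__(self, xml_text):
        self.root = ElementTree.fromstring(xml_text)

    def get(self):
        pass  # real entities lazily fetch their XML from the LIMS here

e = DummyEntity('<sample><name>S1</name></sample>')
assert e.name == 'S1'
e.name = 'S2'                        # mutates the underlying XML element text
assert e.root.find('name').text == 'S2'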
from allauth.account.signals import email_confirmed, email_changed, email_added, email_removed, user_signed_up, user_logged_in
from django.contrib.auth.models import User, Group, Permission
from django.db.models import Q
from django.dispatch import receiver

"""intercept signals from allauth"""


@receiver(email_confirmed)
def email_confirmed_(sender, email_address, **kwargs):
    """user has confirmed the email manually"""
    # print(email_address.email + " confirmed email.")
    query = {'email': email_address.email}
    if email_address.primary:
        user = User.objects.get(**query)
        # print(str(user) + " confirmed primary email.")
        group = Group.objects.get(name='AllowedCommentary')
        user.groups.add(group)


@receiver(user_signed_up)
def user_signed_up_(sender, request, user, **kwargs):
    """when a user signs up"""
    # print("SIGN UP " + str(user) + " signed up and kwargs=" + str(kwargs))
    social_login = kwargs.get('sociallogin', None)
    if social_login:
        social_account = social_login.account
        if social_account:
            if 'verified_email' in social_account.extra_data:
                if social_account.extra_data['verified_email']:
                    group = Group.objects.get(name='AllowedCommentary')
                    user.groups.add(group)
scotartt/commentarius
decommentariis/decommentariis/signals.py
Python
gpl-2.0
1,210
0.015702
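The deeply nested checks in user_signed_up_ amount to one guarded group assignment. A behavior-equivalent sketch, written as a free function for clarity (same Django/allauth objects; the helper name is not part of the repository):

from django.contrib.auth.models import Group

def grant_commentary_if_verified(user, sociallogin):
    # mirrors the nested ifs above: only verified social emails earn the group
    account = getattr(sociallogin, 'account', None)
    if account and account.extra_data.get('verified_email'):
        user.groups.add(Group.objects.get(name='AllowedCommentary'))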
from django.contrib import admin

from .models import Message

admin.site.register(Message)
Pylvax/django
project/starter_app/admin.py
Python
mit
92
0.01087
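The record registers Message with the default ModelAdmin. If list columns or ordering were wanted later, the standard Django pattern is a ModelAdmin subclass; the fields below rely only on the implicit primary key, since the Message model's own fields are not shown:

from django.contrib import admin
from .models import Message

@admin.register(Message)              # decorator form of admin.site.register
class MessageAdmin(admin.ModelAdmin):
    list_display = ("id",)            # add real model fields here as needed
    ordering = ("-id",)               # newest first in the changelist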
#!/usr/bin/env python
"""macro_tests.py: Some macro tests"""
from __future__ import absolute_import, division, print_function, unicode_literals

__author__ = "Fabien Cromieres"
__license__ = "undecided"
__version__ = "1.0"
__email__ = "fabien.cromieres@gmail.com"
__status__ = "Development"

# import nmt_chainer.make_data as make_data
# import nmt_chainer.training_module.train as train
# import nmt_chainer.eval as eval
from nmt_chainer.__main__ import main

import os.path

import pytest


class TestTrainingManagement:

    def test_checkpoint_saving(self, tmpdir, gpu):
        """Test no error happens during checkpoint saving."""
        test_data_dir = os.path.join(
            os.path.dirname(
                os.path.abspath(__file__)), "../tests_data")
        train_dir = tmpdir.mkdir("train")
        data_prefix = str(train_dir.join("test1.data"))
        train_prefix = str(train_dir.join("test1.train"))
        data_src_file = os.path.join(test_data_dir, "src2.txt")
        data_tgt_file = os.path.join(test_data_dir, "tgt2.txt")
        args = 'make_data {0} {1} {2} --dev_src {0} --dev_tgt {1}'.format(
            data_src_file, data_tgt_file, data_prefix).split(' ')
        main(arguments=args)

        args_train = ["train", data_prefix, train_prefix] + "--max_nb_iters 10 --mb_size 2 --Ei 10 --Eo 12 --Hi 30 --Ha 70 --Ho 15 --Hl 23 --save_ckpt_every 5".split(" ")
        if gpu is not None:
            args_train += ['--gpu', gpu]
        main(arguments=args_train)

    def test_config_saving(self, tmpdir, gpu):
        """Test no error happens during config saving and reloading."""
        test_data_dir = os.path.join(
            os.path.dirname(
                os.path.abspath(__file__)), "../tests_data")
        train_dir = tmpdir.mkdir("train")
        data_prefix = str(train_dir.join("test1.data"))
        train_prefix = str(train_dir.join("test1.train"))
        data_src_file = os.path.join(test_data_dir, "src2.txt")
        data_tgt_file = os.path.join(test_data_dir, "tgt2.txt")
        args = 'make_data {0} {1} {2} --dev_src {0} --dev_tgt {1}'.format(
            data_src_file, data_tgt_file, data_prefix).split(' ')
        main(arguments=args)

        args_train = ["train", data_prefix, train_prefix] + "--max_nb_iters 5 --mb_size 2 --Ei 10 --Eo 12 --Hi 30 --Ha 70 --Ho 15 --Hl 23".split(" ")
        if gpu is not None:
            args_train += ['--gpu', gpu]
        main(arguments=args_train)

        config_filename = train_prefix + ".train.config"

        train_prefix_2 = train_prefix + ".2"
        args_train = ["train", "--config", config_filename, "--save_prefix", train_prefix_2]
        if gpu is not None:
            args_train += ['--gpu', gpu]
        main(arguments=args_train)

        config_filename2 = train_prefix_2 + ".train.config"

        import json
        config1 = json.load(open(config_filename))
        config2 = json.load(open(config_filename2))

        def compare_dict_except(d1, d2, except_fields=None):
            k_list_1 = set(d1.keys())
            k_list_2 = set(d2.keys())
            k_xor = (k_list_1 - k_list_2) | (k_list_2 - k_list_1)
            for k_diff in k_xor:
                if except_fields is None or k_diff not in except_fields:
                    return False
            for k in k_list_1 & k_list_2:
                v1 = d1[k]
                if isinstance(v1, dict):
                    compare_result = compare_dict_except(d1[k], d2[k], except_fields=except_fields)
                    if not compare_result:
                        return False
                else:
                    if v1 != d2[k] and (except_fields is None or k not in except_fields):
                        return False
            return True

        assert compare_dict_except(config1, config2, except_fields="metadata save_prefix config".split())
fabiencro/knmt
tests/suite1/training_management_test.py
Python
gpl-3.0
3,928
0.001527
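compare_dict_except in test_config_saving equates two configs while ignoring the listed keys, recursing into nested dicts. A shallow, simplified standalone analogue of the idea, with invented values, to show what the final assert accepts:

def equal_ignoring(d1, d2, fields):
    # non-recursive simplification of compare_dict_except
    def strip(d):
        return {k: v for k, v in d.items() if k not in fields}
    return strip(d1) == strip(d2)

assert equal_ignoring({"mb_size": 2, "save_prefix": "run1"},
                      {"mb_size": 2, "save_prefix": "run2"},
                      {"save_prefix"})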
import wx
import re
import os
import time
import inspect

cmdFolder = os.path.realpath(os.path.abspath(os.path.split(inspect.getfile(inspect.currentframe()))[0]))

gcRegex = re.compile("[-]?\d+[.]?\d*")

from cnc import CNC
from reprapenums import RepRapEventEnum
from gcframe import GcFrame
from properties import PropertiesDlg
from propenums import PropertyEnum
from printstateenum import PrintState
from tools import formatElapsed
from gcsuffix import parseGCSuffix
from sdcard import SDCard
from History.history import PrintStarted, PrintCompleted

BUTTONDIM = (48, 48)
BUTTONDIMWIDE = (96, 48)

RECORD_TIMES = True


class PrintButton(wx.BitmapButton):
    def __init__(self, parent, images):
        self.imgPrint = images.pngPrint
        self.imgRestart = images.pngRestart
        wx.BitmapButton.__init__(self, parent, wx.ID_ANY, self.imgPrint, size=BUTTONDIM)
        self.setPrint()

    def setPrint(self):
        self.SetBitmap(self.imgPrint)
        self.SetToolTip("Start printing")

    def setRestart(self):
        self.SetBitmap(self.imgRestart)
        self.SetToolTip("Restart print from the beginning")


class PauseButton(wx.BitmapButton):
    def __init__(self, parent, images):
        wx.BitmapButton.__init__(self, parent, wx.ID_ANY, images.pngPause, size=BUTTONDIM)
        self.setPause()

    def setPause(self):
        self.SetToolTip("Pause printing")

    def setResume(self):
        self.SetToolTip("Resume print from the paused point")


class PrintMonitorDlg(wx.Frame):
    def __init__(self, parent, wparent, reprap, prtName):
        self.parent = parent
        self.wparent = wparent
        self.log = self.parent.log
        self.history = wparent.history
        self.reprap = reprap
        self.settings = self.parent.settings
        self.images = self.parent.images
        self.state = PrintState.idle
        self.oldState = None
        self.gcodeLoaded = False
        self.gcodeFile = None
        self.printerName = prtName
        self.layerMap = []
        self.okToImport = False
        self.importFile = None
        self.currentLayer = 0
        self.maxTool = 0
        self.eUsed = [0.0, 0.0, 0.0, 0.0]
        self.totalTime = 0
        self.totalTimeStr = ""
        self.layerTimes = []
        self.layerTimeStr = []
        self.layerRange = (0, 0)
        self.gObj = None
        self.printLayer = 0
        self.printPosition = None

        title = self.buildTitle()
        wx.Frame.__init__(self, wparent, wx.ID_ANY, title=title)
        self.Show()
        ico = wx.Icon(os.path.join(cmdFolder, "images", "printmon.png"), wx.BITMAP_TYPE_PNG)
        self.SetIcon(ico)

        if self.settings.hassdcard:
            self.sdcard = SDCard(self.parent, self, self.reprap, self.log)
        else:
            self.sdcard = None

        self.gcf = GcFrame(self, self.gObj, self.settings)
        self.stLayerText = wx.StaticText(self, wx.ID_ANY, "Layer Height: 0.00")

        ht = self.gcf.GetSize().Get()[1] - BUTTONDIM[1]*2 - 20
        self.slLayers = wx.Slider(self, wx.ID_ANY, 0, 0, 1000, size=(-1, ht),
                                  style=wx.SL_VERTICAL | wx.SL_AUTOTICKS | wx.SL_LABELS | wx.SL_INVERSE)
        self.Bind(wx.EVT_SCROLL, self.onLayerScroll, self.slLayers)
        self.slLayers.Enable(False)

        self.cbShowMoves = wx.CheckBox(self, wx.ID_ANY, "Show moves")
        self.cbShowMoves.SetValue(self.settings.showmoves)
        self.Bind(wx.EVT_CHECKBOX, self.onShowMoves, self.cbShowMoves)

        self.cbShowPrevious = wx.CheckBox(self, wx.ID_ANY, "Show previous layer")
        self.cbShowPrevious.SetValue(self.settings.showprevious)
        self.Bind(wx.EVT_CHECKBOX, self.onShowPrevious, self.cbShowPrevious)

        self.cbToolPathOnly = wx.CheckBox(self, wx.ID_ANY, "Show tool paths only")
        self.cbToolPathOnly.SetValue(self.settings.toolpathonly)
        self.Bind(wx.EVT_CHECKBOX, self.onToolPathOnly, self.cbToolPathOnly)

        self.cbSyncPrint = wx.CheckBox(self, wx.ID_ANY, "Sync with print")
        self.cbSyncPrint.SetValue(True)
        self.Bind(wx.EVT_CHECKBOX, self.onSyncPrint, self.cbSyncPrint)

        self.bImport = wx.BitmapButton(self, wx.ID_ANY, self.images.pngImport, size=BUTTONDIM)
        self.bImport.SetToolTip("Import G Code file from toolbox")
        self.Bind(wx.EVT_BUTTON, self.onImport, self.bImport)

        self.bImportQ = wx.BitmapButton(self, wx.ID_ANY, self.images.pngNext, size=BUTTONDIM)
        self.Bind(wx.EVT_BUTTON, self.onImportFromQueue, self.bImportQ)

        self.bOpen = wx.BitmapButton(self, wx.ID_ANY, self.images.pngFileopen, size=BUTTONDIM)
        self.bOpen.SetToolTip("Open a G Code file")
        self.Bind(wx.EVT_BUTTON, self.onOpenFile, self.bOpen)

        self.Bind(wx.EVT_CLOSE, self.onClose)

        self.bPrint = PrintButton(self, self.images)
        self.bPrint.Enable(False)
        self.Bind(wx.EVT_BUTTON, self.onPrint, self.bPrint)

        self.bPause = PauseButton(self, self.images)
        self.bPause.Enable(False)
        self.Bind(wx.EVT_BUTTON, self.onPause, self.bPause)

        self.bSdPrintTo = wx.BitmapButton(self, wx.ID_ANY, self.images.pngSdprintto, size=(BUTTONDIMWIDE))
        self.bSdPrintTo.Enable(False)
        self.Bind(wx.EVT_BUTTON, self.onSdPrintTo, self.bSdPrintTo)

        self.bSdPrintFrom = wx.BitmapButton(self, wx.ID_ANY, self.images.pngSdprintfrom, size=(BUTTONDIMWIDE))
        self.bSdPrintFrom.Enable(False)
        self.Bind(wx.EVT_BUTTON, self.onSdPrintFrom, self.bSdPrintFrom)

        self.bSdDelete = wx.BitmapButton(self, wx.ID_ANY, self.images.pngSddelete, size=(BUTTONDIM))
        self.bSdDelete.Enable(False)
        self.Bind(wx.EVT_BUTTON, self.onSdDelete, self.bSdDelete)

        self.bUp = wx.BitmapButton(self, wx.ID_ANY, self.images.pngUp, size=BUTTONDIM)
        self.bUp.SetToolTip("Move up one layer")
        self.Bind(wx.EVT_BUTTON, self.onUp, self.bUp)
        self.bUp.Enable(False)

        self.bDown = wx.BitmapButton(self, wx.ID_ANY, self.images.pngDown, size=BUTTONDIM)
        self.bDown.SetToolTip("Move down one layer")
        self.Bind(wx.EVT_BUTTON, self.onDown, self.bDown)
        self.bDown.Enable(False)

        szGcf = wx.BoxSizer(wx.HORIZONTAL)
        szGcf.AddSpacer(10)
        szGcf.Add(self.gcf)
        szGcf.Add(self.stLayerText, 1, wx.ALIGN_CENTER_HORIZONTAL, 1)
        szGcf.AddSpacer(10)

        szNav = wx.BoxSizer(wx.VERTICAL)
        szNav.Add(self.bUp, 1, wx.ALIGN_CENTER_HORIZONTAL, 1)
        szNav.AddSpacer(10)
        szNav.Add(self.slLayers)
        szNav.AddSpacer(10)
        szNav.Add(self.bDown, 1, wx.ALIGN_CENTER_HORIZONTAL, 1)

        szGcf.Add(szNav)
        szGcf.AddSpacer(10)

        szOpts = wx.BoxSizer(wx.HORIZONTAL)
        szOpts.AddSpacer(10)
        szOpts.Add(self.cbShowMoves)
        szOpts.AddSpacer(10)
        szOpts.Add(self.cbShowPrevious)
        szOpts.AddSpacer(10)
        szOpts.Add(self.cbToolPathOnly)
        szOpts.AddSpacer(10)
        szOpts.Add(self.cbSyncPrint)
        szOpts.AddSpacer(10)

        szBtn = wx.BoxSizer(wx.HORIZONTAL)
        szBtn.AddSpacer(10)
        szBtn.Add(self.bImport)
        szBtn.AddSpacer(10)
        szBtn.Add(self.bImportQ)
        szBtn.AddSpacer(10)
        szBtn.Add(self.bOpen)
        szBtn.AddSpacer(20)
        szBtn.Add(self.bPrint)
        szBtn.AddSpacer(10)
        szBtn.Add(self.bPause)

        if self.sdcard:
            szBtn.AddSpacer(20)
            szBtn.Add(self.bSdPrintTo)
            szBtn.AddSpacer(10)
            szBtn.Add(self.bSdPrintFrom)
            szBtn.AddSpacer(10)
            szBtn.Add(self.bSdDelete)

        szBtn.AddSpacer(10)

        szDlg = wx.BoxSizer(wx.VERTICAL)
        szDlg.AddSpacer(10)
        szDlg.Add(szGcf)
        szDlg.AddSpacer(10)
        szDlg.Add(szOpts)
        szDlg.AddSpacer(10)
        szDlg.Add(szBtn)
        szDlg.AddSpacer(10)

        self.SetSizer(szDlg)
        self.Fit()
        self.Layout()

        self.propDlg = PropertiesDlg(self, wparent, self.printerName)
        self.propDlg.Show()
        if not self.settings.propposition is None:
            self.propDlg.SetPosition(self.settings.propposition)

        self.enableButtonsByState()
        self.reprap.registerPositionHandler(self.updatePrintPosition)
        self.reprap.registerEventHandler(self.reprapEvent)
        self.reprap.registerSdEventHandler(self.sdcard)

    def show(self):
        self.Show()
        self.Raise()
        self.propDlg.Show()
        self.propDlg.Raise()

    def setLayerText(self, ht):
        if ht is None:
            htv = 0.0
        else:
            htv = ht
        self.stLayerText.SetLabel("Layer Height: %0.3f" % htv)

    def getStatusReport(self):
        r = self.propDlg.getStatusReport()
        r["PrintStatus"] = PrintState.label[self.state]
        return r

    def buildTitle(self):
        t = "%s print monitor" % self.printerName
        if self.gcodeLoaded:
            if len(self.gcodeFile) > 45:
                t += " - %s" % os.path.basename(self.gcodeFile)
            else:
                t += " - %s" % self.gcodeFile
        return t

    def rememberPositions(self):
        self.settings.propposition = self.propDlg.GetPosition()

    def isPrinting(self):
        return self.state in [PrintState.printing, PrintState.sdprintingto, PrintState.sdprintingfrom]

    def onClose(self, evt):
        if self.isPrinting():
            dlg = wx.MessageDialog(self, 'Cannot exit with printing active',
                                   "Printer is active",
                                   wx.OK | wx.ICON_INFORMATION)
            dlg.ShowModal()
            dlg.Destroy()
            return
        self.terminate()

    def terminate(self):
        self.reprap.registerPositionHandler(None)
        self.reprap.registerEventHandler(None)
        self.parent.closePrintMon()
        self.propDlg.Destroy()
        self.Destroy()

    def onShowMoves(self, evt):
        v = self.cbShowMoves.GetValue()
        self.settings.showmoves = v
        self.gcf.setShowMoves(v)

    def onShowPrevious(self, evt):
        v = self.cbShowPrevious.GetValue()
        self.settings.showprevious = v
        self.gcf.setShowPrevious(v)

    def onToolPathOnly(self, evt):
        v = self.cbToolPathOnly.GetValue()
        self.settings.toolpathonly = v
        self.gcf.setToolPathsOnly(v)

    def onSyncPrint(self, evt):
        v = self.cbSyncPrint.GetValue()
        self.gcf.setSyncWithPrint(v)

    def onLayerScroll(self, evt):
        v = self.slLayers.GetValue()
        if v == self.currentLayer:
            return
        self.gcf.setLayer(v)
        self.changeLayer(v)

    def onUp(self, evt):
        lmax = self.slLayers.GetRange()[1]
        if self.currentLayer >= lmax:
            return
        v = self.currentLayer + 1
        self.gcf.setLayer(v)
        self.changeLayer(v)

    def onDown(self, evt):
        if self.currentLayer <= 0:
            return
        v = self.currentLayer - 1
        self.gcf.setLayer(v)
        self.changeLayer(v)

    def onImport(self, evt):
        fn = self.wparent.importGcFile()
        if fn is None:
            return
        self.loadGFile(fn)

    def onImportFromQueue(self, evt):
        fn = self.wparent.importGcFromQueue()
        if fn is None:
            return
        self.loadGFile(fn)

    def setImportButton(self, msg):
        if msg is None:
            self.okToImport = False
            self.bImportQ.SetToolTip("")
            self.bImportQ.Enable(False)
        else:
            self.okToImport = True
            self.bImportQ.SetToolTip(msg)
            self.bImportQ.Enable(self.bOpen.IsEnabled())

    def setImportFile(self, fn):
        self.importFile = fn
        if fn is None:
            self.bImport.Enable(False)
            self.bImport.SetToolTip("")
        else:
            self.bImport.Enable(self.bOpen.IsEnabled())
            self.bImport.SetToolTip("Import G Code file (%s)" % fn)

    def onOpenFile(self, evt):
        wildcard = "GCode (*.gcode)|*.gcode;*.GCODE|" \
                   "All files (*.*)|*.*"
        dlg = wx.FileDialog(self, message="Choose a GCode file",
                            defaultDir=self.settings.lastdirectory, defaultFile="",
                            wildcard=wildcard, style=wx.FD_OPEN)
        rc = dlg.ShowModal()
        if rc == wx.ID_OK:
            path = dlg.GetPath().encode('ascii', 'ignore')
        dlg.Destroy()
        if rc != wx.ID_OK:
            return
        self.loadGFile(path)

    def loadGFile(self, path):
        self.settings.lastdirectory = os.path.dirname(path)
        self.loadGCode(path)
        if self.gObj is None:
            lmax = 1
            self.slLayers.Enable(False)
            self.bUp.Enable(False)
            self.bDown.Enable(False)
        else:
            lmax = self.gObj.layerCount()-1
            self.slLayers.Enable(True)
            self.bUp.Enable(True)
            self.bDown.Enable(True)
        self.slLayers.SetRange(0, lmax)
        self.slLayers.SetPageSize(int(lmax/10))
        self.gcf.loadModel(self.gObj)
        self.changeLayer(0)
        self.state = PrintState.idle
        self.oldState = None
        self.enableButtonsByState()
        t = self.buildTitle()
        self.SetTitle(t)
        self.propDlg.setPrintStatus(PrintState.idle)

    def loadGCode(self, fn):
        def gnormal(s):
            if ";" in s:
                return s.split(";")[0].rstrip()
            else:
                return s.rstrip()

        self.gcodeFile = None
        self.gcodeLoaded = False
        self.gcode = []
        self.gObj = None
        self.maxLine = 0
        self.totalTime = 0
        self.totalTimeStr = ""
        self.layerTimes = []
        self.layerTimeStr = []
        self.propDlg.clearAllProperties()
        self.reprap.clearPrint()
        if fn is None:
            return

        try:
            gc = list(open(fn))
        except:
            self.log("Error opening file %s" % fn)
            self.gcode = []
            self.gObj = None
            self.gcodeLoaded = False
            return

        self.gcode = [s for s in map(gnormal, gc) if s.strip() != ""]
        self.gObj = self.buildModel()
        self.maxLine = self.gObj.getMaxLine()
        self.eUsed = self.gObj.getFilament()
        self.gcodeLoaded = True
        self.gcodeFile = pfn = fn
        if len(pfn) > 45:
            pfn = os.path.basename(fn)
        self.propDlg.setProperty(PropertyEnum.fileName, pfn)
        ftime = time.strftime('%y/%m/%d-%H:%M:%S', time.localtime(os.path.getmtime(fn)))
        self.propDlg.setProperty(PropertyEnum.sliceTime, ftime)
        self.propDlg.setProperty(PropertyEnum.printEstimate, self.totalTimeStr)
        if self.settings.nextruders < self.maxTool+1:
            self.log("G Code file uses more tools (%d) than printer is equipped with (%d)" % (self.maxTool+1, self.settings.nextruders))

        slCfg, filSiz, tempsHE, tempsBed = parseGCSuffix(gc)
        if tempsBed == "??":
            tBed = 0
        else:
            try:
                tBed = int(float(tempsBed))
            except:
                tBed = 0
        if tempsHE == "??":
            tHe = [0] * self.settings.nextruders
        else:
            try:
                x = [int(float(x)) for x in re.split(", *", tempsHE)] + [0]*self.settings.nextruders
                tHe = x[:self.settings.nextruders]
            except:
                tHe = [0] * self.settings.nextruders
        self.parent.registerGCodeTemps(tHe, tBed)
        self.propDlg.setProperty(PropertyEnum.slicerCfg, slCfg)
        self.propDlg.setProperty(PropertyEnum.filamentSize, filSiz)
        self.propDlg.setProperty(PropertyEnum.temperatures, "HE:%s BED:%s" % (tempsHE, tempsBed))

    def updatePrintPosition(self, position):
        self.printLayer = self.getLayerByPosition(position)
        self.printPosition = position
        if self.state in [PrintState.printing, PrintState.sdprintingto]:
            posString = "%d/%d" % (position, self.maxLine)
            if self.maxLine != 0:
                pct = float(position) / float(self.maxLine) * 100.0
                posString += " (%.1f%%)" % pct
            self.propDlg.setProperty(PropertyEnum.position, posString)
            self.gcf.setPrintPosition(position)
            lx = self.gcf.getCurrentLayer()
            if lx != self.currentLayer:
                self.changeLayer(lx)
            layersSplit = self.sumLayerTimes(self.printLayer)
            layerSplit = self.partialPrintingLayer()
            self.elapsed = time.time() - self.startTime
            expected = layersSplit[0] + layerSplit[0]
            elapsedStr = "%s (expected: %s)" % (formatElapsed(self.elapsed), formatElapsed(expected))
            self.propDlg.setProperty(PropertyEnum.elapsed, elapsedStr)
            self.remaining = layersSplit[1] + layerSplit[1]
            self.propDlg.setProperty(PropertyEnum.remaining, formatElapsed(self.remaining))
            # TODO - probably don't need all the various time estimates when printing TO SD
            # BUT IT MAY BREAK LOGIC BELOW (update time until) that relies on these values
            newEta = time.time() + self.remaining
            revisedStr = time.strftime('%H:%M:%S', time.localtime(newEta))
            tdiff = newEta - self.origEta
            if tdiff < 0:
                revisedStr += " (%s ahead of estimate)" % formatElapsed(-tdiff)
            elif tdiff > 0:
                revisedStr += " (%s behind estimate)" % formatElapsed(tdiff)
            self.propDlg.setProperty(PropertyEnum.revisedEta, revisedStr)
            self.updateTimeUntil()
        elif self.state == PrintState.sdprintingfrom:
            # TODO Need to convey print position when printing from SD card
            pass

    def getLayerByPosition(self, pos):
        for lx in range(len(self.layerMap)):
            if self.layerMap[lx][0] <= pos and pos <= self.layerMap[lx][1]:
                return lx
        return 0

    def partialPrintingLayer(self):
        f, l = self.gObj.getGCodeLines(self.printLayer)
        if f <= self.printPosition and self.printPosition <= l:
            done = self.printPosition - f
            todo = l - self.printPosition + 1
            total = l - f + 1
            lt = self.layerTimes[self.printLayer]
            pctDone = float(done) / float(total)
            pctToDo = float(todo) / float(total)
            return (pctDone*lt, pctToDo*lt)
        else:
            return (0.0, 0.0)

    def sumLayerTimes(self, lx):
        tBefore = sum(self.layerTimes[:lx])
        tAfter = sum(self.layerTimes[lx+1:])
        return (tBefore, tAfter)

    def sumLayerRangeTime(self, slx, elx):
        return sum(self.layerTimes[slx:elx])

    def changeLayer(self, lx):
        self.currentLayer = lx
        self.slLayers.SetValue(lx)
        ht = self.gObj.getLayerHeight(lx)
        self.setLayerText(ht)
        if ht is None:
            self.propDlg.setProperty(PropertyEnum.layerNum, "%d / %d" % (lx, self.gObj.layerCount()))
        else:
            self.propDlg.setProperty(PropertyEnum.layerNum, "%d / %d (%.2f mm) " % (lx, self.gObj.layerCount(), ht))
        f, l = self.gObj.getGCodeLines(lx)
        if f is None:
            self.propDlg.setProperty(PropertyEnum.gCodeRange, "")
            self.layerRange = (0, 0)
        else:
            self.propDlg.setProperty(PropertyEnum.gCodeRange, "%d - %d" % (f, l))
            self.layerRange = (f, l)
        x0, y0, xn, yn = self.gObj.getLayerMinMaxXY(lx)
        if x0 is None:
            self.propDlg.setProperty(PropertyEnum.minMaxXY, "")
        else:
            self.propDlg.setProperty(PropertyEnum.minMaxXY, "(%.2f, %.2f) - (%.2f, %.2f)" % (x0, y0, xn, yn))
        le, prior, after = self.gObj.getLayerFilament(lx)
        s = []
        for i in range(self.settings.nextruders):
            s.append("%.2f/%.2f <: %.2f >: %.2f" % (le[i], self.eUsed[i], prior[i], after[i]))
        self.propDlg.setProperty(PropertyEnum.filamentUsed, s)
        self.propDlg.setProperty(PropertyEnum.layerPrintTime, "%s / %s" % (self.layerTimeStr[lx], self.totalTimeStr))
        self.updateTimeUntil()

    def updateTimeUntil(self):
        if self.currentLayer <= self.printLayer:
            self.propDlg.setProperty(PropertyEnum.timeUntil, "")
        elif self.printPosition is None:
            t = sum(self.layerTimes[:self.currentLayer])
            self.propDlg.setProperty(PropertyEnum.timeUntil, formatElapsed(t))
        else:
            t = sum(self.layerTimes[self.printLayer+1:self.currentLayer]) + self.partialPrintingLayer()[1]
            self.propDlg.setProperty(PropertyEnum.timeUntil, formatElapsed(t))

    def reprapEvent(self, evt):
        if evt.event == RepRapEventEnum.PRINT_COMPLETE:
            # TODO - do I need special consideration here for print FROM SD
            if self.state == PrintState.sdprintingto:
                self.reprap.sendNow("M29 %s" % self.sdTargetFile)
                self.reprap.suspendTempProbe(False)
                self.setSDTargetFile(None)
            if self.state == PrintState.printing:
                self.history.addEvent(PrintCompleted(self.history.addFile(self.gcodeFile), ""))
            self.state = PrintState.idle
            self.oldState = None
            self.propDlg.setPrintStatus(PrintState.idle)
            self.gcf.setPrintPosition(-1)
            self.printPosition = None
            self.printLayer = 0
            self.enableButtonsByState()
            self.elapsed = time.time() - self.startTime
            cmpTime = time.time()
            expCmpTime = self.origEta - self.startTime
            cmpTimeStr = time.strftime('%H:%M:%S', time.localtime(cmpTime))
            self.log("Print completed at %s" % (cmpTimeStr))
            self.log("Total print time of %s - expected print time %s" % (formatElapsed(self.elapsed), formatElapsed(expCmpTime)))
            self.reprap.printComplete()
        elif evt.event == RepRapEventEnum.PRINT_STOPPED:
            if self.state != PrintState.paused:
                self.oldState = self.state
                self.state = PrintState.paused
                self.propDlg.setPrintStatus(PrintState.paused)
                self.enableButtonsByState()
            self.reprap.printStopped()
        elif evt.event == RepRapEventEnum.PRINT_STARTED:
            pass
        elif evt.event == RepRapEventEnum.PRINT_RESUMED:
            pass
        elif evt.event == RepRapEventEnum.PRINT_ERROR:
            self.log("Error communicating with printer")
        elif evt.event == RepRapEventEnum.PRINT_SENDGCODE:
            self.log(evt.msg)
        else:
            self.log("unknown reprap event: %s" % str(evt.event))

    def buildModel(self):
        cnc = CNC(self.settings.acceleration, self.settings.layerheight)
        if RECORD_TIMES:
            self.log("recording g code times in /tmp/gcodeTimes")
            fp = open("/tmp/gcodeTimes", "w")
        ln = -1
        for gl in self.gcode:
            ln += 1
            p = re.split("\\s+", gl, 1)
            params = {}
            if not (p[0].strip() in ["M117", "m117"]):
                if len(p) >= 2:
                    self.paramStr = p[1]
                    if "X" in self.paramStr:
                        params["X"] = self._get_float("X")
                    if "Y" in self.paramStr:
                        params["Y"] = self._get_float("Y")
                    if "Z" in self.paramStr:
                        params["Z"] = self._get_float("Z")
                    if "E" in self.paramStr:
                        params["E"] = self._get_float("E")
                    if "F" in self.paramStr:
                        params["F"] = self._get_float("F")
                    if "S" in self.paramStr:
                        params["S"] = self._get_float("S")
                    if "P" in self.paramStr:
                        params["P"] = self._get_float("P")
            t = cnc.execute(p[0], params, ln)
            if RECORD_TIMES:
                fp.write("(%s) (%.3f)\n" % (gl, t))
        if RECORD_TIMES:
            fp.close()
        gobj = cnc.getGObject()
        gobj.setMaxLine(ln)
        self.maxTool = cnc.getMaxTool()
        self.totalTime, self.layerTimes = cnc.getTimes()
        self.layerMap = []
        for lx in range(len(gobj)):
            self.layerMap.append(gobj.getGCodeLines(lx))
        self.totalTimeStr = formatElapsed(self.totalTime)
        self.layerTimeStr = [formatElapsed(s) for s in self.layerTimes]
        return gobj

    def _get_float(self, which):
        try:
            return float(gcRegex.findall(self.paramStr.split(which)[1])[0])
        except:
            self.log("unable to parse float from (%s)" % self.paramStr)

    def enableButtonsByState(self):
        if self.state == PrintState.idle:
            self.bOpen.Enable(True)
            self.setImportFile(self.importFile)
            self.bImportQ.Enable(self.okToImport)
            if self.sdcard:
                self.bSdPrintTo.Enable(self.gcodeLoaded)
                self.bSdPrintFrom.Enable()
                self.bSdDelete.Enable()
            if self.gcodeLoaded:
                self.bPrint.Enable(True)
                self.bPrint.setPrint()
                self.bPause.Enable(False)
                self.bPause.setPause()
            else:
                self.bPrint.Enable(False)
                self.bPause.Enable(False)
        elif self.state in [PrintState.printing, PrintState.sdprintingto]:
            self.bImport.Enable(False)
            self.bImportQ.Enable(False)
            self.bOpen.Enable(False)
            self.bPrint.Enable(False)
            self.bPrint.setPrint()
            self.bPause.Enable(True)
            self.bPause.setPause()
            if self.sdcard:
                self.bSdPrintTo.Enable(False)
                self.bSdPrintFrom.Enable(False)
                self.bSdDelete.Enable(False)
        elif self.state == PrintState.sdprintingfrom:
            # TODO - what makes sense here
            pass
        elif self.state == PrintState.paused:
            self.bOpen.Enable(True)
            self.setImportFile(self.importFile)
            self.bImportQ.Enable(self.okToImport)
            self.bPrint.Enable(True)
            self.bPrint.setRestart()
            self.bPause.Enable(True)
            self.bPause.setResume()
            if self.sdcard:
                self.bSdPrintTo.Enable(self.gcodeLoaded)
                self.bSdPrintFrom.Enable()
                self.bSdDelete.Enable()

    def emulatePrintButton(self):
        if self.state in [PrintState.printing, PrintState.sdprintingto, PrintState.sdprintingfrom]:
            self.log("Already printing")
        elif not self.bPrint.IsEnabled():
            self.log("Unable to print right now")
        else:
            self.onPrint(None)

    def reset(self):
        # TODO - cleanup if was sdprintingfrom
        self.state = PrintState.idle
        self.oldState = None
        self.reprap.suspendTempProbe(False)
        self.setSDTargetFile(None)
        self.propDlg.setPrintStatus(PrintState.idle)
        self.enableButtonsByState()

    def onPrint(self, evt):
        oldState = self.state
        self.state = PrintState.printing
        self.propDlg.setPrintStatus(PrintState.printing)
        self.enableButtonsByState()
        self.printPos = 0
        self.startTime = time.time()
        self.endTime = None
        self.origEta = self.startTime + self.totalTime
        self.elapsed = 0
        self.remaining = self.totalTime
        if oldState == PrintState.paused:
            action = "restarted"
            self.reprap.restartPrint(self.gcode)
        else:
            action = "started"
            self.reprap.startPrint(self.gcode)
            self.history.addEvent(PrintStarted(self.history.addFile(self.gcodeFile), ""))
        stime = time.strftime('%H:%M:%S', time.localtime(self.startTime))
        self.propDlg.setProperty(PropertyEnum.startTime, stime)
        self.propDlg.setProperty(PropertyEnum.origEta, time.strftime('%H:%M:%S', time.localtime(self.origEta)))
        self.propDlg.setProperty(PropertyEnum.elapsed, formatElapsed(self.elapsed))
        self.propDlg.setProperty(PropertyEnum.remaining, formatElapsed(self.remaining))
        self.propDlg.setProperty(PropertyEnum.revisedEta, "")
        self.log("Print %s at %s" % (action, stime))

    def onSdPrintFrom(self, evt):
        print("sd print from")

    def doSDPrintFrom(self, evt):
        self.printing = False
        self.paused = False
        self.sdpaused = False
        self.sdprintingfrom = True
        self.sdStartTime = time.time()
        # self.infoPane.setSDStartTime(self.sdStartTime)
        self.state = PrintState.printing
        # self.propDlg.setPrintStatus(PrintState.printing)
        self.enableButtonsByState()
        self.sdcard.startPrintFromSD()

    def cancelSDPrintFrom(self):
        self.sdprintingfrom = False
        self.printing = False
        self.paused = False
        self.state = PrintState.idle
        # self.propDlg.setPrintStatus(PrintState.printing)
        self.enableButtonsByState()

    def resumeSDPrintFrom(self, fn):
        # self.clearModel()
        self.reprap.sendNow("M23 " + fn[1].lower())
        self.reprap.sendNow("M24")
        self.sdprintingfrom = True
        # self.M27Timer.Start(M27Interval, True)
        self.sdpaused = False
        # self.infoPane.setMode(MODE_FROM_SD)
        self.enableButtonsByState()

    def onSdPrintTo(self, evt):
        self.sdcard.startPrintToSD()

    def resumeSDPrintTo(self, tfn):
        self.setSDTargetFile(tfn[1].lower())
        self.reprap.suspendTempProbe(True)
        self.reprap.sendNow("M28 %s" % self.sdTargetFile)
        self.printPos = 0
        self.startTime = time.time()
        self.endTime = None
        self.reprap.startPrint(self.gcode)
        self.origEta = self.startTime + self.totalTime
        self.elapsed = 0
        self.remaining = self.totalTime
        self.state = PrintState.sdprintingto
        stime = time.strftime('%H:%M:%S', time.localtime(self.startTime))
        self.propDlg.setProperty(PropertyEnum.startTime, stime)
        self.propDlg.setProperty(PropertyEnum.origEta, time.strftime('%H:%M:%S', time.localtime(self.origEta)))
        self.propDlg.setProperty(PropertyEnum.elapsed, formatElapsed(self.elapsed))
        self.propDlg.setProperty(PropertyEnum.remaining, formatElapsed(self.remaining))
        self.propDlg.setProperty(PropertyEnum.revisedEta, "")
        self.log("Print to SD: %s started at %s" % (self.sdTargetFile, stime))
        self.enableButtonsByState()

    def setSDTargetFile(self, tfn):
        self.sdTargetFile = tfn
        self.propDlg.setSDTargetFile(tfn)

    def onSdDelete(self, evt):
        self.sdcard.startDeleteFromSD()

    def emulatePauseButton(self):
        if not self.bPause.IsEnabled():
            self.log("Unable to pause right now")
        else:
            self.onPause(None)

    def onPause(self, evt):
        if self.state == PrintState.paused:
            self.state = self.oldState
            if self.state is None:
                self.state = PrintState.printing
            self.propDlg.setPrintStatus(self.state)
            self.enableButtonsByState()
            self.reprap.resumePrint()
        else:
            self.oldState = self.state
            self.state = PrintState.paused
            self.propDlg.setPrintStatus(PrintState.paused)
            self.enableButtonsByState()
            self.reprap.pausePrint()
jbernardis/repraptoolbox
src/Printer/printmon.py
Python
gpl-3.0
26,866
0.039902
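updatePrintPosition splits its time estimates into whole finished layers plus a done-fraction of the layer in progress (sumLayerTimes plus partialPrintingLayer). The arithmetic, isolated as a runnable sketch with invented numbers:

layer_times = [120.0, 90.0, 90.0, 150.0]      # seconds per layer (made up)
print_layer = 2                               # layer currently printing
frac_done = 0.25                              # fraction of that layer finished

# expected elapsed = all earlier layers + the finished slice of this one
expected_elapsed = sum(layer_times[:print_layer]) + frac_done * layer_times[print_layer]
# remaining = all later layers + the unfinished slice of this one
remaining = sum(layer_times[print_layer + 1:]) + (1.0 - frac_done) * layer_times[print_layer]
assert expected_elapsed + remaining == sum(layer_times)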
"""empty message Revision ID: 8e7f7864cb60 Revises: ('80a704b880db', 'adf34c11b0df') Create Date: 2016-06-19 15:43:23.027000 """ # revision identifiers, used by Alembic. revision = '8e7f7864cb60' down_revision = ('80a704b880db', 'adf34c11b0df') from alembic import op import sqlalchemy as sa import sqlalchemy_utils def upgrade(): pass def downgrade(): pass
Achint08/open-event-orga-server
migrations/versions/8e7f7864cb60_.py
Python
gpl-3.0
374
0.008021
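A tuple-valued down_revision marks this as an Alembic merge revision: it joins the two branch heads named above, and merge points normally carry no schema work, which is why both functions pass. If a change were needed, it would use the usual Alembic operations; the table and column below are hypothetical, purely for illustration:

def upgrade():
    # hypothetical schema change; merge revisions are usually left empty
    op.add_column('events', sa.Column('slug', sa.String(length=255), nullable=True))

def downgrade():
    op.drop_column('events', 'slug')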
# -*- coding: utf-8 -*-

'''
    FanFilm Add-on
    Copyright (C) 2016 mrknow

    This program is free software: you can redistribute it and/or modify
    it under the terms of the GNU General Public License as published by
    the Free Software Foundation, either version 3 of the License, or
    (at your option) any later version.

    This program is distributed in the hope that it will be useful,
    but WITHOUT ANY WARRANTY; without even the implied warranty of
    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
    GNU General Public License for more details.

    You should have received a copy of the GNU General Public License
    along with this program.  If not, see <http://www.gnu.org/licenses/>.
'''


import re, urllib, urlparse, json, base64

from resources.lib.libraries import cleantitle
from resources.lib.libraries import client
from resources.lib.libraries import control
from resources.lib import resolvers


class source:
    def __init__(self):
        self.base_link = 'http://segos.es'
        self.search_link = '/?search=%s'
        # self.episode_link = '-Season-%01d-Episode-%01d'

    def get_movie(self, imdb, title, year):
        try:
            query = self.search_link % (urllib.quote_plus(cleantitle.query2(title)))
            query = urlparse.urljoin(self.base_link, query)

            result = client.request(query)
            title = cleantitle.movie(title)

            result = client.parseDOM(result, 'div', attrs={'style': 'overflow: hidden; margin-top: 15px;'})
            result = [(client.parseDOM(i, 'a', ret='href')[0],
                       client.parseDOM(i, 'a')[1],
                       str(re.findall(r"(\d{4})", client.parseDOM(i, 'a')[1])[0])) for i in result]
            years = ['%s' % str(year), '%s' % str(int(year)+1), '%s' % str(int(year)-1)]
            result = [i for i in result if title in cleantitle.movie(i[1])]
            result = [i[0] for i in result if any(x in i[2] for x in years)][0]

            try: url = re.compile('//.+?(/.+)').findall(result)[0]
            except: url = result
            url = client.replaceHTMLCodes(url)
            url = url.encode('utf-8')
            control.log('Segos URL %s' % url)
            return url
        except:
            return

    def get_show(self, imdb, tvdb, tvshowtitle, year):
        try:
            query = self.moviesearch_link % (urllib.unquote(tvshowtitle))
            query = urlparse.urljoin(self.base_link, query)

            result = client.source(query)
            result = json.loads(result)

            tvshowtitle = cleantitle.tv(tvshowtitle)
            years = ['%s' % str(year), '%s' % str(int(year)+1), '%s' % str(int(year)-1)]
            result = [(client.parseDOM(i, 'a', ret='href')[0],
                       client.parseDOM(i, 'h2', ret='title')[0],
                       client.parseDOM(i, 'span', attrs={'itemprop': 'copyrightYear'})) for i in result]
            result = [i for i in result if len(i[2]) > 0]
            result = [i for i in result if tvshowtitle == cleantitle.tv(i[1])]
            result = [i[0] for i in result if any(x in i[2][0] for x in years)][0]

            try: url = re.compile('//.+?(/.+)').findall(result)[0]
            except: url = result
            url = client.replaceHTMLCodes(url)
            url = url.encode('utf-8')
            return url
        except:
            return

    def get_episode(self, url, imdb, tvdb, title, date, season, episode):
        if url == None: return
        url += self.episode_link % (int(season), int(episode))
        url = client.replaceHTMLCodes(url)
        url = url.encode('utf-8')
        return url

    def get_sources(self, url, hosthdDict, hostDict, locDict):
        try:
            sources = []
            if url == None: return sources
            url = urlparse.urljoin(self.base_link, url)
            result = client.request(url)
            vtype = re.findall('<div class="col-lg-9 col-md-9 col-sm-9">\s.*<b>Język</b>:(.*?)\.*</div>', result)[0].strip()
            q = re.findall('<div class="col-lg-9 col-md-9 col-sm-9">\s.*<b>Jakość</b>:(.*?)\.*</div>', result)[0].strip()
            quality = 'SD'
            if '720' in q: quality = 'HD'
            if '1080' in q: quality = '1080p'

            links = client.parseDOM(result, 'div', attrs={'id': 'Film'})
            links = [client.parseDOM(i, 'a', ret='href', attrs={'target': '_blank'})[0] for i in links]
            for i in links:
                try:
                    host = urlparse.urlparse(i).netloc
                    host = host.split('.')
                    host = host[-2] + "." + host[-1]
                    host = host.lower()
                    host = client.replaceHTMLCodes(host)
                    host = host.encode('utf-8')
                    sources.append({'source': host, 'quality': quality, 'provider': 'SEGOS', 'url': i, 'vtype': vtype})
                except:
                    pass
            return sources
        except:
            return sources

    def resolve(self, url):
        control.log('CDA-ONLINE RESOLVE URL %s' % url)
        try:
            url = resolvers.request(url)
            return url
        except:
            return
mrknow/filmkodi
plugin.video.fanfilm/resources/lib/sources/segos_mv.py
Python
apache-2.0
5,195
0.009438
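get_sources normalizes every link to a bare domain.tld before reporting the hoster. That step in isolation (the record uses Python 2's urlparse module; the Python 3 import and the sample URL below are substitutions for illustration):

from urllib.parse import urlparse

link = "http://www.example-host.com/watch/abc"   # made-up URL
parts = urlparse(link).netloc.split('.')
host = (parts[-2] + "." + parts[-1]).lower()      # drop subdomains, keep domain.tld
assert host == "example-host.com"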
# -*- coding: utf-8 -*-

"""
***************************************************************************
    FindProjection.py
    -----------------
    Date                 : February 2017
    Copyright            : (C) 2017 by Nyall Dawson
    Email                : nyall dot dawson at gmail dot com
***************************************************************************
*                                                                         *
*   This program is free software; you can redistribute it and/or modify  *
*   it under the terms of the GNU General Public License as published by  *
*   the Free Software Foundation; either version 2 of the License, or     *
*   (at your option) any later version.                                   *
*                                                                         *
***************************************************************************
"""

__author__ = 'Nyall Dawson'
__date__ = 'February 2017'
__copyright__ = '(C) 2017, Nyall Dawson'

import os

from qgis.core import (QgsGeometry,
                       QgsFeature,
                       QgsFeatureSink,
                       QgsField,
                       QgsFields,
                       QgsCoordinateReferenceSystem,
                       QgsCoordinateTransform,
                       QgsCoordinateTransformContext,
                       QgsWkbTypes,
                       QgsProcessingException,
                       QgsProcessingParameterFeatureSource,
                       QgsProcessingParameterExtent,
                       QgsProcessingParameterCrs,
                       QgsProcessingParameterFeatureSink,
                       QgsProcessingParameterDefinition)
from qgis.PyQt.QtCore import QVariant

from processing.algs.qgis.QgisAlgorithm import QgisAlgorithm

pluginPath = os.path.split(os.path.split(os.path.dirname(__file__))[0])[0]


class FindProjection(QgisAlgorithm):

    INPUT = 'INPUT'
    TARGET_AREA = 'TARGET_AREA'
    TARGET_AREA_CRS = 'TARGET_AREA_CRS'
    OUTPUT = 'OUTPUT'

    def tags(self):
        return self.tr('crs,srs,coordinate,reference,system,guess,estimate,finder,determine').split(',')

    def group(self):
        return self.tr('Vector general')

    def groupId(self):
        return 'vectorgeneral'

    def __init__(self):
        super().__init__()

    def initAlgorithm(self, config=None):
        self.addParameter(QgsProcessingParameterFeatureSource(self.INPUT,
                                                              self.tr('Input layer')))
        extent_parameter = QgsProcessingParameterExtent(self.TARGET_AREA,
                                                        self.tr('Target area for layer'))
        self.addParameter(extent_parameter)

        # deprecated
        crs_param = QgsProcessingParameterCrs(self.TARGET_AREA_CRS, 'Target area CRS', optional=True)
        crs_param.setFlags(crs_param.flags() | QgsProcessingParameterDefinition.FlagHidden)
        self.addParameter(crs_param)

        self.addParameter(QgsProcessingParameterFeatureSink(self.OUTPUT,
                                                            self.tr('CRS candidates')))

    def name(self):
        return 'findprojection'

    def displayName(self):
        return self.tr('Find projection')

    def processAlgorithm(self, parameters, context, feedback):
        source = self.parameterAsSource(parameters, self.INPUT, context)
        if source is None:
            raise QgsProcessingException(self.invalidSourceError(parameters, self.INPUT))

        extent = self.parameterAsExtent(parameters, self.TARGET_AREA, context)
        target_crs = self.parameterAsExtentCrs(parameters, self.TARGET_AREA, context)
        if self.TARGET_AREA_CRS in parameters:
            c = self.parameterAsCrs(parameters, self.TARGET_AREA_CRS, context)
            if c.isValid():
                target_crs = c

        target_geom = QgsGeometry.fromRect(extent)

        fields = QgsFields()
        fields.append(QgsField('auth_id', QVariant.String, '', 20))

        (sink, dest_id) = self.parameterAsSink(parameters, self.OUTPUT, context,
                                               fields, QgsWkbTypes.NoGeometry, QgsCoordinateReferenceSystem())
        if sink is None:
            raise QgsProcessingException(self.invalidSinkError(parameters, self.OUTPUT))

        # make intersection tests nice and fast
        engine = QgsGeometry.createGeometryEngine(target_geom.constGet())
        engine.prepareGeometry()

        layer_bounds = QgsGeometry.fromRect(source.sourceExtent())

        crses_to_check = QgsCoordinateReferenceSystem.validSrsIds()
        total = 100.0 / len(crses_to_check)

        found_results = 0

        transform_context = QgsCoordinateTransformContext()
        for current, srs_id in enumerate(crses_to_check):
            if feedback.isCanceled():
                break

            candidate_crs = QgsCoordinateReferenceSystem.fromSrsId(srs_id)
            if not candidate_crs.isValid():
                continue

            transform_candidate = QgsCoordinateTransform(candidate_crs, target_crs, transform_context)
            transformed_bounds = QgsGeometry(layer_bounds)
            try:
                if not transformed_bounds.transform(transform_candidate) == 0:
                    continue
            except:
                continue

            try:
                if engine.intersects(transformed_bounds.constGet()):
                    feedback.pushInfo(self.tr('Found candidate CRS: {}').format(candidate_crs.authid()))
                    f = QgsFeature(fields)
                    f.setAttributes([candidate_crs.authid()])
                    sink.addFeature(f, QgsFeatureSink.FastInsert)
                    found_results += 1
            except:
                continue

            feedback.setProgress(int(current * total))

        if found_results == 0:
            feedback.reportError(self.tr('No matching projections found'))

        return {self.OUTPUT: dest_id}
SrNetoChan/QGIS
python/plugins/processing/algs/qgis/FindProjection.py
Python
gpl-2.0
6,023
0.00249
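The hot loop above prepares the target geometry once, then probes every candidate CRS with a cheap prepared-intersection test. The core of one iteration, pulled out as a sketch (same QGIS classes as imported in the record; this only runs inside a QGIS environment, and target_geom, layer_bounds, candidate_crs and target_crs are assumed to exist as in processAlgorithm):

engine = QgsGeometry.createGeometryEngine(target_geom.constGet())
engine.prepareGeometry()                  # pay the geometry-indexing cost once

xform = QgsCoordinateTransform(candidate_crs, target_crs, QgsCoordinateTransformContext())
bounds = QgsGeometry(layer_bounds)        # copy first: transform() mutates in place
if bounds.transform(xform) == 0 and engine.intersects(bounds.constGet()):
    print(candidate_crs.authid())         # this candidate survives the extent test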
""" This page is in the table of contents. Temperature is a script to set the temperature for the object and raft. ==Operation== The default 'Activate Temperature' checkbox is on. When it is on, the functions described below will work, when it is off, the functions will not be called. ==Settings== ===Rate=== The default cooling rate and heating rate for the extruder were both been derived from bothacker's graph at: http://bothacker.com/wp-content/uploads/2009/09/18h5m53s9.29.2009.png ====Cooling Rate==== Default is three degrees Celcius per second. Defines the cooling rate of the extruder. ====Heating Rate==== Default is ten degrees Celcius per second. Defines the heating rate of the extruder. ===Temperature=== ====Base Temperature==== Default for ABS is two hundred degrees Celcius. Defines the raft base temperature. ====Interface Temperature==== Default for ABS is two hundred degrees Celcius. Defines the raft interface temperature. ====Object First Layer Infill Temperature==== Default for ABS is 195 degrees Celcius. Defines the infill temperature of the first layer of the object. ====Object First Layer Perimeter Temperature==== Default for ABS is two hundred and twenty degrees Celcius. Defines the perimeter temperature of the first layer of the object. ====Object Next Layers Temperature==== Default for ABS is two hundred and thirty degrees Celcius. Defines the temperature of the next layers of the object. ====Support Layers Temperature==== Default for ABS is two hundred degrees Celcius. Defines the support layers temperature. ====Supported Layers Temperature==== Default for ABS is two hundred and thirty degrees Celcius. Defines the temperature of the supported layers of the object, those layers which are right above a support layer. ==Examples== The following examples add temperature information to the file Screw Holder Bottom.stl. The examples are run in a terminal in the folder which contains Screw Holder Bottom.stl and temperature.py. > python temperature.py This brings up the temperature dialog. > python temperature.py Screw Holder Bottom.stl The temperature tool is parsing the file: Screw Holder Bottom.stl .. The temperature tool has created the file: .. Screw Holder Bottom_temperature.gcode > python Python 2.5.1 (r251:54863, Sep 22 2007, 01:43:31) [GCC 4.2.1 (SUSE Linux)] on linux2 Type "help", "copyright", "credits" or "license" for more information. >>> import temperature >>> temperature.main() This brings up the temperature dialog. >>> temperature.writeOutput('Screw Holder Bottom.stl') The temperature tool is parsing the file: Screw Holder Bottom.stl .. The temperature tool has created the file: .. Screw Holder Bottom_temperature.gcode """ from __future__ import absolute_import #Init has to be imported first because it has code to workaround the python bug where relative imports don't work if the module is imported as a main module. 
import __init__ from fabmetheus_utilities.fabmetheus_tools import fabmetheus_interpret from fabmetheus_utilities import archive from fabmetheus_utilities import euclidean from fabmetheus_utilities import gcodec from fabmetheus_utilities import intercircle from fabmetheus_utilities import settings from skeinforge_application.skeinforge_utilities import skeinforge_craft from skeinforge_application.skeinforge_utilities import skeinforge_polyfile from skeinforge_application.skeinforge_utilities import skeinforge_profile import math import sys __author__ = 'Enrique Perez (perez_enrique@yahoo.com)' __date__ = '$Date: 2008/21/04 $' __license__ = 'GPL 3.0' def getCraftedText( fileName, text = '', repository=None): "Temperature the file or text." return getCraftedTextFromText( archive.getTextIfEmpty( fileName, text ), repository ) def getCraftedTextFromText(gcodeText, repository=None): "Temperature a gcode linear move text." if gcodec.isProcedureDoneOrFileIsEmpty( gcodeText, 'temperature'): return gcodeText if repository == None: repository = settings.getReadRepository( TemperatureRepository() ) if not repository.activateTemperature.value: return gcodeText return TemperatureSkein().getCraftedGcode(gcodeText, repository) def getNewRepository(): "Get the repository constructor." return TemperatureRepository() def writeOutput(fileName=''): "Temperature a gcode linear move file." fileName = fabmetheus_interpret.getFirstTranslatorFileNameUnmodified(fileName) if fileName != '': skeinforge_craft.writeChainTextWithNounMessage( fileName, 'temperature') class TemperatureRepository: "A class to handle the temperature settings." def __init__(self): "Set the default settings, execute title & settings fileName." skeinforge_profile.addListsToCraftTypeRepository('skeinforge_application.skeinforge_plugins.craft_plugins.temperature.html', self ) self.fileNameInput = settings.FileNameInput().getFromFileName( fabmetheus_interpret.getGNUTranslatorGcodeFileTypeTuples(), 'Open File for Temperature', self, '') self.activateTemperature = settings.BooleanSetting().getFromValue('Activate Temperature:', self, True ) settings.LabelSeparator().getFromRepository(self) settings.LabelDisplay().getFromName('- Rate -', self ) self.coolingRate = settings.FloatSpin().getFromValue( 1.0, 'Cooling Rate (Celcius/second):', self, 20.0, 3.0 ) self.heatingRate = settings.FloatSpin().getFromValue( 1.0, 'Heating Rate (Celcius/second):', self, 20.0, 10.0 ) settings.LabelSeparator().getFromRepository(self) settings.LabelDisplay().getFromName('- Temperature -', self ) self.baseTemperature = settings.FloatSpin().getFromValue( 140.0, 'Base Temperature (Celcius):', self, 260.0, 200.0 ) self.interfaceTemperature = settings.FloatSpin().getFromValue( 140.0, 'Interface Temperature (Celcius):', self, 260.0, 200.0 ) self.objectFirstLayerInfillTemperature = settings.FloatSpin().getFromValue( 140.0, 'Object First Layer Infill Temperature (Celcius):', self, 260.0, 195.0 ) self.objectFirstLayerPerimeterTemperature = settings.FloatSpin().getFromValue( 140.0, 'Object First Layer Perimeter Temperature (Celcius):', self, 260.0, 220.0 ) self.objectNextLayersTemperature = settings.FloatSpin().getFromValue( 140.0, 'Object Next Layers Temperature (Celcius):', self, 260.0, 230.0 ) self.supportLayersTemperature = settings.FloatSpin().getFromValue( 140.0, 'Support Layers Temperature (Celcius):', self, 260.0, 200.0 ) self.supportedLayersTemperature = settings.FloatSpin().getFromValue( 140.0, 'Supported Layers Temperature (Celcius):', self, 260.0, 230.0 ) 
self.executeTitle = 'Temperature' def execute(self): "Temperature button has been clicked." fileNames = skeinforge_polyfile.getFileOrDirectoryTypesUnmodifiedGcode(self.fileNameInput.value, fabmetheus_interpret.getImportPluginFileNames(), self.fileNameInput.wasCancelled) for fileName in fileNames: writeOutput(fileName) class TemperatureSkein: "A class to temperature a skein of extrusions." def __init__(self): self.distanceFeedRate = gcodec.DistanceFeedRate() self.lineIndex = 0 self.lines = None def getCraftedGcode(self, gcodeText, repository): "Parse gcode text and store the temperature gcode." self.repository = repository self.lines = archive.getTextLines(gcodeText) if self.repository.coolingRate.value < 0.1: print('The cooling rate should be more than 0.1, any cooling rate less than 0.1 will be treated as 0.1.') self.repository.coolingRate.value = 0.1 if self.repository.heatingRate.value < 0.1: print('The heating rate should be more than 0.1, any heating rate less than 0.1 will be treated as 0.1.') self.repository.heatingRate.value = 0.1 self.parseInitialization() self.distanceFeedRate.addLines( self.lines[self.lineIndex :] ) return self.distanceFeedRate.output.getvalue() def parseInitialization(self): 'Parse gcode initialization and store the parameters.' for self.lineIndex in xrange(len(self.lines)): line = self.lines[self.lineIndex] splitLine = gcodec.getSplitLineBeforeBracketSemicolon(line) firstWord = gcodec.getFirstWord(splitLine) self.distanceFeedRate.parseSplitLine(firstWord, splitLine) if firstWord == '(</extruderInitialization>)': self.distanceFeedRate.addLine('(<procedureDone> temperature </procedureDone>)') return elif firstWord == '(<perimeterWidth>': self.distanceFeedRate.addTagBracketedLine('coolingRate', self.repository.coolingRate.value ) self.distanceFeedRate.addTagBracketedLine('heatingRate', self.repository.heatingRate.value ) self.distanceFeedRate.addTagBracketedLine('baseTemperature', self.repository.baseTemperature.value ) self.distanceFeedRate.addTagBracketedLine('interfaceTemperature', self.repository.interfaceTemperature.value ) self.distanceFeedRate.addTagBracketedLine('objectFirstLayerInfillTemperature', self.repository.objectFirstLayerInfillTemperature.value ) self.distanceFeedRate.addTagBracketedLine('objectFirstLayerPerimeterTemperature', self.repository.objectFirstLayerPerimeterTemperature.value ) self.distanceFeedRate.addTagBracketedLine('objectNextLayersTemperature', self.repository.objectNextLayersTemperature.value ) self.distanceFeedRate.addTagBracketedLine('supportLayersTemperature', self.repository.supportLayersTemperature.value ) self.distanceFeedRate.addTagBracketedLine('supportedLayersTemperature', self.repository.supportedLayersTemperature.value ) self.distanceFeedRate.addLine(line) def main(): "Display the temperature dialog." if len(sys.argv) > 1: writeOutput(' '.join(sys.argv[1 :])) else: settings.startMainLoopFromConstructor( getNewRepository() ) if __name__ == "__main__": main()
makerbot/ReplicatorG
skein_engines/skeinforge-35/skeinforge_application/skeinforge_plugins/craft_plugins/temperature.py
Python
gpl-2.0
9,647
0.017829
#PyInterstate by @pygeek

import urllib2
import json


class InterstateError(Exception):
    """Base class for Interstate App Exceptions."""
    pass


class AuthError(InterstateError):
    """Exception raised upon authentication errors."""
    pass


class IdError(InterstateError):
    """Raised when an operation attempts to query an Interstate \
    Road or Roadmap that does not exist.
    """
    pass


class InterstateApp(object):
    """Pythonic Interstate App API Wrapper (http://interstateapp.com)

    Requires:
        -Python 2.6+
    """
    __version__ = "0.2.0"

    def __init__(self):
        self.protocol = "http://"
        self.base_url = "interstateapp.com"
        self.api_version = "v1"
        public_key = "public-key"
        private_key = "private-key"

        # Installing opener authentication for inevitable,
        # subsequent requests.
        # create a password manager
        password_mgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
        password_mgr.add_password(None, self.protocol + self.base_url,
                                  public_key, private_key)
        handler = urllib2.HTTPBasicAuthHandler(password_mgr)
        # create "opener" (OpenerDirector instance)
        urlopener = urllib2.build_opener(handler)
        urllib2.install_opener(urlopener)

    def auth_test(self):
        """Authenticate Interstate App credentials; *public_key* : *private_key*."""
        listAll_url = "{0}{1}/api/{2}/roadmap/listAll" \
            .format(self.protocol, self.base_url, self.api_version)
        try:
            urllib2.urlopen(listAll_url)
        except:
            raise AuthError("Authentication Error: Please verify credentials.")
        return True

    def id_test(self, roadmap_id=None, road_id=None):
        """Verify whether Road or Roadmap Id exists."""
        roadmap_get = "{0}{1}/api/{2}/roadmap/get/id/{3}" \
            .format(self.protocol, self.base_url, self.api_version, roadmap_id)
        road_get = "{0}{1}/api/{2}/road/get/id/{3}" \
            .format(self.protocol, self.base_url, self.api_version, road_id)
        if roadmap_id:
            try:
                urllib2.urlopen(roadmap_get)
            except:
                raise IdError("Id Error: Roadmap Id \"{0}\" does not exist."
                              .format(roadmap_id))
            return True
        elif road_id:
            try:
                urllib2.urlopen(road_get)
            except:
                raise IdError("Id Error: Road Id \"{0}\" does not exist."
                              .format(road_id))
            return True


class Roadmap(InterstateApp):
    """Contains methods for the Roadmap object."""

    def get(self, roadmap_id):
        """roadmap/get: Retrieve information regarding a specific \
        Interstate roadmap.

        Parameters:
            - id(Roadmap ID)
              The unique id of the Interstate roadmap.

        Example Request:
            http://interstateapp.com/api/v1/roadmap/get/id/ \
            4c2d3b5f8ead0ec070010000

        Outputs: JSON

        See: http://interstateapp.com/developers/method/0/0
        """
        if self.auth_test() and self.id_test(roadmap_id=roadmap_id):
            get_url = "{0}{1}/api/{2}/roadmap/get/id/{3}" \
                .format(self.protocol, self.base_url, self.api_version,
                        roadmap_id)
            roadmap = urllib2.urlopen(get_url)
            roadmap = roadmap.read()
            return json.loads(roadmap)
        else:
            return False

    def listAll(self):
        """roadmap/listAll: List all Interstate roadmaps associated with the \
        used API Key.

        Parameters: None

        Example Request:
            http://interstateapp.com/api/v1/roadmap/listAll

        Outputs: JSON

        See: http://interstateapp.com/developers/method/0/1
        """
        if self.auth_test():
            listAll_url = "{0}{1}/api/{2}/roadmap/listAll" \
                .format(self.protocol, self.base_url, self.api_version)
            roadmap = urllib2.urlopen(listAll_url)
            roadmap = roadmap.read()
            return json.loads(roadmap)
        else:
            return False

    def roads(self, roadmap_id):
        """roadmap/roads: List all roads attached to the specific Interstate roadmap.

        Parameters:
            - id(Roadmap ID)
              The unique id of the Interstate roadmap.

        Example Request:
            http://interstateapp.com/api/v1/roadmap/roads/id/ \
            4c2d3b5f8ead0ec070010000

        Outputs: JSON

        See: http://interstateapp.com/developers/method/0/2
        """
        if self.auth_test() and self.id_test(roadmap_id=roadmap_id):
            roads_url = "{0}{1}/api/{2}/roadmap/roads/id/{3}" \
                .format(self.protocol, self.base_url, self.api_version,
                        roadmap_id)
            roadmap = urllib2.urlopen(roads_url)
            roadmap = roadmap.read()
            return json.loads(roadmap)
        else:
            return False


class Road(InterstateApp):
    """Contains methods for the Road object."""

    def get(self, road_id):
        """road/get: Retrieve information regarding a specific Interstate road.

        Parameters:
            - id(Road ID)
              The unique id of the Interstate road.

        Example Request:
            http://interstateapp.com/api/v1/road/get/id/ \
            4c2d3b5f8ead0ec070010000

        Outputs: JSON

        See: http://interstateapp.com/developers/method/1/0
        """
        if self.auth_test() and self.id_test(road_id=road_id):
            get_url = "{0}{1}/api/{2}/road/get/id/{3}" \
                .format(self.protocol, self.base_url, self.api_version,
                        road_id)
            road = urllib2.urlopen(get_url)
            road = road.read()
            return json.loads(road)
        else:
            return False

    def updates(self, road_id):
        """road/updates: Retrieve updates attached to a specific Interstate road.

        Parameters:
            - id(Road ID)
              The unique id of the Interstate road.

        Example Request:
            http://interstateapp.com/api/v1/road/updates/id/ \
            4c2d3b5f8ead0ec070010000

        Outputs: JSON

        See: http://interstateapp.com/developers/method/1/1
        """
        if self.auth_test() and self.id_test(road_id=road_id):
            updates_url = "{0}{1}/api/{2}/road/updates/id/{3}" \
                .format(self.protocol, self.base_url, self.api_version,
                        road_id)
            road = urllib2.urlopen(updates_url)
            road = road.read()
            return json.loads(road)
        else:
            return False
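# Editor's addition: hedged usage sketch; the roadmap id below is the
# placeholder value from the docstrings above, not a real id.
def _pyinterstate_usage_sketch():
    'Editor sketch: list roadmaps, then fetch the roads of one of them.'
    roadmaps = Roadmap().listAll()   # JSON-decoded list (constructor installs auth)
    roads = Roadmap().roads('4c2d3b5f8ead0ec070010000')
    return roadmaps, roads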
pygeek/PyInterstate
PyInterstate.py
Python
gpl-3.0
6,996
0.008719
from yum.plugins import PluginYumExit, TYPE_CORE, TYPE_INTERACTIVE

try:
    import json
except ImportError:
    import simplejson as json

requires_api_version = '2.5'
plugin_type = (TYPE_INTERACTIVE,)


def config_hook(conduit):
    parser = conduit.getOptParser()
    parser.add_option('', '--json', dest='json', action='store_true',
                      default=False,
                      help="show pending package changes as JSON")


def postresolve_hook(conduit):
    opts, commands = conduit.getCmdLine()

    if opts.json:
        packages = {}
        for transaction in conduit.getTsInfo():
            if transaction.name not in packages:
                packages[transaction.name] = {}

            version = {
                "version": transaction.version,
                "release": transaction.release,
                "epoch": transaction.epoch,
                "arch": transaction.arch,
                "state": transaction.ts_state,
                "repo": getattr(transaction.po, 'repoid')
            }

            if transaction.ts_state:
                packages[transaction.name]["pending"] = version
            else:
                packages[transaction.name]["current"] = version

        print(json.dumps(packages))
        raise PluginYumExit('')
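# Editor's addition: illustrative invocation and output shape. The values are
# hypothetical; the fields simply mirror the `version` dict built above.
#
#     yum --json update
#     {"bash": {"pending": {"version": "4.3", "release": "1", "epoch": "0",
#                           "arch": "x86_64", "state": "u", "repo": "base"}}}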
deanwilson/yum-transaction-json
transaction-json.py
Python
gpl-3.0
1,282
0
# Log into the site with your browser, obtain the "Cookie" header,
# and put it here
cookie = ''
jonmsawyer/site-tools
flgetpics/cookie.py
Python
mit
97
0
from django.forms import *
from django.forms.formsets import BaseFormSet
from django.utils.translation import ugettext_lazy as _
from django.contrib.sites.models import Site
from tradeschool.models import *


class DefaultBranchForm(Form):
    def __init__(self, user, redirect_to, *args, **kwargs):
        super(DefaultBranchForm, self).__init__(*args, **kwargs)

        if user.is_superuser:
            branches = Branch.objects.all()
        else:
            branches = Branch.objects.filter(
                pk__in=user.branches_organized.all())

        choices = [(o.id, unicode(o.title)) for o in branches]

        self.fields['default_branch'] = forms.ChoiceField(choices=choices)

        if user.default_branch:
            self.initial['default_branch'] = user.default_branch.pk
        self.initial['organizer_id'] = user.pk
        self.initial['redirect_to'] = redirect_to

    default_branch = forms.ChoiceField()
    organizer_id = forms.IntegerField(widget=forms.HiddenInput)
    redirect_to = forms.CharField(widget=forms.HiddenInput)


class TimeModelChoiceField(forms.ModelChoiceField):
    def label_from_instance(self, obj):
        from django.utils import timezone

        current_tz = timezone.get_current_timezone()
        date = obj.start_time.astimezone(current_tz).strftime('%A, %b %d')
        time = obj.start_time.astimezone(current_tz).strftime(
            '%I:%M%p').lstrip('0').lower()
        if obj.venue is not None:
            return "%s %s at %s" % (date, time, obj.venue)
        return "%s %s" % (date, time)


class TimeSelectionForm(Form):
    """A simple dropdown menu for teachers to select an available time
    when submitting a class. Uses the Time model.
    """
    time = TimeModelChoiceField(
        queryset=Time.objects.all(),
        error_messages={'required': _('Please select a time'), }
    )


class BranchForm(ModelForm):
    def __init__(self, *args, **kwargs):
        super(BranchForm, self).__init__(*args, **kwargs)

        self.fields['city'].error_messages['required'] = _(
            "Please enter a city")
        self.fields['country'].error_messages['required'] = _(
            "Please enter a country")
        self.initial['site'] = Site.objects.get_current()

    class Meta:
        model = Branch
        fields = (
            'city',
            'state',
            'country',
        )


class TeacherForm(ModelForm):
    def __init__(self, *args, **kwargs):
        "Sets custom meta data to the form's fields"
        super(ModelForm, self).__init__(*args, **kwargs)

        self.fields['fullname'].error_messages['required'] = _(
            "Please enter your name")
        self.fields['email'].error_messages['required'] = _(
            "Please enter your email")
        self.fields['bio'].error_messages['required'] = _(
            "Please tell us about yourself")
        self.fields['phone'].error_messages['required'] = _(
            "Please enter phone number")

    class Meta:
        model = Person
        fields = ('fullname', 'email', 'phone', 'bio', 'website')

    # since bio is set to blank=True in the Person model
    # to accommodate students, we're setting it here manually.
    bio = forms.CharField(
        required=True,
        label=_("A few sentences about you"),
        help_text=_("For prospective students to see on the website"),
        widget=forms.Textarea
    )


class OrganizerForm(TeacherForm):
    "A TeacherForm variant for people applying to organize a new Trade School."
    def __init__(self, *args, **kwargs):
        "Sets custom meta data to the form's fields"
        super(TeacherForm, self).__init__(*args, **kwargs)

        self.fields['fullname'].error_messages['required'] = _(
            "Please enter your name")
        self.fields['email'].error_messages['required'] = _(
            "Please enter your email")
        self.fields['names_of_co_organizers'].error_messages['required'] = _(
            "Please enter the names of at least one or two more organizers")
        self.fields['bio'].error_messages['required'] = _(
            "Please tell us about why you would like to open a Trade School in your area")

    class Meta:
        model = Person
        fields = (
            'fullname',
            'names_of_co_organizers',
            'email',
            'bio',
        )

    # since names_of_co_organizers is set to blank=True in the Person model
    # to accommodate students and teachers, we're setting it here manually.
    names_of_co_organizers = forms.CharField(
        required=True,
        label=_("Names of Co-Organizers"),
    )
    bio = forms.CharField(
        required=True,
        label=_("A few sentences about why your group wants to open a Trade School"),
        widget=forms.Textarea
    )


class CourseForm(ModelForm):
    def __init__(self, *args, **kwargs):
        "Sets custom meta data to the form's fields"
        super(ModelForm, self).__init__(*args, **kwargs)

        self.fields['title'].error_messages['required'] = _(
            "Please enter a class title")
        self.fields['description'].error_messages['required'] = _(
            "Please enter a class description")
        self.fields['max_students'].error_messages['required'] = _(
            "Please enter the maximum number of students in your class")

    class Meta:
        model = Course
        fields = ('title', 'description', 'max_students')


class BarterItemForm(ModelForm):
    def __init__(self, *args, **kwargs):
        "Sets custom meta data to the form's fields"
        super(ModelForm, self).__init__(*args, **kwargs)

        self.fields['title'].widget.attrs['class'] = 'barter_item'
        self.fields['title'].error_messages['required'] = _(
            "Barter item cannot be blank")

    class Meta:
        model = BarterItem
        fields = ('title',)


class BaseBarterItemFormSet(BaseFormSet):
    def __init__(self, branch, *args, **kwargs):
        "Stores the branch so clean() can read its minimum barter item count."
        self.branch = branch
        super(BaseBarterItemFormSet, self).__init__(*args, **kwargs)

    def clean(self):
        "Checks that at least the branch's minimum number of barter item forms are filled."
        count = 0
        required = self.branch.min_barteritems

        if any(self.errors):
            return

        for form in self.forms:
            if form.is_bound:
                if form['title'].data:
                    count += 1

        if count < required:
            raise forms.ValidationError(
                _("Please add at least %i barter items" % required)
            )


class RegistrationForm(ModelForm):
    def __init__(self, course, *args, **kwargs):
        super(RegistrationForm, self).__init__(*args, **kwargs)

        self.fields['items'].queryset = BarterItem.objects.filter(
            course=course)
        self.fields['items'].error_messages['required'] = _(
            "Please select at least one item")
        self.fields['items'].empty_label = None

    class Meta:
        model = Registration
        fields = ('items', )
        widgets = {'items': CheckboxSelectMultiple(), }


class StudentForm(ModelForm):
    def __init__(self, *args, **kwargs):
        super(StudentForm, self).__init__(*args, **kwargs)

        self.fields['fullname'].error_messages['required'] = _(
            "Please enter your name")
        self.fields['email'].error_messages['required'] = _(
            "Please enter your email")
        self.fields['phone'].error_messages['required'] = _(
            "Please enter your phone number")

    class Meta:
        model = Person
        fields = ('fullname', 'email', 'phone')


class FeedbackForm(ModelForm):
    def __init__(self, *args, **kwargs):
        super(FeedbackForm, self).__init__(*args, **kwargs)

        self.fields['content'].error_messages['required'] = _(
            "Please enter your feedback")

    class Meta:
        model = Feedback
        fields = ('content',)
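# Editor's addition: hedged sketch of wiring up the barter-item formset above;
# `branch` and `data` are assumed to come from the calling view.
def _barter_formset_sketch(branch, data=None):
    'Editor sketch: build a formset that enforces branch.min_barteritems.'
    from django.forms.formsets import formset_factory
    BarterItemFormSet = formset_factory(
        BarterItemForm, formset=BaseBarterItemFormSet, extra=5)
    # BaseBarterItemFormSet takes the branch as its first positional argument.
    return BarterItemFormSet(branch, data=data)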
orzubalsky/tradeschool
ts/apps/tradeschool/forms.py
Python
gpl-3.0
7,844
0.000382
# Copyright (C) 2008-2011 Dejan Muhamedagic <dmuhamedagic@suse.de>
# Copyright (C) 2013 Kristoffer Gronlund <kgronlund@suse.com>
# See COPYING for license information.

from . import command
from . import completers as compl
from . import utils
from . import ra
from . import constants
from . import options


def complete_class_provider_type(args):
    '''
    This is just too complicated to complete properly...
    '''
    ret = set([])
    classes = ra.ra_classes()
    for c in classes:
        if c != 'ocf':
            types = ra.ra_types(c)
            for t in types:
                ret.add('%s:%s' % (c, t))
    providers = ra.ra_providers_all('ocf')
    for p in providers:
        types = ra.ra_types('ocf', p)
        for t in types:
            ret.add('ocf:%s:%s' % (p, t))
    return list(ret)


class RA(command.UI):
    '''
    Resource agent information: classes, providers, types and meta-data.
    '''
    name = "ra"
    provider_classes = ["ocf"]

    def do_classes(self, context):
        "usage: classes"
        for c in ra.ra_classes():
            if c in self.provider_classes:
                providers = ra.ra_providers_all(c)
                if providers:
                    print "%s / %s" % (c, ' '.join(providers))
            else:
                print "%s" % c

    @command.skill_level('administrator')
    def do_providers(self, context, ra_type, ra_class="ocf"):
        "usage: providers <ra> [<class>]"
        print ' '.join(ra.ra_providers(ra_type, ra_class))

    @command.skill_level('administrator')
    @command.completers(compl.call(ra.ra_classes),
                        lambda args: ra.ra_providers_all(args[1]))
    def do_list(self, context, class_, provider_=None):
        "usage: list <class> [<provider>]"
        if class_ not in ra.ra_classes():
            context.fatal_error("class %s does not exist" % class_)
        if provider_ and provider_ not in ra.ra_providers_all(class_):
            context.fatal_error("there is no provider %s for class %s" %
                                (provider_, class_))
        types = ra.ra_types(class_, provider_)
        if options.regression_tests:
            for t in types:
                print t
        else:
            utils.multicolumn(types)

    @command.skill_level('administrator')
    @command.alias('meta')
    @command.completers(complete_class_provider_type)
    def do_info(self, context, *args):
        "usage: info [<class>:[<provider>:]]<type>"
        if len(args) == 0:
            context.fatal_error("Expected [<class>:[<provider>:]]<type>")
        elif len(args) > 1:  # obsolete syntax
            if len(args) < 3:
                ra_type, ra_class, ra_provider = args[0], args[1], "heartbeat"
            else:
                ra_type, ra_class, ra_provider = args[0], args[1], args[2]
        elif args[0] in constants.meta_progs:
            ra_class, ra_provider, ra_type = args[0], None, None
        else:
            ra_class, ra_provider, ra_type = ra.disambiguate_ra_type(args[0])
        agent = ra.RAInfo(ra_class, ra_type, ra_provider)
        if agent.mk_ra_node() is None:
            return False
        try:
            utils.page_string(agent.meta_pretty())
        except Exception, msg:
            context.fatal_error(msg)
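# Editor's addition: hedged sketch of the crm shell commands this UI level
# serves; the agent name is illustrative.
#
#     crm ra classes
#     crm ra list ocf heartbeat
#     crm ra info ocf:heartbeat:IPaddr2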
aspiers/crmsh
modules/ui_ra.py
Python
gpl-2.0
3,190
0.000627
"""SCons.Tool.bcc32 XXX """ # # __COPYRIGHT__ # # Permission is hereby granted, free of charge, to any person obtaining # a copy of this software and associated documentation files (the # "Software"), to deal in the Software without restriction, including # without limitation the rights to use, copy, modify, merge, publish, # distribute, sublicense, and/or sell copies of the Software, and to # permit persons to whom the Software is furnished to do so, subject to # the following conditions: # # The above copyright notice and this permission notice shall be included # in all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY # KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE # WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND # NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE # LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION # WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. # __revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__" import os import os.path import string import SCons.Defaults import SCons.Tool import SCons.Util def findIt(program, env): # First search in the SCons path and then the OS path: borwin = env.WhereIs(program) or SCons.Util.WhereIs(program) if borwin: dir = os.path.dirname(borwin) path = env['ENV'].get('PATH', []) if not path: path = [] if SCons.Util.is_String(path): path = string.split(path, os.pathsep) env['ENV']['PATH'] = string.join([dir]+path, os.pathsep) return borwin def generate(env): findIt('bcc32', env) """Add Builders and construction variables for bcc to an Environment.""" static_obj, shared_obj = SCons.Tool.createObjBuilders(env) for suffix in ['.c', '.cpp']: static_obj.add_action(suffix, SCons.Defaults.CAction) shared_obj.add_action(suffix, SCons.Defaults.ShCAction) static_obj.add_emitter(suffix, SCons.Defaults.StaticObjectEmitter) shared_obj.add_emitter(suffix, SCons.Defaults.SharedObjectEmitter) env['CC'] = 'bcc32' env['CCFLAGS'] = SCons.Util.CLVar('') env['CFLAGS'] = SCons.Util.CLVar('') env['CCCOM'] = '$CC -q $CFLAGS $CCFLAGS $CPPFLAGS $_CPPDEFFLAGS $_CPPINCFLAGS -c -o$TARGET $SOURCES' env['SHCC'] = '$CC' env['SHCCFLAGS'] = SCons.Util.CLVar('$CCFLAGS') env['SHCFLAGS'] = SCons.Util.CLVar('$CFLAGS') env['SHCCCOM'] = '$SHCC -WD $SHCFLAGS $SHCCFLAGS $CPPFLAGS $_CPPDEFFLAGS $_CPPINCFLAGS -c -o$TARGET $SOURCES' env['CPPDEFPREFIX'] = '-D' env['CPPDEFSUFFIX'] = '' env['INCPREFIX'] = '-I' env['INCSUFFIX'] = '' env['SHOBJSUFFIX'] = '.dll' env['STATIC_AND_SHARED_OBJECTS_ARE_THE_SAME'] = 0 env['CFILESUFFIX'] = '.cpp' def exists(env): return findIt('bcc32', env)
datalogics/scons
src/engine/SCons/Tool/bcc32.py
Python
mit
2,993
0.005012
# Copyright 2011 Andrew Bogott for the Wikimedia Foundation # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import datetime import mock from oslo.serialization import jsonutils import webob from nova.api.openstack.compute.contrib import flavor_access from nova.api.openstack.compute.contrib import flavormanage as flavormanage_v2 from nova.api.openstack.compute.plugins.v3 import flavor_manage as \ flavormanage_v21 from nova.compute import flavors from nova import context from nova import db from nova import exception from nova import test from nova.tests.unit.api.openstack import fakes def fake_db_flavor(**updates): db_flavor = { 'root_gb': 1, 'ephemeral_gb': 1, 'name': u'frob', 'deleted': False, 'created_at': datetime.datetime(2012, 1, 19, 18, 49, 30, 877329), 'updated_at': None, 'memory_mb': 256, 'vcpus': 1, 'flavorid': 1, 'swap': 0, 'rxtx_factor': 1.0, 'extra_specs': {}, 'deleted_at': None, 'vcpu_weight': None, 'id': 7, 'is_public': True, 'disabled': False, } if updates: db_flavor.update(updates) return db_flavor def fake_get_flavor_by_flavor_id(flavorid, ctxt=None, read_deleted='yes'): if flavorid == 'failtest': raise exception.FlavorNotFound(flavor_id=flavorid) elif not str(flavorid) == '1234': raise Exception("This test expects flavorid 1234, not %s" % flavorid) if read_deleted != 'no': raise test.TestingException("Should not be reading deleted") return fake_db_flavor(flavorid=flavorid) def fake_destroy(flavorname): pass def fake_create(context, kwargs, projects=None): newflavor = fake_db_flavor() flavorid = kwargs.get('flavorid') if flavorid is None: flavorid = 1234 newflavor['flavorid'] = flavorid newflavor["name"] = kwargs.get('name') newflavor["memory_mb"] = int(kwargs.get('memory_mb')) newflavor["vcpus"] = int(kwargs.get('vcpus')) newflavor["root_gb"] = int(kwargs.get('root_gb')) newflavor["ephemeral_gb"] = int(kwargs.get('ephemeral_gb')) newflavor["swap"] = kwargs.get('swap') newflavor["rxtx_factor"] = float(kwargs.get('rxtx_factor')) newflavor["is_public"] = bool(kwargs.get('is_public')) newflavor["disabled"] = bool(kwargs.get('disabled')) return newflavor class FlavorManageTestV21(test.NoDBTestCase): controller = flavormanage_v21.FlavorManageController() validation_error = exception.ValidationError base_url = '/v2/fake/flavors' def setUp(self): super(FlavorManageTestV21, self).setUp() self.stubs.Set(flavors, "get_flavor_by_flavor_id", fake_get_flavor_by_flavor_id) self.stubs.Set(flavors, "destroy", fake_destroy) self.stubs.Set(db, "flavor_create", fake_create) self.ctxt = context.RequestContext('fake', 'fake', is_admin=True, auth_token=True) self.app = self._setup_app() self.request_body = { "flavor": { "name": "test", "ram": 512, "vcpus": 2, "disk": 1, "OS-FLV-EXT-DATA:ephemeral": 1, "id": unicode('1234'), "swap": 512, "rxtx_factor": 1, "os-flavor-access:is_public": True, } } self.expected_flavor = self.request_body def _setup_app(self): return fakes.wsgi_app_v21(init_only=('os-flavor-manage', 'os-flavor-rxtx', 'os-flavor-access', 'flavors', 'os-flavor-extra-data')) def 
test_delete(self): req = fakes.HTTPRequest.blank(self.base_url + '/1234') res = self.controller._delete(req, 1234) # NOTE: on v2.1, http status code is set as wsgi_code of API # method instead of status_int in a response object. if isinstance(self.controller, flavormanage_v21.FlavorManageController): status_int = self.controller._delete.wsgi_code else: status_int = res.status_int self.assertEqual(202, status_int) # subsequent delete should fail self.assertRaises(webob.exc.HTTPNotFound, self.controller._delete, req, "failtest") def _test_create_missing_parameter(self, parameter): body = { "flavor": { "name": "azAZ09. -_", "ram": 512, "vcpus": 2, "disk": 1, "OS-FLV-EXT-DATA:ephemeral": 1, "id": unicode('1234'), "swap": 512, "rxtx_factor": 1, "os-flavor-access:is_public": True, } } del body['flavor'][parameter] req = fakes.HTTPRequest.blank(self.base_url) self.assertRaises(self.validation_error, self.controller._create, req, body=body) def test_create_missing_name(self): self._test_create_missing_parameter('name') def test_create_missing_ram(self): self._test_create_missing_parameter('ram') def test_create_missing_vcpus(self): self._test_create_missing_parameter('vcpus') def test_create_missing_disk(self): self._test_create_missing_parameter('disk') def _create_flavor_success_case(self, body): req = webob.Request.blank(self.base_url) req.headers['Content-Type'] = 'application/json' req.method = 'POST' req.body = jsonutils.dumps(body) res = req.get_response(self.app) self.assertEqual(200, res.status_code) return jsonutils.loads(res.body) def test_create(self): body = self._create_flavor_success_case(self.request_body) for key in self.expected_flavor["flavor"]: self.assertEqual(body["flavor"][key], self.expected_flavor["flavor"][key]) def test_create_public_default(self): del self.request_body['flavor']['os-flavor-access:is_public'] body = self._create_flavor_success_case(self.request_body) for key in self.expected_flavor["flavor"]: self.assertEqual(body["flavor"][key], self.expected_flavor["flavor"][key]) def test_create_without_flavorid(self): del self.request_body['flavor']['id'] body = self._create_flavor_success_case(self.request_body) for key in self.expected_flavor["flavor"]: self.assertEqual(body["flavor"][key], self.expected_flavor["flavor"][key]) def _create_flavor_bad_request_case(self, body): self.stubs.UnsetAll() req = webob.Request.blank(self.base_url) req.headers['Content-Type'] = 'application/json' req.method = 'POST' req.body = jsonutils.dumps(body) res = req.get_response(self.app) self.assertEqual(res.status_code, 400) def test_create_invalid_name(self): self.request_body['flavor']['name'] = 'bad !@#!$%\x00 name' self._create_flavor_bad_request_case(self.request_body) def test_create_flavor_name_is_whitespace(self): self.request_body['flavor']['name'] = ' ' self._create_flavor_bad_request_case(self.request_body) def test_create_with_name_too_long(self): self.request_body['flavor']['name'] = 'a' * 256 self._create_flavor_bad_request_case(self.request_body) def test_create_without_flavorname(self): del self.request_body['flavor']['name'] self._create_flavor_bad_request_case(self.request_body) def test_create_empty_body(self): body = { "flavor": {} } self._create_flavor_bad_request_case(body) def test_create_no_body(self): body = {} self._create_flavor_bad_request_case(body) def test_create_invalid_format_body(self): body = { "flavor": [] } self._create_flavor_bad_request_case(body) def test_create_invalid_flavorid(self): self.request_body['flavor']['id'] = "!@#!$#!$^#&^$&" 
self._create_flavor_bad_request_case(self.request_body) def test_create_check_flavor_id_length(self): MAX_LENGTH = 255 self.request_body['flavor']['id'] = "a" * (MAX_LENGTH + 1) self._create_flavor_bad_request_case(self.request_body) def test_create_with_leading_trailing_whitespaces_in_flavor_id(self): self.request_body['flavor']['id'] = " bad_id " self._create_flavor_bad_request_case(self.request_body) def test_create_without_ram(self): del self.request_body['flavor']['ram'] self._create_flavor_bad_request_case(self.request_body) def test_create_with_0_ram(self): self.request_body['flavor']['ram'] = 0 self._create_flavor_bad_request_case(self.request_body) def test_create_without_vcpus(self): del self.request_body['flavor']['vcpus'] self._create_flavor_bad_request_case(self.request_body) def test_create_with_0_vcpus(self): self.request_body['flavor']['vcpus'] = 0 self._create_flavor_bad_request_case(self.request_body) def test_create_without_disk(self): del self.request_body['flavor']['disk'] self._create_flavor_bad_request_case(self.request_body) def test_create_with_minus_disk(self): self.request_body['flavor']['disk'] = -1 self._create_flavor_bad_request_case(self.request_body) def test_create_with_minus_ephemeral(self): self.request_body['flavor']['OS-FLV-EXT-DATA:ephemeral'] = -1 self._create_flavor_bad_request_case(self.request_body) def test_create_with_minus_swap(self): self.request_body['flavor']['swap'] = -1 self._create_flavor_bad_request_case(self.request_body) def test_create_with_minus_rxtx_factor(self): self.request_body['flavor']['rxtx_factor'] = -1 self._create_flavor_bad_request_case(self.request_body) def test_create_with_non_boolean_is_public(self): self.request_body['flavor']['os-flavor-access:is_public'] = 123 self._create_flavor_bad_request_case(self.request_body) def test_flavor_exists_exception_returns_409(self): expected = { "flavor": { "name": "test", "ram": 512, "vcpus": 2, "disk": 1, "OS-FLV-EXT-DATA:ephemeral": 1, "id": 1235, "swap": 512, "rxtx_factor": 1, "os-flavor-access:is_public": True, } } def fake_create(name, memory_mb, vcpus, root_gb, ephemeral_gb, flavorid, swap, rxtx_factor, is_public): raise exception.FlavorExists(name=name) self.stubs.Set(flavors, "create", fake_create) req = webob.Request.blank(self.base_url) req.headers['Content-Type'] = 'application/json' req.method = 'POST' req.body = jsonutils.dumps(expected) res = req.get_response(self.app) self.assertEqual(res.status_int, 409) @mock.patch('nova.compute.flavors.create', side_effect=exception.FlavorCreateFailed) def test_flavor_create_db_failed(self, mock_create): request_dict = { "flavor": { "name": "test", 'id': "12345", "ram": 512, "vcpus": 2, "disk": 1, "OS-FLV-EXT-DATA:ephemeral": 1, "swap": 512, "rxtx_factor": 1, "os-flavor-access:is_public": True, } } req = webob.Request.blank(self.base_url) req.headers['Content-Type'] = 'application/json' req.method = 'POST' req.body = jsonutils.dumps(request_dict) res = req.get_response(self.app) self.assertEqual(res.status_int, 500) self.assertIn('Unable to create flavor', res.body) def test_invalid_memory_mb(self): """Check negative and decimal number can't be accepted.""" self.stubs.UnsetAll() self.assertRaises(exception.InvalidInput, flavors.create, "abc", -512, 2, 1, 1, 1234, 512, 1, True) self.assertRaises(exception.InvalidInput, flavors.create, "abcd", 512.2, 2, 1, 1, 1234, 512, 1, True) self.assertRaises(exception.InvalidInput, flavors.create, "abcde", None, 2, 1, 1, 1234, 512, 1, True) self.assertRaises(exception.InvalidInput, 
flavors.create, "abcdef", 512, 2, None, 1, 1234, 512, 1, True) self.assertRaises(exception.InvalidInput, flavors.create, "abcdef", "test_memory_mb", 2, None, 1, 1234, 512, 1, True) class FakeRequest(object): environ = {"nova.context": context.get_admin_context()} class PrivateFlavorManageTestV21(test.TestCase): controller = flavormanage_v21.FlavorManageController() base_url = '/v2/fake/flavors' def setUp(self): super(PrivateFlavorManageTestV21, self).setUp() self.flavor_access_controller = flavor_access.FlavorAccessController() self.ctxt = context.RequestContext('fake', 'fake', is_admin=True, auth_token=True) self.app = self._setup_app() self.expected = { "flavor": { "name": "test", "ram": 512, "vcpus": 2, "disk": 1, "OS-FLV-EXT-DATA:ephemeral": 1, "swap": 512, "rxtx_factor": 1 } } def _setup_app(self): return fakes.wsgi_app_v21(init_only=('os-flavor-manage', 'os-flavor-access', 'os-flavor-rxtx', 'flavors', 'os-flavor-extra-data'), fake_auth_context=self.ctxt) def _get_response(self): req = webob.Request.blank(self.base_url) req.headers['Content-Type'] = 'application/json' req.method = 'POST' req.body = jsonutils.dumps(self.expected) res = req.get_response(self.app) return jsonutils.loads(res.body) def test_create_private_flavor_should_not_grant_flavor_access(self): self.expected["flavor"]["os-flavor-access:is_public"] = False body = self._get_response() for key in self.expected["flavor"]: self.assertEqual(body["flavor"][key], self.expected["flavor"][key]) flavor_access_body = self.flavor_access_controller.index( FakeRequest(), body["flavor"]["id"]) expected_flavor_access_body = { "tenant_id": "%s" % self.ctxt.project_id, "flavor_id": "%s" % body["flavor"]["id"] } self.assertNotIn(expected_flavor_access_body, flavor_access_body["flavor_access"]) def test_create_public_flavor_should_not_create_flavor_access(self): self.expected["flavor"]["os-flavor-access:is_public"] = True body = self._get_response() for key in self.expected["flavor"]: self.assertEqual(body["flavor"][key], self.expected["flavor"][key]) class FlavorManageTestV2(FlavorManageTestV21): controller = flavormanage_v2.FlavorManageController() validation_error = webob.exc.HTTPBadRequest def setUp(self): super(FlavorManageTestV2, self).setUp() self.flags( osapi_compute_extension=[ 'nova.api.openstack.compute.contrib.select_extensions'], osapi_compute_ext_list=['Flavormanage', 'Flavorextradata', 'Flavor_access', 'Flavor_rxtx', 'Flavor_swap']) def _setup_app(self): return fakes.wsgi_app(init_only=('flavors',), fake_auth_context=self.ctxt) class PrivateFlavorManageTestV2(PrivateFlavorManageTestV21): controller = flavormanage_v2.FlavorManageController() def setUp(self): super(PrivateFlavorManageTestV2, self).setUp() self.flags( osapi_compute_extension=[ 'nova.api.openstack.compute.contrib.select_extensions'], osapi_compute_ext_list=['Flavormanage', 'Flavorextradata', 'Flavor_access', 'Flavor_rxtx', 'Flavor_swap']) def _setup_app(self): return fakes.wsgi_app(init_only=('flavors',), fake_auth_context=self.ctxt)
Metaswitch/calico-nova
nova/tests/unit/api/openstack/compute/contrib/test_flavor_manage.py
Python
apache-2.0
17,379
0.000173
# coding=utf-8 import os import subprocess import sys import pytest import virtual_environments from diff_test_tools import expected_messages, SCRIPT from service_messages import ServiceMessage, assert_service_messages, match from test_util import run_command @pytest.fixture(scope='module') def venv(request): """ Prepares a virtual environment for unittest, no extra packages required :rtype : virtual_environments.VirtualEnvDescription """ return virtual_environments.prepare_virtualenv() def test_changes_name(venv): output = run_directly(venv, 'test_changes_name.py') assert_service_messages( output, [ ServiceMessage('testCount', {'count': "1"}), ServiceMessage('testStarted', {'name': "__main__.Foo.test_aa (1)", 'flowId': "__main__.Foo.test_aa (1)"}), ServiceMessage('testFinished', {'name': "__main__.Foo.test_aa (11)", 'flowId': "__main__.Foo.test_aa (11)"}), ]) def test_nested_suits(venv): output = run_directly(venv, 'nested_suits.py') test_name = '__main__.TestXXX.runTest' assert_service_messages( output, [ ServiceMessage('testCount', {'count': "1"}), ServiceMessage('testStarted', {'name': test_name, 'captureStandardOutput': 'true', 'flowId': test_name}), ServiceMessage('testFinished', {'name': test_name}), ]) def test_docstring(venv): output = run_directly(venv, 'docstring.py') test_name = '__main__.TestXXX.runTest (A test_)' assert_service_messages( output, [ ServiceMessage('testCount', {'count': "1"}), ServiceMessage('testStarted', {'name': test_name, 'flowId': test_name}), ServiceMessage('testFinished', {'name': test_name, 'flowId': test_name}), ]) def test_assert(venv): output = run_directly(venv, 'assert.py') test_name = '__main__.TestXXX.runTest' ms = assert_service_messages( output, [ ServiceMessage('testCount', {'count': "1"}), ServiceMessage('testStarted', {'name': test_name, 'flowId': test_name}), ServiceMessage('testFailed', {'name': test_name, 'message': 'Failure', 'flowId': test_name}), ServiceMessage('testFinished', {'name': test_name, 'flowId': test_name}), ]) failed_ms = match(ms, ServiceMessage('testFailed', {'name': test_name})) assert failed_ms.params['details'].index("assert 1 == 0") > 0 def test_fail(venv): output = run_directly(venv, 'fail_test.py') test_name = '__main__.TestXXX.runTest' ms = assert_service_messages( output, [ ServiceMessage('testCount', {'count': "1"}), ServiceMessage('testStarted', {'name': test_name, 'flowId': test_name}), ServiceMessage('testFailed', {'name': test_name, 'message': 'Failure', 'flowId': test_name}), ServiceMessage('testFinished', {'name': test_name, 'flowId': test_name}), ]) failed_ms = match(ms, ServiceMessage('testFailed', {'name': test_name})) assert failed_ms.params['details'].index('fail("Grr")') > 0 def test_setup_error(venv): output = run_directly(venv, 'setup_error.py') test_name = '__main__.TestXXX.runTest' ms = assert_service_messages( output, [ ServiceMessage('testCount', {'count': "1"}), ServiceMessage('testStarted', {'name': test_name, 'flowId': test_name}), ServiceMessage('testFailed', {'name': test_name, 'message': 'Error', 'flowId': test_name}), ServiceMessage('testFinished', {'name': test_name, 'flowId': test_name}), ]) failed_ms = match(ms, ServiceMessage('testFailed', {'name': test_name})) assert failed_ms.params['details'].index("RRR") > 0 assert failed_ms.params['details'].index("setUp") > 0 def test_teardown_error(venv): output = run_directly(venv, 'teardown_error.py') test_name = '__main__.TestXXX.runTest' ms = assert_service_messages( output, [ ServiceMessage('testCount', {'count': "1"}), 
ServiceMessage('testStarted', {'name': test_name, 'flowId': test_name}), ServiceMessage('testFailed', {'name': test_name, 'message': 'Error', 'flowId': test_name}), ServiceMessage('testFinished', {'name': test_name, 'flowId': test_name}), ]) failed_ms = match(ms, ServiceMessage('testFailed', {'name': test_name})) assert failed_ms.params['details'].index("RRR") > 0 assert failed_ms.params['details'].index("tearDown") > 0 @pytest.mark.skipif("sys.version_info < (2, 7)", reason="buffer requires Python 2.7+") def test_buffer_output(venv): output = run_directly(venv, 'buffer_output.py') test_name = '__main__.SpamTest.test_test' assert_service_messages( output, [ ServiceMessage('testCount', {'count': "1"}), ServiceMessage('testStarted', {'name': test_name, 'flowId': test_name}), ServiceMessage('testStdOut', {'out': "stdout_test1|n", 'flowId': test_name}), ServiceMessage('testStdOut', {'out': "stdout_test2|n", 'flowId': test_name}), ServiceMessage('testStdErr', {'out': "stderr_test1", 'flowId': test_name}), ServiceMessage('testFailed', {'name': test_name, 'flowId': test_name}), ServiceMessage('testStdErr', {'out': "stderr_test2", 'flowId': test_name}), ServiceMessage('testFinished', {'name': test_name, 'flowId': test_name}), ]) # Check no stdout_test or stderr_test in the output (not in service messages) # it checks self._mirrorOutput = False output = output.replace("out='stdout_test", "").replace("out='stderr_test", "") assert output.find("stdout_test") < 0 assert output.find("stderr_test") < 0 def test_doctests(venv): output = run_directly(venv, 'doctests.py') test_name = '__main__.factorial' assert_service_messages( output, [ ServiceMessage('testCount', {'count': "1"}), ServiceMessage('testStarted', {'name': test_name, 'flowId': test_name}), ServiceMessage('testFinished', {'name': test_name, 'flowId': test_name}), ]) def test_skip(venv): if sys.version_info < (2, 7): venv = virtual_environments.prepare_virtualenv(list(venv.packages) + ["unittest2==0.5.1"]) output = run_directly(venv, 'skip_test.py') test_name = '__main__.TestSkip.test_skip_me' assert_service_messages( output, [ ServiceMessage('testCount', {'count': "2"}), ServiceMessage('testStarted', {'name': '__main__.TestSkip.test_ok'}), ServiceMessage('testFinished', {'name': '__main__.TestSkip.test_ok'}), ServiceMessage('testStarted', {'name': test_name, 'flowId': test_name}), ServiceMessage('testIgnored', {'name': test_name, 'message': u'Skipped: testing skipping øпричина', 'flowId': test_name}), ServiceMessage('testFinished', {'name': test_name, 'flowId': test_name}), ]) def test_expected_failure(venv): if sys.version_info < (2, 7): venv = virtual_environments.prepare_virtualenv(list(venv.packages) + ["unittest2"]) output = run_directly(venv, 'expected_failure.py') test_name = '__main__.TestSkip.test_expected_failure' ms = assert_service_messages( output, [ ServiceMessage('testCount', {'count': "1"}), ServiceMessage('testStarted', {'name': test_name, 'flowId': test_name}), ServiceMessage('testIgnored', {'name': test_name, 'flowId': test_name}), ServiceMessage('testFinished', {'name': test_name, 'flowId': test_name}), ]) failed_ms = match(ms, ServiceMessage('testIgnored', {'name': test_name})) assert failed_ms.params['message'].find("Expected failure") == 0 assert failed_ms.params['message'].find("this should happen unfortunately") > 0 def test_subtest_ok(venv): if sys.version_info < (3, 4): venv = virtual_environments.prepare_virtualenv(list(venv.packages) + ["unittest2"]) output = run_directly(venv, 'subtest_ok.py') test_name = 
'__main__.TestXXX.testSubtestSuccess' assert_service_messages( output, [ ServiceMessage('testCount', {'count': "1"}), ServiceMessage('testStarted', {'name': test_name, 'flowId': test_name}), ServiceMessage('blockOpened', {'name': '(i=0)', 'flowId': test_name, 'subTestResult': 'Success'}), ServiceMessage('blockClosed', {'name': '(i=0)', 'flowId': test_name}), ServiceMessage('blockOpened', {'name': '(i=1)', 'flowId': test_name, 'subTestResult': 'Success'}), ServiceMessage('blockClosed', {'name': '(i=1)', 'flowId': test_name}), ServiceMessage('testFinished', {'name': test_name, 'flowId': test_name}), ]) def test_subtest_named(venv): if sys.version_info < (3, 4): venv = virtual_environments.prepare_virtualenv(list(venv.packages) + ["unittest2"]) output = run_directly(venv, 'subtest_named.py') test_id = '__main__.NumbersTest.test_even' test_name = test_id + " (Test that numbers between 0 and 5 are all even_)" assert_service_messages( output, [ ServiceMessage('testCount', {'count': "1"}), ServiceMessage('testStarted', {'name': test_name, 'flowId': test_name}), ServiceMessage('blockOpened', {'name': '(i=0)', 'flowId': test_name, 'subTestResult': 'Success'}), ServiceMessage('blockClosed', {'name': '(i=0)', 'flowId': test_name}), ServiceMessage('blockOpened', {'name': '(i=1)', 'flowId': test_name, 'subTestResult': 'Failure'}), ServiceMessage('testStdErr', {'name': test_name, 'flowId': test_name}), ServiceMessage('blockClosed', {'name': '(i=1)', 'flowId': test_name}), ServiceMessage('testFailed', {'name': test_name, 'flowId': test_name}), ServiceMessage('testFinished', {'flowId': test_name}), ]) def test_subtest_error(venv): if sys.version_info < (3, 4): venv = virtual_environments.prepare_virtualenv(list(venv.packages) + ["unittest2"]) output = run_directly(venv, 'subtest_error.py') test_name = '__main__.TestXXX.testSubtestError' ms = assert_service_messages( output, [ ServiceMessage('testCount', {'count': "1"}), ServiceMessage('testStarted', {'name': test_name, 'flowId': test_name}), ServiceMessage('blockOpened', {'name': '(i=0)', 'flowId': test_name, 'subTestResult': 'Success'}), ServiceMessage('blockClosed', {'name': '(i=0)', 'flowId': test_name}), ServiceMessage('blockOpened', {'name': "(i=|'abc_xxx|')", 'flowId': test_name, 'subTestResult': 'Error'}), ServiceMessage('testStdErr', {'name': test_name, 'flowId': test_name}), ServiceMessage('blockClosed', {'name': "(i=|'abc_xxx|')", 'flowId': test_name}), ServiceMessage('testFailed', {'details': "Failed subtests list: (i=|'abc_xxx|')", 'message': 'One or more subtests failed', 'name': test_name, 'flowId': test_name}), ServiceMessage('testFinished', {'name': test_name, 'flowId': test_name}), ]) failed_ms = match(ms, ServiceMessage('testStdErr', {'name': test_name})) assert failed_ms.params['out'].find("SubTest error") >= 0 assert failed_ms.params['out'].find("RuntimeError") >= 0 assert failed_ms.params['out'].find("RRR") >= 0 def test_subtest_failure(venv): if sys.version_info < (3, 4): venv = virtual_environments.prepare_virtualenv(list(venv.packages) + ["unittest2"]) output = run_directly(venv, 'subtest_failure.py') test_name = '__main__.TestXXX.testSubtestFailure' ms = assert_service_messages( output, [ ServiceMessage('testCount', {'count': "1"}), ServiceMessage('testStarted', {'name': test_name, 'flowId': test_name}), ServiceMessage('blockOpened', {'name': '(i=0)', 'flowId': test_name, 'subTestResult': 'Success'}), ServiceMessage('blockClosed', {'name': '(i=0)', 'flowId': test_name}), ServiceMessage('blockOpened', {'name': "(i=|'abc_xxx|')", 
'flowId': test_name, 'subTestResult': 'Failure'}), ServiceMessage('testStdErr', {'name': test_name, 'flowId': test_name}), ServiceMessage('blockClosed', {'name': "(i=|'abc_xxx|')", 'flowId': test_name}), ServiceMessage('testFailed', {'details': "Failed subtests list: (i=|'abc_xxx|')", 'message': 'One or more subtests failed', 'name': test_name, 'flowId': test_name}), ServiceMessage('testFinished', {'name': test_name, 'flowId': test_name}), ]) failed_ms = match(ms, ServiceMessage('testStdErr', {'name': test_name})) assert failed_ms.params['out'].find("SubTest failure") >= 0 assert failed_ms.params['out'].find("AssertionError") >= 0 assert failed_ms.params['out'].find("assert 1 == 0") >= 0 def test_subtest_nested(venv): if sys.version_info < (3, 4): venv = virtual_environments.prepare_virtualenv(list(venv.packages) + ["unittest2"]) output = run_directly(venv, 'subtest_nested.py') test_name = '__main__.TestXXX.testNested' # Nested blocks support requires strict notifications about starting and stopping subtests # which is not yet supported, see https://mail.python.org/pipermail/python-dev/2016-June/145402.html assert_service_messages( output, [ ServiceMessage('testCount', {'count': "1"}), ServiceMessage('testStarted', {'name': test_name, 'flowId': test_name}), ServiceMessage('blockOpened', {'name': '(i=2)', 'flowId': test_name, 'subTestResult': 'Success'}), ServiceMessage('blockClosed', {'name': '(i=2)', 'flowId': test_name}), ServiceMessage('blockOpened', {'name': '(i=1)', 'flowId': test_name, 'subTestResult': 'Success'}), ServiceMessage('blockClosed', {'name': '(i=1)', 'flowId': test_name}), ServiceMessage('testFinished', {'name': test_name, 'flowId': test_name}), ]) def test_subtest_skip(venv): if sys.version_info < (3, 4): venv = virtual_environments.prepare_virtualenv(list(venv.packages) + ["unittest2"]) output = run_directly(venv, 'subtest_skip.py') test_name = '__main__.TestXXX.testSubtestSkip' assert_service_messages( output, [ ServiceMessage('testCount', {'count': "1"}), ServiceMessage('testStarted', {'name': test_name, 'flowId': test_name}), ServiceMessage('blockOpened', {'name': '(i=2)', 'flowId': test_name, 'subTestResult': 'Skip'}), ServiceMessage('testStdOut', {'name': test_name, 'flowId': test_name, 'out': 'SubTest skipped: skip reason|n'}), ServiceMessage('blockClosed', {'name': '(i=2)', 'flowId': test_name}), ServiceMessage('blockOpened', {'name': '(i=0)', 'flowId': test_name, 'subTestResult': 'Success'}), ServiceMessage('blockClosed', {'name': '(i=0)', 'flowId': test_name}), ServiceMessage('blockOpened', {'name': '(i=1)', 'flowId': test_name, 'subTestResult': 'Success'}), ServiceMessage('blockClosed', {'name': '(i=1)', 'flowId': test_name}), ServiceMessage('testFinished', {'name': test_name, 'flowId': test_name}), ]) def test_setup_class_skip(venv): if sys.version_info < (2, 7): venv = virtual_environments.prepare_virtualenv(list(venv.packages) + ["unittest2"]) output = run_directly(venv, 'setup_class_skip.py') test1_name = '__main__.TestSimple.setUpClass' test2_name = '__main__.TestSubSimple.setUpClass' assert_service_messages( output, [ ServiceMessage('testCount', {'count': "7"}), ServiceMessage('testStarted', {'name': test1_name, 'flowId': test1_name}), ServiceMessage('testIgnored', {'name': test1_name, 'flowId': test1_name, 'message': "Skipped: Skip whole Case"}), ServiceMessage('testFinished', {'name': test1_name, 'flowId': test1_name}), ServiceMessage('testStarted', {'name': test2_name, 'flowId': test2_name}), ServiceMessage('testIgnored', {'name': test2_name, 
'flowId': test2_name, 'message': "Skipped: Skip whole Case"}), ServiceMessage('testFinished', {'name': test2_name, 'flowId': test2_name}), ]) def test_subtest_mixed_failure(venv): if sys.version_info < (3, 4): venv = virtual_environments.prepare_virtualenv(list(venv.packages) + ["unittest2"]) output = run_directly(venv, 'subtest_mixed_failure.py') test_name = '__main__.TestXXX.testSubtestFailure' ms = assert_service_messages( output, [ ServiceMessage('testCount', {'count': "1"}), ServiceMessage('testStarted', {'name': test_name, 'flowId': test_name}), ServiceMessage('blockOpened', {'name': '(i=0)', 'flowId': test_name, 'subTestResult': 'Success'}), ServiceMessage('blockClosed', {'name': '(i=0)', 'flowId': test_name}), ServiceMessage('blockOpened', {'name': "(i=|'abc_xxx|')", 'flowId': test_name, 'subTestResult': 'Failure'}), ServiceMessage('testStdErr', {'name': test_name, 'flowId': test_name}), ServiceMessage('blockClosed', {'name': "(i=|'abc_xxx|')", 'flowId': test_name}), ServiceMessage('testFailed', {'message': 'Failure', 'name': test_name, 'flowId': test_name}), ServiceMessage('testFinished', {'name': test_name, 'flowId': test_name}), ]) failed_ms = match(ms, ServiceMessage('testFailed', {'name': test_name})) assert failed_ms.params['details'].find("Failed subtests list: (i=|'abc_xxx|')|n|n") >= 0 assert failed_ms.params['details'].find("AssertionError") > 0 assert failed_ms.params['details'].find("6 == 1") > 0 def test_unexpected_success(venv): if sys.version_info < (2, 7): venv = virtual_environments.prepare_virtualenv(list(venv.packages) + ["unittest2"]) output = run_directly(venv, 'unexpected_success.py') test_name = '__main__.TestSkip.test_unexpected_success' assert_service_messages( output, [ ServiceMessage('testCount', {'count': "1"}), ServiceMessage('testStarted', {'name': test_name, 'flowId': test_name}), ServiceMessage('testFailed', {'name': test_name, 'details': "Test should not succeed since it|'s marked with @unittest.expectedFailure", 'flowId': test_name}), ServiceMessage('testFinished', {'name': test_name, 'flowId': test_name}), ]) @pytest.mark.skipif("sys.version_info < (2, 7)", reason="unittest discovery requires Python 2.7+") def test_discovery(venv): output = run_directly(venv, 'discovery.py') assert_service_messages( output, [ ServiceMessage('testCount', {'count': "1"}), ServiceMessage('testStarted', {'name': 'testsimple.TestTeamcityMessages.runTest'}), ServiceMessage('testFinished', {'name': 'testsimple.TestTeamcityMessages.runTest'}), ]) @pytest.mark.skipif("sys.version_info < (3, 2)", reason="unittest failfast requires Python 3.2+") def test_fail_fast(venv): output = run_directly(venv, 'fail_fast.py') assert_service_messages( output, [ ServiceMessage('testCount', {'count': "3"}), ServiceMessage('testStarted', {'name': '__main__.FooTest.test_1_test'}), ServiceMessage('testFinished', {'name': '__main__.FooTest.test_1_test'}), ServiceMessage('testStarted', {'name': '__main__.FooTest.test_2_test'}), ServiceMessage('testFailed', {'name': '__main__.FooTest.test_2_test'}), ServiceMessage('testFinished', {'name': '__main__.FooTest.test_2_test'}), ]) @pytest.mark.skipif("sys.version_info < (2, 7)", reason="unittest discovery requires Python 2.7+") def test_discovery_errors(venv): output = run_directly(venv, 'discovery_errors.py') ms = assert_service_messages( output, [ ServiceMessage('testCount', {'count': "1"}), ServiceMessage('testStarted', {}), ServiceMessage('testFailed', {'message': 'Error'}), ServiceMessage('testFinished', {}), ]) failed_ms = match(ms, 
ServiceMessage('testFailed', {})) assert failed_ms.params['details'].index("ImportError") > 0 @pytest.mark.skipif("sys.version_info < (2, 7)", reason="requires Python 2.7+") def test_setup_module_error(venv): output = run_directly(venv, 'setup_module_error.py') test_name = '__main__.setUpModule' ms = assert_service_messages( output, [ ServiceMessage('testCount', {'count': "1"}), ServiceMessage('testStarted', {'name': test_name, 'flowId': test_name}), ServiceMessage('testFailed', {'name': test_name, 'message': 'Failure', 'flowId': test_name}), ServiceMessage('testFinished', {'name': test_name, 'flowId': test_name}), ]) failed_ms = match(ms, ServiceMessage('testFailed', {'name': test_name})) assert failed_ms.params['details'].index("assert 1 == 0") > 0 @pytest.mark.skipif("sys.version_info < (2, 7)", reason="requires Python 2.7+") def test_setup_class_error(venv): output = run_directly(venv, 'setup_class_error.py') test_name = '__main__.TestXXX.setUpClass' ms = assert_service_messages( output, [ ServiceMessage('testCount', {'count': "1"}), ServiceMessage('testStarted', {'name': test_name, 'flowId': test_name}), ServiceMessage('testFailed', {'name': test_name, 'message': 'Failure', 'flowId': test_name}), ServiceMessage('testFinished', {'name': test_name, 'flowId': test_name}), ]) failed_ms = match(ms, ServiceMessage('testFailed', {'name': test_name})) assert failed_ms.params['details'].index("RRR") > 0 @pytest.mark.skipif("sys.version_info < (2, 7)", reason="requires Python 2.7+") def test_teardown_class_error(venv): output = run_directly(venv, 'teardown_class_error.py') ms = assert_service_messages( output, [ ServiceMessage('testCount', {'count': "1"}), ServiceMessage('testStarted', {'name': '__main__.TestXXX.test_ok'}), ServiceMessage('testFinished', {'name': '__main__.TestXXX.test_ok'}), ServiceMessage('testStarted', {'name': '__main__.TestXXX.tearDownClass'}), ServiceMessage('testFailed', {'name': '__main__.TestXXX.tearDownClass', 'message': 'Failure'}), ServiceMessage('testFinished', {'name': '__main__.TestXXX.tearDownClass'}), ]) failed_ms = match(ms, ServiceMessage('testFailed', {'name': '__main__.TestXXX.tearDownClass'})) assert failed_ms.params['details'].index("RRR") > 0 @pytest.mark.skipif("sys.version_info < (2, 7)", reason="requires Python 2.7+") def test_teardown_module_error(venv): output = run_directly(venv, 'teardown_module_error.py') teardown_test_name = '__main__.tearDownModule' ms = assert_service_messages( output, [ ServiceMessage('testCount', {'count': "1"}), ServiceMessage('testStarted', {'name': '__main__.TestXXX.test_ok'}), ServiceMessage('testFinished', {'name': '__main__.TestXXX.test_ok'}), ServiceMessage('testStarted', {'name': teardown_test_name, 'flowId': teardown_test_name}), ServiceMessage('testFailed', {'name': teardown_test_name, 'message': 'Failure', 'flowId': teardown_test_name}), ServiceMessage('testFinished', {'name': teardown_test_name, 'flowId': teardown_test_name}), ]) failed_ms = match(ms, ServiceMessage('testFailed', {'name': teardown_test_name})) assert failed_ms.params['details'].index("assert 1 == 0") > 0 @pytest.mark.skipif("sys.version_info < (2, 7)", reason="requires Python 2.7+") def test_twisted_trial(venv): packages = list(*venv.packages) packages.append("twisted") if os.name == 'nt': if sys.version_info < (2, 7): pytest.skip("pypiwin32 is available since Python 2.7") elif sys.version_info[:2] == (3, 4): packages.append("pypiwin32==219") else: packages.append("pypiwin32") venv_with_twisted = virtual_environments.prepare_virtualenv(packages) 
env = virtual_environments.get_clean_system_environment() env['PYTHONPATH'] = os.path.join(os.getcwd(), "tests", "guinea-pigs", "unittest") # Start the process and wait for its output trial_file = os.path.join(venv_with_twisted.bin, 'trial') trial_exe_file = os.path.join(venv_with_twisted.bin, 'trial.exe') trial_py_file = os.path.join(venv_with_twisted.bin, 'trial.py') if os.path.exists(trial_file): command = trial_file elif os.path.exists(trial_py_file): command = os.path.join(venv_with_twisted.bin, 'python') + " " + trial_py_file elif os.path.exists(trial_exe_file): command = trial_exe_file else: raise Exception("twisted trial is not found at " + trial_py_file + " or " + trial_file + " or " + trial_exe_file) command += " --reporter=teamcity twisted_trial" print("RUN: " + command) proc = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, env=env, shell=True) output = "".join([x.decode() for x in proc.stdout.readlines()]) proc.wait() print("OUTPUT:" + output.replace("#", "*")) test1 = "twisted_trial.test_case.CalculationTestCase.test_fail (some desc)" test2 = "twisted_trial.test_case.CalculationTestCase.test_ok" test3 = "twisted_trial.test_exception.TestFailure.testBadCode" ms = assert_service_messages( output, [ ServiceMessage('testStarted', {'name': test1}), ServiceMessage('testFailed', {'name': test1}), ServiceMessage('testFinished', {'name': test1}), ServiceMessage('testStarted', {'name': test2}), ServiceMessage('testFinished', {'name': test2}), ServiceMessage('testStarted', {'name': test3}), ServiceMessage('testFailed', {'name': test3}), ServiceMessage('testFailed', {'name': test3}), ServiceMessage('testFinished', {'name': test3}), ]) failed_ms = match(ms, ServiceMessage('testFailed', {'name': test1})) assert failed_ms.params['details'].index("5 != 4") > 0 @pytest.mark.skipif("sys.version_info < (2, 7) ", reason="requires Python 2.7") def test_diff(venv): output = run_directly(venv, SCRIPT) assert_service_messages( output, [ ServiceMessage('testCount', {'count': "3"}), ] + expected_messages("__main__.FooTest")) @pytest.mark.skipif("sys.version_info < (3, 4) ", reason="requires Python 3.4") def test_diff_subtest(venv): output = run_directly(venv, "../diff_assert_subtest.py") assert "AssertionError: True != False" in output, "AssertionError broken in output {0}".format(output) def test_equals_processed_correctly(venv): output = run_directly(venv, "../diff_equals_using_eq.py") assert output and "testFailed" not in output def run_directly(venv, file): command = os.path.join(venv.bin, 'python') + " " + os.path.join('tests', 'guinea-pigs', 'unittest', file) return run_command(command)
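# Editor's addition: hedged sketch of the TeamCity service-message lines the
# assertions above parse; the test name and flow id are illustrative.
#
#     ##teamcity[testStarted name='__main__.Foo.test_aa' flowId='__main__.Foo.test_aa']
#     ##teamcity[testFinished name='__main__.Foo.test_aa' flowId='__main__.Foo.test_aa']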
throwable-one/teamcity-messages
tests/integration-tests/unittest_integration_test.py
Python
apache-2.0
27,766
0.00544
from Queue import Queue, Empty
import contextlib
from logging import getLogger
import random
import time

from gevent.monkey import saved

LOG = getLogger(__name__)

if bool(saved):
    LOG.info('using zmq.green...')
    import zmq.green as zmq
else:
    import zmq


class ZMQConnection(object):
    def __init__(self, hosts, maxidle=None, timeout=2 * 1000, maxage=60):
        self._last_used = self._born = time.time()
        self._closed = False
        self.maxidle = maxidle
        self.maxage = maxage
        self.timeout = timeout
        self._zmq_init(hosts)

    def _zmq_init(self, hosts):
        context = zmq.Context()
        random.shuffle(hosts)
        self.socket = context.socket(zmq.REQ)
        self.socket.setsockopt(zmq.LINGER, 0)
        for (host, port) in hosts:
            self.socket.connect("tcp://%s:%s" % (host, port))
        self.poller = zmq.Poller()
        self.poller.register(self.socket, zmq.POLLIN)

    def send(self, req):
        self.socket.send(req)
        self._last_used = time.time()

    def recv(self, timeout=None):
        if self.poller.poll(timeout or self.timeout):
            resp = self.socket.recv()
        else:
            self.close()
            raise TimeoutError("Timeout processing request.")
        self._last_used = time.time()
        return resp

    def close(self):
        try:
            self.socket.close()
        except:
            pass
        self._closed = True

    @property
    def closed(self):
        if self._closed:
            return self._closed
        t = time.time()
        died_of_old_age = self.maxage and t - self._born > self.maxage
        died_of_boredom = self.maxidle and t - self._last_used > self.maxidle
        if died_of_old_age:
            self.close()
            return True
        if died_of_boredom:
            self.close()
            return True
        return False


class ConnectionError(IOError):
    pass


class ZMQConnectionPool(object):
    def __init__(self, create_connection, maxsize=100):
        self.maxsize = maxsize
        self.pool = Queue()
        self.size = 0
        self.create_connection = create_connection

    def get(self, block=True, timeout=None):
        pool = self.pool
        if self.size >= self.maxsize or pool.qsize():
            # we're over limit or there are already created objects in the queue
            try:
                conn = pool.get(block=block, timeout=timeout)
            except Empty:
                raise ConnectionError("Too many connections")
            # we got a connection, but it must be valid!
            # a null connection means we need to create a new one
            if conn and not conn.closed:
                return conn
            # we didn't get a valid connection, add one.
        else:
            # we have room to grow, so reserve a spot!
            self.size += 1
        try:
            conn = self.create_connection()
        except:
            self.size -= 1
            raise
        return conn

    def put(self, item):
        self.pool.put(item)

    @contextlib.contextmanager
    def connection(self, **kwargs):
        """
        :yield: ZMQConnection
        """
        conn = None
        try:
            conn = self.get(**kwargs)
            yield conn
        except:
            # if we had problems let's discard
            if conn:
                conn.close()
            raise
        finally:
            if conn and conn.closed:
                # this "returns" to the pool, but will result
                # in a new connection
                conn = None
            self.put(conn)


class TimeoutError(IOError):
    pass
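# Editor's addition: hedged usage sketch; the endpoint is a placeholder.
def _pool_usage_sketch(request_bytes):
    'Editor sketch: one request/reply cycle through the pool.'
    pool = ZMQConnectionPool(
        lambda: ZMQConnection([('127.0.0.1', 5555)]), maxsize=10)
    # The context manager returns healthy connections to the pool and replaces
    # closed/expired ones with a None sentinel so a fresh socket is made later.
    with pool.connection(timeout=2) as conn:
        conn.send(request_bytes)
        return conn.recv()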
Livefyre/protobuf-rpc
python/protobuf_rpc/connection.py
Python
mit
3,673
0.001089
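A minimal usage sketch for the pool above, assuming a ZMQ REP service listening on 127.0.0.1:5555; the host list, payload, and import path are illustrative rather than part of the module:

from protobuf_rpc.connection import ZMQConnection, ZMQConnectionPool, TimeoutError

def make_connection():
    # a REQ socket connected to several endpoints load-balances requests
    return ZMQConnection([("127.0.0.1", 5555)], maxidle=30, maxage=60)

pool = ZMQConnectionPool(make_connection, maxsize=10)

with pool.connection() as conn:
    conn.send(b"ping")
    try:
        reply = conn.recv(timeout=1000)  # milliseconds, like ZMQConnection.timeout
    except TimeoutError:
        reply = None  # recv() closed the socket; the pool will hand out a fresh one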
import pandas as pd import numpy as np import re from gensim import corpora, models, similarities from gensim.parsing.preprocessing import STOPWORDS def split(text): ''' Split the input text into words/tokens; ignoring stopwords and empty strings ''' delimiters = ".", ",", ";", ":", "-", "(", ")", " ", "\t" regexPattern = '|'.join(map(re.escape, delimiters)) return [word for word in re.split(regexPattern, text.lower()) if word not in STOPWORDS and word != ""] def main(): # Load data df_train = pd.read_csv('data/train.csv', encoding="ISO-8859-1") df_desc = pd.read_csv('data/product_descriptions.csv', encoding="ISO-8859-1") df_attr = pd.read_csv('data/attributes_combined.csv', encoding="ISO-8859-1") # split the texts titles = [split(line) for line in df_train["product_title"]] descs = [split(line) for line in df_desc["product_description"]] attrs = [[str(line)] if isinstance(line, float) else split(line) for line in df_attr["attr_value"]] queries = [split(line) for line in df_train["search_term"]] texts = np.concatenate((titles, descs, attrs, queries)) # remove infrequent words from collections import defaultdict frequency = defaultdict(int) for text in texts: for token in text: frequency[token] += 1 texts = [[token for token in text if frequency[token] > 2] for text in texts] # build dictionary dictionary = corpora.Dictionary(texts) dictionary.save('homedepot.dict') print dictionary # actually build a bag-of-words corpus corpus = [dictionary.doc2bow(text) for text in texts] corpora.MmCorpus.serialize('homedepot.mm', corpus) # build Tf-idf model tfidf = models.TfidfModel(corpus) tfidf.save('homedepot.tfidf') if __name__ == "__main__": main()
CSC591ADBI-TeamProjects/Product-Search-Relevance
build_tfidf.py
Python
mit
1,752
0.021119
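The artifacts the script saves can be reloaded to weight unseen queries. A short sketch: the file names are the ones written above, the query tokens are hypothetical, and tokens must be preprocessed the same way split() does.

from gensim import corpora, models

dictionary = corpora.Dictionary.load('homedepot.dict')
corpus = corpora.MmCorpus('homedepot.mm')
tfidf = models.TfidfModel.load('homedepot.tfidf')

print(len(corpus), 'documents in the serialized corpus')
bow = dictionary.doc2bow(['angle', 'bracket'])  # hypothetical query
print(tfidf[bow])  # [(term_id, tfidf_weight), ...]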
# -*- coding: utf-8 -*-
import pytest

from lupin.validators import Equal
from lupin.errors import ValidationError


@pytest.fixture
def invalid():
    return Equal("sernine")


@pytest.fixture
def valid():
    return Equal("lupin")


class TestAnd(object):
    def test_returns_an_and_combination(self, valid, invalid):
        combination = valid & invalid
        with pytest.raises(ValidationError):
            combination("andrésy", [])


class TestOr(object):
    def test_returns_an_or_combination(self, valid, invalid):
        combination = valid | invalid
        combination("lupin", [])
holinnn/lupin
tests/lupin/validators/test_validator.py
Python
mit
601
0
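For context, a sketch of what the combinators under test do at call time; as in the fixtures above, a validator is invoked with a value and an error path:

from lupin.validators import Equal
from lupin.errors import ValidationError

either = Equal("lupin") | Equal("sernine")
either("sernine", [])  # passes: the right-hand validator matches

both = Equal("lupin") & Equal("sernine")
try:
    both("lupin", [])  # a value cannot equal two different strings
except ValidationError as error:
    print(error)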
from sandbox.dalz.data import ArticleCommentCountFileData, ArticlePublicationDateFileData, ArticleAuthorFileData, \ ArticleWordCountFileData, CommentAuthorCommentCountFilesDatas, AuthorArticleCountFilesData, \ AuthorArticlesCommentsCountAverageFilesData, AuthorArticlesWordsCountAverageFilesData, \ ArticlePublicationHourFileData, ArticlePatriceCommentCountFileData from tde.Implode import Implode class ArticleImplode(Implode): _name = 'Articles' _data_classes = [ArticleWordCountFileData, ArticleCommentCountFileData, ArticlePublicationDateFileData, ArticlePublicationHourFileData, ArticleAuthorFileData, ArticlePatriceCommentCountFileData] class AuthorImplode(Implode): _name = 'Authors' _data_classes = [AuthorArticleCountFilesData, AuthorArticlesCommentsCountAverageFilesData, AuthorArticlesWordsCountAverageFilesData]
buxx/TextDataExtractor
sandbox/dalz/implode.py
Python
gpl-2.0
1,003
0.002991
# pylint: disable=missing-docstring # pylint: disable=wildcard-import from .test_mocks import * from .cpython.testmock import * from .cpython.testwith import *
nivbend/mock-open
src/mock_open/test/__init__.py
Python
mit
161
0
from project_cron.utils import processutil def open(app_name): script = ''' if application "%s" is not running then tell application "%s" to activate end if ''' % (app_name, app_name) processutil.call(['/usr/bin/osascript', '-e', script]) def close(app_name): script = 'tell application "%s" to quit' % app_name processutil.call(['/usr/bin/osascript', '-e', script])
ecleya/project_cron
project_cron/utils/apputil.py
Python
mit
424
0
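Usage is a one-liner on macOS (both helpers shell out to /usr/bin/osascript); the application name is only an example:

from project_cron.utils import apputil

apputil.open('Safari')   # activates Safari only if it is not already running
apputil.close('Safari')  # tells the application to quit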
#!/usr/bin/env python # A python script to take targets from a google spreadsheet and run a # Nessus vulnerability scan. import json import gspread from oauth2client.service_account import ServiceAccountCredentials from nessrest import ness6rest import getpass # Login with your Google account's API key scopes = ['https://spreadsheets.google.com/feeds'] credentials = ServiceAccountCredentials.from_json_keyfile_name('API-xxxxxxxxxxxx.json', scopes) gc = gspread.authorize(credentials) # Open worksheet from spreadsheet wks = gc.open("hosts").sheet1 # Get all values from the first column host_list = wks.col_values(1) temp_hosts = [] for i in host_list: # ignore the first entry as it's just header information # del host_list[0] if i and i != 'IP': # iterate through all rows and add to a temp array temp_hosts.append(i) print(temp_hosts) # scan # Scan Settings # nessus_url = "https://nessus.example.com:8834" nessus_url = "https://192.168.111.10:8834" scan_policy = "Basic Network Scan" scan_name = "My Scan" # Scanner Credentials user = getpass._raw_input('User: ') password = getpass.getpass() # login = "username" # password = "password" scan = ness6rest.Scanner(url=nessus_url, login=user, password=password, insecure=True) # Set scan policy that should be used scan.policy_set(name=scan_policy) # alt_targets on edit can take an array otherwise a new scan expects a string hosts = ','.join(temp_hosts) # Set target and scan name scan.scan_add(targets=hosts, name=scan_name) # scan.scan_exists(targets=hosts, name=scan_name) # Run Scan scan.scan_run() # Download results # scan.action(action="scans", method="get") # for s in scan.res['scans']: # scan.scan_name = s['name'] # scan.scan_id = s['id'] # xml_nessus = scan.download_scan(export_format='nessus') # fp = open('%s_%s.nessus'%(scan.scan_name,scan.scan_id),"w") # fp.write(xml_nessus) # fp.close()
mokuso/scan-gspread-targets
scan-gspread-targets.py
Python
mit
1,952
0.000512
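The commented-out download step above, tidied into a runnable sketch; it reuses the same ness6rest calls on the scan object the script already created, with a context manager for the file handle:

scan.action(action="scans", method="get")
for s in scan.res['scans']:
    scan.scan_name = s['name']
    scan.scan_id = s['id']
    xml_nessus = scan.download_scan(export_format='nessus')
    with open('%s_%s.nessus' % (scan.scan_name, scan.scan_id), 'w') as fp:
        fp.write(xml_nessus)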
# -*- coding: utf-8 -*- # Copyright 2007-2021 The HyperSpy developers # # This file is part of HyperSpy. # # HyperSpy is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # HyperSpy is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with HyperSpy. If not, see <http://www.gnu.org/licenses/>. import pytest import hyperspy.api as hs from hyperspy.signals import ( Signal1D, Signal2D, EELSSpectrum, EDSTEMSpectrum, EDSSEMSpectrum, HologramImage ) from hyperspy.components1d import EELSCLEdge def test_signal(): s = Signal1D([10, 10]) s.axes_manager[0].convert_to_non_uniform_axis() with pytest.raises(NotImplementedError): s.fft() with pytest.raises(NotImplementedError): s.ifft() with pytest.raises(NotImplementedError): s.diff(0) with pytest.raises(NotImplementedError): s.rebin(scale=[1]) with pytest.raises(NotImplementedError): s.split(number_of_parts=2,axis=0) def test_signal1d(): s = Signal1D(([0, 1])) s.axes_manager[0].convert_to_non_uniform_axis() with pytest.raises(NotImplementedError): s.calibrate() with pytest.raises(NotImplementedError): s.shift1D([1]) with pytest.raises(NotImplementedError): s.estimate_shift1D([1]) with pytest.raises(NotImplementedError): s.smooth_savitzky_golay() with pytest.raises(NotImplementedError): s.smooth_tv() with pytest.raises(NotImplementedError): s.filter_butterworth() with pytest.raises(NotImplementedError): s.gaussian_filter(1) def test_signal2d(): s = Signal2D([[10, 10], [10, 10]]) s.axes_manager[0].convert_to_non_uniform_axis() with pytest.raises(NotImplementedError): s.align2D() def test_eels(): s = EELSSpectrum(([0, 1])) s0 = s.deepcopy() s.axes_manager[0].convert_to_non_uniform_axis() with pytest.raises(NotImplementedError): s.align_zero_loss_peak() with pytest.raises(NotImplementedError): s.create_model(ll=s) with pytest.raises(NotImplementedError): s.fourier_log_deconvolution(0) with pytest.raises(NotImplementedError): s.fourier_ratio_deconvolution(s) with pytest.raises(NotImplementedError): s.fourier_ratio_deconvolution(s0) with pytest.raises(NotImplementedError): s0.fourier_ratio_deconvolution(s) with pytest.raises(NotImplementedError): s.richardson_lucy_deconvolution(s) with pytest.raises(NotImplementedError): s.kramers_kronig_analysis() m = s.create_model() g = EELSCLEdge('N_K') with pytest.raises(NotImplementedError): m.append(g) def test_eds(): s = EDSTEMSpectrum(([0, 1])) s2 = EDSSEMSpectrum(([0, 1])) s.axes_manager[0].convert_to_non_uniform_axis() s2.axes_manager[0].convert_to_non_uniform_axis() s.set_microscope_parameters(20) with pytest.raises(NotImplementedError): s.get_calibration_from(s) with pytest.raises(NotImplementedError): s2.get_calibration_from(s2) m = s.create_model() with pytest.raises(NotImplementedError): m.add_family_lines('Al_Ka') with pytest.raises(NotImplementedError): m._set_energy_scale('Al_Ka', [1.0]) with pytest.raises(NotImplementedError): m._set_energy_offset('Al_Ka', [1.0]) def test_hologram_image(): s = HologramImage([[10, 10], [10, 10]]) s.axes_manager[0].convert_to_non_uniform_axis() s.axes_manager[1].convert_to_non_uniform_axis() with pytest.raises(NotImplementedError): 
s.estimate_sideband_position() with pytest.raises(NotImplementedError): s.estimate_sideband_size(s) with pytest.raises(NotImplementedError): s.reconstruct_phase() with pytest.raises(NotImplementedError): s.statistics() def test_lazy(): s = Signal1D([10, 10]).as_lazy() s.axes_manager[0].convert_to_non_uniform_axis() print(s) with pytest.raises(NotImplementedError): s.diff(0)
erh3cq/hyperspy
hyperspy/tests/test_non-uniform_not-implemented.py
Python
gpl-3.0
4,442
0.001576
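The pattern these tests exercise, in isolation: once an axis is converted to non-uniform, the unsupported operations raise NotImplementedError. A small sketch against hyperspy's public API:

import hyperspy.api as hs

s = hs.signals.Signal1D([0, 1, 2, 3])
s.axes_manager[0].convert_to_non_uniform_axis()

try:
    s.fft()
except NotImplementedError:
    print('fft() is not implemented for non-uniform axes')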
# coding: utf-8 from __future__ import unicode_literals from .common import InfoExtractor from ..compat import compat_str class TF1IE(InfoExtractor): """TF1 uses the wat.tv player.""" _VALID_URL = r'https?://(?:(?:videos|www|lci)\.tf1|(?:www\.)?(?:tfou|ushuaiatv|histoire|tvbreizh))\.fr/(?:[^/]+/)*(?P<id>[^/?#.]+)' _TESTS = [{ 'url': 'http://videos.tf1.fr/auto-moto/citroen-grand-c4-picasso-2013-presentation-officielle-8062060.html', 'info_dict': { 'id': '10635995', 'ext': 'mp4', 'title': 'Citroën Grand C4 Picasso 2013 : présentation officielle', 'description': 'Vidéo officielle du nouveau Citroën Grand C4 Picasso, lancé à l\'automne 2013.', }, 'params': { # Sometimes wat serves the whole file with the --test option 'skip_download': True, }, 'expected_warnings': ['HTTP Error 404'], }, { 'url': 'http://www.tfou.fr/chuggington/videos/le-grand-mysterioso-chuggington-7085291-739.html', 'info_dict': { 'id': 'le-grand-mysterioso-chuggington-7085291-739', 'ext': 'mp4', 'title': 'Le grand Mystérioso - Chuggington', 'description': 'Le grand Mystérioso - Emery rêve qu\'un article lui soit consacré dans le journal.', 'upload_date': '20150103', }, 'params': { # Sometimes wat serves the whole file with the --test option 'skip_download': True, }, 'skip': 'HTTP Error 410: Gone', }, { 'url': 'http://www.tf1.fr/tf1/koh-lanta/videos/replay-koh-lanta-22-mai-2015.html', 'only_matching': True, }, { 'url': 'http://lci.tf1.fr/sept-a-huit/videos/sept-a-huit-du-24-mai-2015-8611550.html', 'only_matching': True, }, { 'url': 'http://www.tf1.fr/hd1/documentaire/videos/mylene-farmer-d-une-icone.html', 'only_matching': True, }, { 'url': 'https://www.tf1.fr/tmc/quotidien-avec-yann-barthes/videos/quotidien-premiere-partie-11-juin-2019.html', 'info_dict': { 'id': '13641379', 'ext': 'mp4', 'title': 'md5:f392bc52245dc5ad43771650c96fb620', 'description': 'md5:44bc54f0a21322f5b91d68e76a544eae', 'upload_date': '20190611', }, 'params': { # Sometimes wat serves the whole file with the --test option 'skip_download': True, }, }] def _real_extract(self, url): video_id = self._match_id(url) webpage = self._download_webpage(url, video_id) wat_id = None data = self._parse_json( self._search_regex( r'__APOLLO_STATE__\s*=\s*({.+?})\s*(?:;|</script>)', webpage, 'data', default='{}'), video_id, fatal=False) if data: try: wat_id = next( video.get('streamId') for key, video in data.items() if isinstance(video, dict) and video.get('slug') == video_id) if not isinstance(wat_id, compat_str) or not wat_id.isdigit(): wat_id = None except StopIteration: pass if not wat_id: wat_id = self._html_search_regex( (r'(["\'])(?:https?:)?//www\.wat\.tv/embedframe/.*?(?P<id>\d{8})\1', r'(["\']?)streamId\1\s*:\s*(["\']?)(?P<id>\d+)\2'), webpage, 'wat id', group='id') return self.url_result('wat:%s' % wat_id, 'Wat')
vinegret/youtube-dl
youtube_dl/extractor/tf1.py
Python
unlicense
3,611
0.002777
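The extractor is normally driven through youtube-dl's top-level API; a sketch using one of the URLs from _TESTS above, fetching metadata only (network access required):

import youtube_dl

ydl = youtube_dl.YoutubeDL({'quiet': True})
info = ydl.extract_info(
    'http://www.tf1.fr/tf1/koh-lanta/videos/replay-koh-lanta-22-mai-2015.html',
    download=False)
print(info.get('id'), info.get('title'))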
import argparse import datetime import pathlib import numpy as np import torch import torch.nn as nn import torch.optim as optim import torch.nn.functional as F from mushroom_rl.algorithms.value import AveragedDQN, CategoricalDQN, DQN,\ DoubleDQN, MaxminDQN, DuelingDQN, NoisyDQN, Rainbow from mushroom_rl.approximators.parametric import TorchApproximator from mushroom_rl.core import Core, Logger from mushroom_rl.environments import * from mushroom_rl.policy import EpsGreedy from mushroom_rl.utils.dataset import compute_metrics from mushroom_rl.utils.parameters import LinearParameter, Parameter from mushroom_rl.utils.replay_memory import PrioritizedReplayMemory """ This script runs Atari experiments with DQN, and some of its variants, as presented in: "Human-Level Control Through Deep Reinforcement Learning". Mnih V. et al.. 2015. """ class Network(nn.Module): n_features = 512 def __init__(self, input_shape, output_shape, **kwargs): super().__init__() n_input = input_shape[0] n_output = output_shape[0] self._h1 = nn.Conv2d(n_input, 32, kernel_size=8, stride=4) self._h2 = nn.Conv2d(32, 64, kernel_size=4, stride=2) self._h3 = nn.Conv2d(64, 64, kernel_size=3, stride=1) self._h4 = nn.Linear(3136, self.n_features) self._h5 = nn.Linear(self.n_features, n_output) nn.init.xavier_uniform_(self._h1.weight, gain=nn.init.calculate_gain('relu')) nn.init.xavier_uniform_(self._h2.weight, gain=nn.init.calculate_gain('relu')) nn.init.xavier_uniform_(self._h3.weight, gain=nn.init.calculate_gain('relu')) nn.init.xavier_uniform_(self._h4.weight, gain=nn.init.calculate_gain('relu')) nn.init.xavier_uniform_(self._h5.weight, gain=nn.init.calculate_gain('linear')) def forward(self, state, action=None): h = F.relu(self._h1(state.float() / 255.)) h = F.relu(self._h2(h)) h = F.relu(self._h3(h)) h = F.relu(self._h4(h.view(-1, 3136))) q = self._h5(h) if action is None: return q else: q_acted = torch.squeeze(q.gather(1, action.long())) return q_acted class FeatureNetwork(nn.Module): def __init__(self, input_shape, output_shape, **kwargs): super().__init__() n_input = input_shape[0] self._h1 = nn.Conv2d(n_input, 32, kernel_size=8, stride=4) self._h2 = nn.Conv2d(32, 64, kernel_size=4, stride=2) self._h3 = nn.Conv2d(64, 64, kernel_size=3, stride=1) self._h4 = nn.Linear(3136, Network.n_features) nn.init.xavier_uniform_(self._h1.weight, gain=nn.init.calculate_gain('relu')) nn.init.xavier_uniform_(self._h2.weight, gain=nn.init.calculate_gain('relu')) nn.init.xavier_uniform_(self._h3.weight, gain=nn.init.calculate_gain('relu')) nn.init.xavier_uniform_(self._h4.weight, gain=nn.init.calculate_gain('relu')) def forward(self, state, action=None): h = F.relu(self._h1(state.float() / 255.)) h = F.relu(self._h2(h)) h = F.relu(self._h3(h)) h = F.relu(self._h4(h.view(-1, 3136))) return h def print_epoch(epoch, logger): logger.info('################################################################') logger.info('Epoch: %d' % epoch) logger.info('----------------------------------------------------------------') def get_stats(dataset, logger): score = compute_metrics(dataset) logger.info(('min_reward: %f, max_reward: %f, mean_reward: %f,' ' games_completed: %d' % score)) return score def experiment(): np.random.seed() # Argument parser parser = argparse.ArgumentParser() arg_game = parser.add_argument_group('Game') arg_game.add_argument("--name", type=str, default='BreakoutDeterministic-v4', help='Gym ID of the Atari game.') arg_game.add_argument("--screen-width", type=int, default=84, help='Width of the game screen.') 
    arg_game.add_argument("--screen-height", type=int, default=84,
                          help='Height of the game screen.')

    arg_mem = parser.add_argument_group('Replay Memory')
    arg_mem.add_argument("--initial-replay-size", type=int, default=50000,
                         help='Initial size of the replay memory.')
    arg_mem.add_argument("--max-replay-size", type=int, default=500000,
                         help='Max size of the replay memory.')
    arg_mem.add_argument("--prioritized", action='store_true',
                         help='Whether to use prioritized memory or not.')

    arg_net = parser.add_argument_group('Deep Q-Network')
    arg_net.add_argument("--optimizer",
                         choices=['adadelta', 'adam', 'rmsprop', 'rmspropcentered'],
                         default='adam',
                         help='Name of the optimizer to use.')
    arg_net.add_argument("--learning-rate", type=float, default=.0001,
                         help='Learning rate value of the optimizer.')
    arg_net.add_argument("--decay", type=float, default=.95,
                         help='Discount factor for the history coming from the '
                              'gradient momentum in rmspropcentered and '
                              'rmsprop.')
    arg_net.add_argument("--epsilon", type=float, default=1e-8,
                         help='Epsilon term used in rmspropcentered and '
                              'rmsprop.')

    arg_alg = parser.add_argument_group('Algorithm')
    arg_alg.add_argument("--algorithm",
                         choices=['dqn', 'ddqn', 'adqn', 'mmdqn', 'cdqn', 'dueldqn', 'ndqn', 'rainbow'],
                         default='dqn',
                         help='Name of the algorithm. dqn is for standard '
                              'DQN, ddqn is for Double DQN and adqn is for '
                              'Averaged DQN.')
    arg_alg.add_argument("--n-approximators", type=int, default=1,
                         help="Number of approximators used in the ensemble for "
                              "AveragedDQN or MaxminDQN.")
    arg_alg.add_argument("--batch-size", type=int, default=32,
                         help='Batch size for each fit of the network.')
    arg_alg.add_argument("--history-length", type=int, default=4,
                         help='Number of frames composing a state.')
    arg_alg.add_argument("--target-update-frequency", type=int, default=10000,
                         help='Number of collected samples before each update '
                              'of the target network.')
    arg_alg.add_argument("--evaluation-frequency", type=int, default=250000,
                         help='Number of collected samples before each '
                              'evaluation. An epoch ends after this number of '
                              'steps.')
    arg_alg.add_argument("--train-frequency", type=int, default=4,
                         help='Number of collected samples before each fit of '
                              'the neural network.')
    arg_alg.add_argument("--max-steps", type=int, default=50000000,
                         help='Total number of collected samples.')
    arg_alg.add_argument("--final-exploration-frame", type=int, default=1000000,
                         help='Number of collected samples until the exploration '
                              'rate stops decreasing.')
    arg_alg.add_argument("--initial-exploration-rate", type=float, default=1.,
                         help='Initial value of the exploration rate.')
    arg_alg.add_argument("--final-exploration-rate", type=float, default=.1,
                         help='Final value of the exploration rate. When it '
                              'reaches this value, it stays constant.')
    arg_alg.add_argument("--test-exploration-rate", type=float, default=.05,
                         help='Exploration rate used during evaluation.')
    arg_alg.add_argument("--test-samples", type=int, default=125000,
                         help='Number of collected samples for each '
                              'evaluation.')
    arg_alg.add_argument("--max-no-op-actions", type=int, default=30,
                         help='Maximum number of no-op actions performed at the '
                              'beginning of the episodes.')
    arg_alg.add_argument("--alpha-coeff", type=float, default=.6,
                         help='Prioritization exponent for prioritized experience replay.')
    arg_alg.add_argument("--n-atoms", type=int, default=51,
                         help='Number of atoms for Categorical DQN.')
    arg_alg.add_argument("--v-min", type=int, default=-10,
                         help='Minimum action-value for Categorical DQN.')
    arg_alg.add_argument("--v-max", type=int, default=10,
                         help='Maximum action-value for Categorical DQN.')
    arg_alg.add_argument("--n-steps-return", type=int, default=3,
                         help='Number of steps for n-step return for Rainbow.')
    arg_alg.add_argument("--sigma-coeff", type=float, default=.5,
                         help='Sigma0 coefficient for noise initialization in '
                              'NoisyDQN and Rainbow.')

    arg_utils = parser.add_argument_group('Utils')
    arg_utils.add_argument('--use-cuda', action='store_true',
                           help='Flag specifying whether to use the GPU.')
    arg_utils.add_argument('--save', action='store_true',
                           help='Flag specifying whether to save the model.')
    arg_utils.add_argument('--load-path', type=str,
                           help='Path of the model to be loaded.')
    arg_utils.add_argument('--render', action='store_true',
                           help='Flag specifying whether to render the game.')
    arg_utils.add_argument('--quiet', action='store_true',
                           help='Flag specifying whether to hide the progress '
                                'bar.')
    arg_utils.add_argument('--debug', action='store_true',
                           help='Flag specifying whether the script has to be '
                                'run in debug mode.')

    args = parser.parse_args()

    scores = list()

    optimizer = dict()
    if args.optimizer == 'adam':
        optimizer['class'] = optim.Adam
        optimizer['params'] = dict(lr=args.learning_rate,
                                   eps=args.epsilon)
    elif args.optimizer == 'adadelta':
        optimizer['class'] = optim.Adadelta
        optimizer['params'] = dict(lr=args.learning_rate,
                                   eps=args.epsilon)
    elif args.optimizer == 'rmsprop':
        optimizer['class'] = optim.RMSprop
        optimizer['params'] = dict(lr=args.learning_rate,
                                   alpha=args.decay,
                                   eps=args.epsilon)
    elif args.optimizer == 'rmspropcentered':
        optimizer['class'] = optim.RMSprop
        optimizer['params'] = dict(lr=args.learning_rate,
                                   alpha=args.decay,
                                   eps=args.epsilon,
                                   centered=True)
    else:
        raise ValueError

    # Summary folder
    folder_name = './logs/atari_' + args.algorithm + '_' + args.name +\
        '_' + datetime.datetime.now().strftime('%Y-%m-%d_%H-%M-%S')
    pathlib.Path(folder_name).mkdir(parents=True)

    # Settings
    if args.debug:
        initial_replay_size = 50
        max_replay_size = 500
        train_frequency = 5
        target_update_frequency = 10
        test_samples = 20
        evaluation_frequency = 50
        max_steps = 1000
    else:
        initial_replay_size = args.initial_replay_size
        max_replay_size = args.max_replay_size
        train_frequency = args.train_frequency
        target_update_frequency = args.target_update_frequency
        test_samples = args.test_samples
        evaluation_frequency = args.evaluation_frequency
        max_steps = args.max_steps

    # MDP
    mdp = Atari(args.name, args.screen_width, args.screen_height,
                ends_at_life=True, history_length=args.history_length,
                max_no_op_actions=args.max_no_op_actions)

    if args.load_path:
        logger = Logger(DQN.__name__, results_dir=None)
        logger.strong_line()
        logger.info('Experiment Algorithm: ' + DQN.__name__)

        # Agent
        agent = DQN.load(args.load_path)
epsilon_test = Parameter(value=args.test_exploration_rate) agent.policy.set_epsilon(epsilon_test) # Algorithm core_test = Core(agent, mdp) # Evaluate model dataset = core_test.evaluate(n_steps=args.test_samples, render=args.render, quiet=args.quiet) get_stats(dataset, logger) else: # Policy epsilon = LinearParameter(value=args.initial_exploration_rate, threshold_value=args.final_exploration_rate, n=args.final_exploration_frame) epsilon_test = Parameter(value=args.test_exploration_rate) epsilon_random = Parameter(value=1) pi = EpsGreedy(epsilon=epsilon_random) # Approximator approximator_params = dict( network=Network if args.algorithm not in ['dueldqn', 'cdqn', 'ndqn', 'rainbow'] else FeatureNetwork, input_shape=mdp.info.observation_space.shape, output_shape=(mdp.info.action_space.n,), n_actions=mdp.info.action_space.n, n_features=Network.n_features, optimizer=optimizer, use_cuda=args.use_cuda ) if args.algorithm not in ['cdqn', 'rainbow']: approximator_params['loss'] = F.smooth_l1_loss approximator = TorchApproximator if args.prioritized: replay_memory = PrioritizedReplayMemory( initial_replay_size, max_replay_size, alpha=args.alpha_coeff, beta=LinearParameter(.4, threshold_value=1, n=max_steps // train_frequency) ) else: replay_memory = None # Agent algorithm_params = dict( batch_size=args.batch_size, target_update_frequency=target_update_frequency // train_frequency, replay_memory=replay_memory, initial_replay_size=initial_replay_size, max_replay_size=max_replay_size ) if args.algorithm == 'dqn': alg = DQN agent = alg(mdp.info, pi, approximator, approximator_params=approximator_params, **algorithm_params) elif args.algorithm == 'ddqn': alg = DoubleDQN agent = alg(mdp.info, pi, approximator, approximator_params=approximator_params, **algorithm_params) elif args.algorithm == 'adqn': alg = AveragedDQN agent = alg(mdp.info, pi, approximator, approximator_params=approximator_params, n_approximators=args.n_approximators, **algorithm_params) elif args.algorithm == 'mmdqn': alg = MaxminDQN agent = alg(mdp.info, pi, approximator, approximator_params=approximator_params, n_approximators=args.n_approximators, **algorithm_params) elif args.algorithm == 'dueldqn': alg = DuelingDQN agent = alg(mdp.info, pi, approximator_params=approximator_params, **algorithm_params) elif args.algorithm == 'cdqn': alg = CategoricalDQN agent = alg(mdp.info, pi, approximator_params=approximator_params, n_atoms=args.n_atoms, v_min=args.v_min, v_max=args.v_max, **algorithm_params) elif args.algorithm == 'ndqn': alg = NoisyDQN agent = alg(mdp.info, pi, approximator_params=approximator_params, sigma_coeff=args.sigma_coeff, **algorithm_params) elif args.algorithm == 'rainbow': alg = Rainbow beta = LinearParameter(.4, threshold_value=1, n=max_steps // train_frequency) agent = alg(mdp.info, pi, approximator_params=approximator_params, n_atoms=args.n_atoms, v_min=args.v_min, v_max=args.v_max, n_steps_return=args.n_steps_return, alpha_coeff=args.alpha_coeff, beta=beta, sigma_coeff=args.sigma_coeff, **algorithm_params) logger = Logger(alg.__name__, results_dir=None) logger.strong_line() logger.info('Experiment Algorithm: ' + alg.__name__) # Algorithm core = Core(agent, mdp) # RUN # Fill replay memory with random dataset print_epoch(0, logger) core.learn(n_steps=initial_replay_size, n_steps_per_fit=initial_replay_size, quiet=args.quiet) if args.save: agent.save(folder_name + '/agent_0.msh') # Evaluate initial policy pi.set_epsilon(epsilon_test) mdp.set_episode_end(False) dataset = core.evaluate(n_steps=test_samples, 
render=args.render, quiet=args.quiet) scores.append(get_stats(dataset, logger)) np.save(folder_name + '/scores.npy', scores) for n_epoch in range(1, max_steps // evaluation_frequency + 1): print_epoch(n_epoch, logger) logger.info('- Learning:') # learning step pi.set_epsilon(epsilon) mdp.set_episode_end(True) core.learn(n_steps=evaluation_frequency, n_steps_per_fit=train_frequency, quiet=args.quiet) if args.save: agent.save(folder_name + '/agent_' + str(n_epoch) + '.msh') logger.info('- Evaluation:') # evaluation step pi.set_epsilon(epsilon_test) mdp.set_episode_end(False) dataset = core.evaluate(n_steps=test_samples, render=args.render, quiet=args.quiet) scores.append(get_stats(dataset, logger)) np.save(folder_name + '/scores.npy', scores) return scores if __name__ == '__main__': experiment()
carloderamo/mushroom
examples/atari_dqn.py
Python
mit
19,062
0.00063
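A worked sketch of the exploration schedule the script configures: epsilon decays linearly from --initial-exploration-rate to --final-exploration-rate over --final-exploration-frame steps, then stays constant. This is plain Python illustrating what LinearParameter is assumed to do, not mushroom_rl's implementation:

def linear_epsilon(step, initial=1.0, final=0.1, final_frame=1000000):
    # clamp the interpolation once the final frame is reached
    fraction = min(float(step) / final_frame, 1.0)
    return initial + fraction * (final - initial)

assert abs(linear_epsilon(0) - 1.0) < 1e-9
assert abs(linear_epsilon(500000) - 0.55) < 1e-9  # halfway through the decay
assert abs(linear_epsilon(2000000) - 0.1) < 1e-9  # constant after the final frame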
import asyncio import logging import re import threading from os import PathLike from typing import Optional, Set, Union from uqbar.objects import new import supriya.exceptions from supriya.commands import ( # type: ignore FailResponse, GroupNewRequest, GroupQueryTreeRequest, NotifyRequest, QuitRequest, SyncRequest, ) from supriya.enums import CalculationRate, NodeAction from supriya.exceptions import ServerOffline from supriya.osc.protocols import ( AsyncOscProtocol, HealthCheck, OscProtocolOffline, ThreadedOscProtocol, ) from supriya.querytree import QueryTreeGroup, QueryTreeSynth from supriya.scsynth import Options, find from ..typing import AddActionLike, CalculationRateLike from .allocators import BlockAllocator, NodeIdAllocator from .buffers import Buffer, BufferGroup from .buses import AudioInputBusGroup, AudioOutputBusGroup, Bus, BusGroup from .meters import Meters from .nodes import Group, Node, RootNode, Synth from .protocols import AsyncProcessProtocol, SyncProcessProtocol from .recorder import Recorder try: from .shm import ServerSHM except (ImportError, ModuleNotFoundError): ServerSHM = None logger = logging.getLogger("supriya.server") DEFAULT_IP_ADDRESS = "127.0.0.1" DEFAULT_PORT = 57110 class BaseServer: ### INITIALIZER ### def __init__(self): # address self._ip_address = DEFAULT_IP_ADDRESS self._port = DEFAULT_PORT # process self._client_id = 0 self._is_owner = False self._is_running = False self._latency = 0.1 self._maximum_logins = 1 self._options = Options() self._osc_protocol = None self._process_protocol = None self._status = None self._shm = None # allocators self._audio_bus_allocator = None self._buffer_allocator = None self._control_bus_allocator = None self._node_id_allocator = None self._sync_id = 0 # proxy mappings self._synthdefs = {} ### SPECIAL METHODS ### def __repr__(self): if not self.is_running: return f"<{type(self).__name__}: offline>" string = "<{name}: {protocol}://{ip}:{port}, " string += "{inputs}i{outputs}o>" return string.format( name=type(self).__name__, protocol=self.options.protocol, ip=self.ip_address, port=self.port, inputs=self.options.input_bus_channel_count, outputs=self.options.output_bus_channel_count, ) ### PRIVATE METHODS ### def _handle_failed_response(self, message): logger.warning("Fail: {}".format(message)) def _handle_status_reply_response(self, message): from supriya.commands import Response response = Response.from_osc_message(message) self._status = response def _handle_synthdef_removed_response(self, message): from supriya.commands import Response response = Response.from_osc_message(message) synthdef_name = response.synthdef_name self._synthdefs.pop(synthdef_name, None) def _setup_allocators(self): self._audio_bus_allocator = BlockAllocator( heap_maximum=self._options.audio_bus_channel_count, heap_minimum=self._options.first_private_bus_id, ) self._buffer_allocator = BlockAllocator(heap_maximum=self._options.buffer_count) self._control_bus_allocator = BlockAllocator( heap_maximum=self._options.control_bus_channel_count ) self._node_id_allocator = NodeIdAllocator( initial_node_id=self._options.initial_node_id, client_id=self.client_id ) self._sync_id = self.client_id << 26 def _setup_osc_callbacks(self): self._osc_protocol.register( pattern="/d_removed", procedure=self._handle_synthdef_removed_response ) self._osc_protocol.register( pattern="/status.reply", procedure=self._handle_status_reply_response ) self._osc_protocol.register( pattern="/fail", procedure=self._handle_failed_response ) def _setup_shm(self): if ServerSHM is 
None:
            return
        self._shm = ServerSHM(self.port, self.options.control_bus_channel_count)

    def _teardown_allocators(self):
        self._audio_bus_allocator = None
        self._buffer_allocator = None
        self._control_bus_allocator = None
        self._node_id_allocator = None
        self._sync_id = 0

    def _teardown_shm(self):
        self._shm = None

    ### PUBLIC METHODS ###

    def send(self, message):
        if not message:
            raise ValueError
        if not self.is_running:
            raise ServerOffline
        self._osc_protocol.send(message)
        return self

    ### PUBLIC PROPERTIES ###

    @property
    def audio_bus_allocator(self):
        return self._audio_bus_allocator

    @property
    def buffer_allocator(self):
        return self._buffer_allocator

    @property
    def client_id(self) -> int:
        return self._client_id

    @property
    def control_bus_allocator(self):
        return self._control_bus_allocator

    @property
    def ip_address(self) -> str:
        return self._ip_address

    @property
    def is_owner(self) -> bool:
        return self._is_owner

    @property
    def is_running(self) -> bool:
        return self._is_running

    @property
    def latency(self) -> float:
        return self._latency

    @latency.setter
    def latency(self, latency):
        self._latency = float(latency)

    @property
    def maximum_logins(self) -> int:
        return self._maximum_logins

    @property
    def next_sync_id(self) -> int:
        sync_id = self._sync_id
        self._sync_id += 1
        return sync_id

    @property
    def node_id_allocator(self):
        return self._node_id_allocator

    @property
    def osc_protocol(self):
        return self._osc_protocol

    @property
    def options(self) -> Options:
        return self._options

    @property
    def port(self) -> int:
        return self._port

    @property
    def process_protocol(self):
        return self._process_protocol

    @property
    def status(self):
        return self._status


class AsyncServer(BaseServer):

    ### CLASS VARIABLES ###

    _servers: Set["AsyncServer"] = set()

    ### INITIALIZER ###

    def __init__(self):
        BaseServer.__init__(self)
        self._boot_future = None
        self._quit_future = None

    ### SPECIAL METHODS ###

    def __contains__(self, expr):
        if isinstance(expr, supriya.synthdefs.SynthDef):
            name = expr.actual_name
            if name in self._synthdefs and self._synthdefs[name] == expr:
                return True
        return False

    ### PRIVATE METHODS ###

    async def _connect(self):
        self._osc_protocol = AsyncOscProtocol()
        await self._osc_protocol.connect(
            ip_address=self._ip_address,
            port=self._port,
            healthcheck=HealthCheck(
                request_pattern=["/status"],
                response_pattern=["/status.reply"],
                callback=self._shutdown,
                max_attempts=5,
                timeout=1.0,
                backoff_factor=1.5,
            ),
        )
        self._is_running = True
        self._setup_osc_callbacks()
        await self._setup_notifications()
        self._setup_allocators()
        if self.client_id == 0:
            await self._setup_default_groups()
            await self._setup_system_synthdefs()
        self.boot_future.set_result(True)
        self._servers.add(self)

    async def _disconnect(self):
        self._is_running = False
        self._is_owner = False
        self._client_id = None
        self._maximum_logins = None
        self._teardown_shm()
        await self._osc_protocol.disconnect()
        await self._osc_protocol.exit_future
        self._teardown_allocators()
        if self in self._servers:
            self._servers.remove(self)
        self.quit_future.set_result(True)
        if not self.boot_future.done():
            self.boot_future.set_result(False)

    async def _setup_default_groups(self):
        request = GroupNewRequest(
            items=[
                GroupNewRequest.Item(1, i, 0) for i in range(1, self.maximum_logins + 1)
            ]
        )
        self.send(request.to_osc())

    async def _setup_notifications(self):
        request = NotifyRequest(True)
        response = await request.communicate_async(server=self)
        if isinstance(response, FailResponse):
            await self._shutdown()
            raise supriya.exceptions.TooManyClients
        self._client_id, self._maximum_logins = response.action[1], response.action[2]

    async
def _setup_system_synthdefs(self): pass async def _shutdown(self): if not self.is_running: return elif self.is_owner: await self.quit() else: await self.disconnect() ### PUBLIC METHODS ### async def boot( self, *, ip_address: str = DEFAULT_IP_ADDRESS, port: int = DEFAULT_PORT, scsynth_path: Optional[str] = None, options: Optional[Options] = None, **kwargs, ) -> "AsyncServer": if self._is_running: raise supriya.exceptions.ServerOnline port = port or DEFAULT_PORT loop = asyncio.get_running_loop() self._boot_future = loop.create_future() self._quit_future = loop.create_future() self._options = new(options or Options(), **kwargs) scsynth_path = find(scsynth_path) self._process_protocol = AsyncProcessProtocol() await self._process_protocol.boot(self._options, scsynth_path, port) if not await self._process_protocol.boot_future: self._boot_future.set_result(False) self._quit_future.set_result(True) raise supriya.exceptions.ServerCannotBoot self._ip_address = ip_address self._is_owner = True self._port = port await self._connect() return self async def connect( self, *, ip_address: str = DEFAULT_IP_ADDRESS, port: int = DEFAULT_PORT ) -> "AsyncServer": if self._is_running: raise supriya.exceptions.ServerOnline loop = asyncio.get_running_loop() self._boot_future = loop.create_future() self._quit_future = loop.create_future() self._ip_address = ip_address self._is_owner = False self._port = port await self._connect() return self async def disconnect(self) -> "AsyncServer": if not self._is_running: raise ServerOffline if self._is_owner: raise supriya.exceptions.OwnedServerShutdown( "Cannot disconnect from owned server." ) await self._disconnect() return self async def query(self, include_controls=True) -> QueryTreeGroup: request = GroupQueryTreeRequest(node_id=0, include_controls=include_controls) response = await request.communicate_async(server=self) return response.query_tree_group async def quit(self, force: bool = False) -> "AsyncServer": if not self._is_running: return self if not self._is_owner and not force: raise supriya.exceptions.UnownedServerShutdown( "Cannot quit unowned server without force flag." ) try: await QuitRequest().communicate_async(server=self, sync=True, timeout=1) except (OscProtocolOffline, asyncio.TimeoutError): pass if self._process_protocol is not None: self._process_protocol.quit() await self._disconnect() return self ### PUBLIC PROPERTIES ### @property def boot_future(self): return self._boot_future @property def default_group(self) -> int: return self.client_id + 1 @property def quit_future(self): return self._quit_future class Server(BaseServer): """ An scsynth server proxy. 
:: >>> import supriya.realtime >>> server = supriya.realtime.Server() >>> server.boot() <Server: udp://127.0.0.1:57110, 8i8o> :: >>> server.quit() <Server: offline> """ ### CLASS VARIABLES ### _servers: Set["Server"] = set() ### INITIALIZER ### def __init__(self): BaseServer.__init__(self) self._lock = threading.RLock() # proxies self._audio_input_bus_group = None self._audio_output_bus_group = None self._default_group = None self._root_node = None self._meters = Meters(self) self._recorder = Recorder(self) # proxy mappings self._audio_buses = {} self._buffer_proxies = {} self._buffers = {} self._control_bus_proxies = {} self._control_buses = {} self._nodes = {} self._pending_synths = {} ### SPECIAL METHODS ### def __contains__(self, expr): import supriya.realtime import supriya.synthdefs if isinstance(expr, supriya.realtime.Node): if expr.server is not self: return False node_id = expr.node_id if node_id in self._nodes and self._nodes[node_id] is expr: return True elif isinstance(expr, supriya.synthdefs.SynthDef): name = expr.actual_name if name in self._synthdefs and self._synthdefs[name] == expr: return True elif isinstance(expr, supriya.realtime.ServerObject): return expr.server is self return False def __getitem__(self, item: Union[int, str]) -> Union[Buffer, Bus, Node]: """ Get ``item`` from server. :: >>> server = supriya.Server().boot() >>> supriya.Synth(name="foo").allocate(server) <+ Synth: 1000 default (foo)> :: >>> server[1000] <+ Synth: 1000 default (foo)> :: >>> server["foo"] <+ Synth: 1000 default (foo)> :: >>> server["b10"] <+ Buffer: 10, 1ch, 1> :: >>> server["a0"] <+ Bus: 0 (audio)> :: >>> server["c16"] <+ Bus: 16 (control)> :: >>> server = server.quit() >>> server["c16"] Traceback (most recent call last): ... supriya.exceptions.ServerOffline """ import supriya if not self.is_running: raise ServerOffline if isinstance(item, str): match = re.match(r"b(?P<id>\d+)", item) if match: id_ = int(match.groupdict()["id"]) return supriya.realtime.Buffer(id_).allocate(server=self) match = re.match(r"c(?P<id>\d+)", item) if match: id_ = int(match.groupdict()["id"]) return supriya.realtime.Bus(id_, "control").allocate(server=self) match = re.match(r"a(?P<id>\d+)", item) if match: id_ = int(match.groupdict()["id"]) return supriya.realtime.Bus(id_, "audio").allocate(server=self) if self.root_node is None: raise ServerOffline result = self.root_node[item] elif isinstance(item, int): result = self._nodes.get(item) else: raise ValueError(item) if isinstance(result, set) and len(result) == 1: return tuple(result)[0] return result def __graph__(self): """ Graph server. :: >>> server = supriya.Server().boot() >>> group = supriya.Group( ... [ ... supriya.Synth(), ... supriya.Group( ... [ ... supriya.Synth(), ... supriya.Synth(), ... ] ... ), ... ] ... 
).allocate(server) :: >>> graph = server.__graph__() >>> print(format(graph, "graphviz")) digraph G { graph [bgcolor=transparent, color=lightslategrey, dpi=72, fontname=Arial, outputorder=edgesfirst, overlap=prism, penwidth=2, rankdir=TB, ranksep=0.5, splines=spline, style="dotted, rounded"]; node [fontname=Arial, fontsize=12, penwidth=2, shape=Mrecord, style="filled, rounded"]; edge [penwidth=2]; "root-node-0" [fillcolor=lightsalmon2, label="{ <f_0_0> RootNode | <f_0_1> id: 0 }"]; "group-1" [fillcolor=lightsteelblue2, label="{ <f_0_0> Group | <f_0_1> id: 1 }"]; "group-1000" [fillcolor=lightsteelblue2, label="{ <f_0_0> Group | <f_0_1> id: 1000 }"]; "synth-1001" [fillcolor=lightgoldenrod2, label="{ <f_0_0> Synth | <f_0_1> id: 1001 }"]; "group-1002" [fillcolor=lightsteelblue2, label="{ <f_0_0> Group | <f_0_1> id: 1002 }"]; "synth-1003" [fillcolor=lightgoldenrod2, label="{ <f_0_0> Synth | <f_0_1> id: 1003 }"]; "synth-1004" [fillcolor=lightgoldenrod2, label="{ <f_0_0> Synth | <f_0_1> id: 1004 }"]; "root-node-0" -> "group-1"; "group-1" -> "group-1000"; "group-1000" -> "synth-1001"; "group-1000" -> "group-1002"; "group-1002" -> "synth-1003"; "group-1002" -> "synth-1004"; } :: >>> supriya.graph(server) # doctest: +SKIP """ return self.root_node.__graph__() ### PRIVATE METHODS ### def _connect(self): self._osc_protocol = ThreadedOscProtocol() self._osc_protocol.connect( ip_address=self.ip_address, port=self.port, healthcheck=HealthCheck( request_pattern=["/status"], response_pattern=["/status.reply"], callback=self._shutdown, max_attempts=5, timeout=1.0, backoff_factor=1.5, ), ) self._is_running = True self._setup_osc_callbacks() self._setup_notifications() self._setup_allocators() self._setup_proxies() if self.client_id == 0: self._setup_default_groups() self._setup_system_synthdefs() self._servers.add(self) def _disconnect(self): logger.info("disconnecting") self._is_running = False self._is_owner = False self._client_id = None self._maximum_logins = None self._teardown_shm() self._osc_protocol.disconnect() self._teardown_proxies() self._teardown_allocators() if self in self._servers: self._servers.remove(self) logger.info("disconnected") def _get_buffer_proxy(self, buffer_id): import supriya.realtime buffer_proxy = self._buffer_proxies.get(buffer_id) if not buffer_proxy: buffer_proxy = supriya.realtime.BufferProxy( buffer_id=buffer_id, server=self ) self._buffer_proxies[buffer_id] = buffer_proxy return buffer_proxy def _get_control_bus_proxy(self, bus_id): import supriya.realtime import supriya.synthdefs control_bus_proxy = self._control_bus_proxies.get(bus_id) if not control_bus_proxy: control_bus_proxy = supriya.realtime.BusProxy( bus_id=bus_id, calculation_rate=supriya.CalculationRate.CONTROL, server=self, ) self._control_bus_proxies[bus_id] = control_bus_proxy return control_bus_proxy def _handle_buffer_info_response(self, message): from supriya.commands import Response response = Response.from_osc_message(message) for item in response.items: buffer_proxy = self._get_buffer_proxy(item.buffer_id) if buffer_proxy: buffer_proxy._handle_response(item) def _handle_control_bus_set_response(self, message): from supriya.commands import Response response = Response.from_osc_message(message) for item in response: bus_id = item.bus_id bus_proxy = self._get_control_bus_proxy(bus_id) bus_proxy._value = item.bus_value def _handle_control_bus_setn_response(self, message): from supriya.commands import Response response = Response.from_osc_message(message) for item in response: starting_bus_id = 
item.starting_bus_id for i, value in enumerate(item.bus_values): bus_id = starting_bus_id + i bus_proxy = self._get_control_bus_proxy(bus_id) bus_proxy._value = value def _handle_node_info_response(self, message): from supriya.commands import Response from supriya.realtime import Group, Synth response = Response.from_osc_message(message) with self._lock: node_id = response.node_id node = self._nodes.get(node_id) if node is not None: node._handle_response(response) elif response.action == NodeAction.NODE_CREATED: if response.is_group: node = Group() else: node = self._pending_synths.pop(node_id, Synth()) node._register_with_local_server(self, node_id=response.node_id) parent = self._nodes[response.parent_id] node._set_parent(parent) if response.previous_node_id: previous_child = self._nodes[response.previous_node_id] index = parent.index(previous_child) parent._children.insert(index + 1, node) else: parent._children.append(node) def _handle_synthdef_removed_response(self, message): from supriya.commands import Response response = Response.from_osc_message(message) synthdef_name = response.synthdef_name self._synthdefs.pop(synthdef_name, None) def _rehydrate(self): from supriya.realtime import Group, Synth def recurse(query_tree_node, node): for query_tree_child in query_tree_node.children: if isinstance(query_tree_child, QueryTreeGroup): group = Group() group._register_with_local_server( self, node_id=query_tree_child.node_id ) node._children.append(group) recurse(query_tree_child, group) elif isinstance(query_tree_child, QueryTreeSynth): synth = Synth() synth._register_with_local_server( self, node_id=query_tree_child.node_id ) node._children.append(synth) for query_tree_control in query_tree_child.children: pass recurse(self.query(), self.root_node) def _setup_notifications(self): request = NotifyRequest(True) response = request.communicate(server=self) if isinstance(response, FailResponse): self._shutdown() raise supriya.exceptions.TooManyClients if len(response.action) == 2: # supernova doesn't provide a max logins value self._client_id, self._maximum_logins = response.action[1], 1 else: self._client_id, self._maximum_logins = response.action[1:3] def _setup_default_groups(self): default_groups = [ supriya.Group(node_id_is_permanent=True) for _ in range(self.maximum_logins) ] self.root_node.extend(default_groups) self._default_group = default_groups[self.client_id] def _setup_proxies(self): self._audio_input_bus_group = AudioInputBusGroup(self) self._audio_output_bus_group = AudioOutputBusGroup(self) self._root_node = supriya.realtime.RootNode(server=self) self._nodes[0] = self._root_node def _setup_osc_callbacks(self): super()._setup_osc_callbacks() self._osc_protocol.register( pattern="/b_info", procedure=self._handle_buffer_info_response ) self._osc_protocol.register( pattern="/c_set", procedure=self._handle_control_bus_set_response ) self._osc_protocol.register( pattern="/c_setn", procedure=self._handle_control_bus_setn_response ) for pattern in ( "/n_end", "/n_go", "/n_info", "/n_move", "/n_off", "/n_on", "/n_set", "/n_setn", ): self._osc_protocol.register( pattern=pattern, procedure=self._handle_node_info_response ) def _setup_system_synthdefs(self, local_only=False): import supriya.assets.synthdefs import supriya.synthdefs system_synthdefs = [] for name in dir(supriya.assets.synthdefs): if not name.startswith("system_"): continue system_synthdef = getattr(supriya.assets.synthdefs, name) if not isinstance(system_synthdef, supriya.synthdefs.SynthDef): continue 
system_synthdefs.append(system_synthdef) if local_only: for synthdef in system_synthdefs: synthdef._register_with_local_server(self) else: supriya.synthdefs.SynthDef._allocate_synthdefs(system_synthdefs, self) def _teardown_proxies(self): for set_ in tuple(self._audio_buses.values()): for x in tuple(set_): x.free() for set_ in tuple(self._control_buses.values()): for x in tuple(set_): x.free() for set_ in tuple(self._buffers.values()): for x in tuple(set_): x.free() for x in tuple(self._nodes.values()): x.free() self._audio_buses.clear() self._audio_input_bus_group = None self._audio_output_bus_group = None self._buffers.clear() self._buffer_proxies.clear() self._control_buses.clear() self._control_bus_proxies.clear() self._default_group = None self._nodes.clear() self._root_node = None self._synthdefs.clear() def _shutdown(self): if not self.is_running: return logger.info("shutting down") if self.is_owner: self.quit() else: self.disconnect() ### PUBLIC METHODS ### def add_buffer( self, channel_count: int = None, frame_count: int = None, starting_frame: int = None, file_path: Optional[PathLike] = None, ) -> Buffer: """ Add a buffer. :: >>> server = supriya.Server().boot() >>> server.add_buffer(channel_count=2, frame_count=1024) <+ Buffer: 0, 2ch, 1024> """ buffer_ = Buffer() if file_path: channel_indices = None if channel_count: channel_indices = tuple(range(channel_count)) buffer_.allocate_from_file( self, file_path, channel_indices=channel_indices, frame_count=frame_count, starting_frame=starting_frame, ) else: buffer_.allocate( channel_count=channel_count, frame_count=frame_count, server=self ) return buffer_ def add_buffer_group( self, buffer_count: int = 1, channel_count: int = None, frame_count: int = None ) -> BufferGroup: """ Add a buffer group. :: >>> server = supriya.Server().boot() >>> server.add_buffer_group(buffer_count=8, channel_count=1, frame_count=1024) <+ BufferGroup{8}: 0> """ buffer_group = BufferGroup(buffer_count) buffer_group.allocate( channel_count=channel_count, frame_count=frame_count, server=self ) return buffer_group def add_bus( self, calculation_rate: CalculationRateLike = CalculationRate.CONTROL ) -> Bus: """ Add a bus. :: >>> server = supriya.Server().boot() >>> server.add_bus() <+ Bus: 0 (control)> """ bus = Bus(calculation_rate=calculation_rate) bus.allocate(server=self) return bus def add_bus_group( self, bus_count: int = 1, calculation_rate: CalculationRateLike = CalculationRate.CONTROL, ) -> BusGroup: """ Add a bus group. :: >>> server = supriya.Server().boot() >>> server.add_bus_group(4, "audio") <+ BusGroup{4}: 16 (audio)> """ bus_group = BusGroup(bus_count=bus_count, calculation_rate=calculation_rate) bus_group.allocate(server=self) return bus_group def add_group(self, add_action: AddActionLike = None) -> Group: """ Add a group relative to the default group via ``add_action``. :: >>> server = supriya.Server().boot() >>> print(server.query()) NODE TREE 0 group 1 group :: >>> group = server.add_group() >>> print(server.query()) NODE TREE 0 group 1 group 1000 group """ if self.default_group is None: raise ServerOffline return self.default_group.add_group(add_action=add_action) def add_synth( self, synthdef=None, add_action: AddActionLike = None, **kwargs ) -> Synth: """ Add a synth relative to the default group via ``add_action``. 
:: >>> server = supriya.Server().boot() >>> print(server.query()) NODE TREE 0 group 1 group :: >>> synth = server.add_synth() >>> print(server.query()) NODE TREE 0 group 1 group 1000 default out: 0.0, amplitude: 0.1, frequency: 440.0, gate: 1.0, pan: 0.5 """ if self.default_group is None: raise ServerOffline return self.default_group.add_synth( synthdef=synthdef, add_action=add_action, **kwargs ) def add_synthdef(self, synthdef) -> "Server": synthdef.allocate(server=self) return self def boot( self, *, ip_address: str = DEFAULT_IP_ADDRESS, port: int = DEFAULT_PORT, scsynth_path: Optional[str] = None, options: Optional[Options] = None, **kwargs, ) -> "Server": if self.is_running: raise supriya.exceptions.ServerOnline port = port or DEFAULT_PORT self._options = new(options or Options(), **kwargs) scsynth_path = find(scsynth_path) self._process_protocol = SyncProcessProtocol() self._process_protocol.boot(self._options, scsynth_path, port) self._ip_address = ip_address self._is_owner = True self._port = port self._connect() return self def connect( self, *, ip_address: str = DEFAULT_IP_ADDRESS, port: int = DEFAULT_PORT ) -> "Server": if self.is_running: raise supriya.exceptions.ServerOnline self._ip_address = ip_address self._is_owner = False self._port = port self._connect() if self.client_id > 0: self._setup_system_synthdefs(local_only=True) self._rehydrate() self._default_group = self._nodes[self.client_id + 1] return self def disconnect(self) -> "Server": if not self.is_running: raise ServerOffline if self._is_owner: raise supriya.exceptions.OwnedServerShutdown( "Cannot disconnect from owned server." ) self._disconnect() return self def quit(self, force: bool = False) -> "Server": if not self.is_running: return self if not self._is_owner and not force: raise supriya.exceptions.UnownedServerShutdown( "Cannot quit unowned server without force flag." ) if self.recorder.is_recording: self.recorder.stop() try: QuitRequest().communicate(server=self) except OscProtocolOffline: pass if self._process_protocol is not None: self._process_protocol.quit() self._disconnect() return self def query(self, include_controls=True) -> QueryTreeGroup: request = GroupQueryTreeRequest(node_id=0, include_controls=include_controls) response = request.communicate(server=self) return response.query_tree_group def reboot(self, options: Optional[Options] = None, **kwargs) -> "Server": self.quit() self.boot(options=options, **kwargs) return self def reset(self) -> "Server": self.send(["/d_freeAll"]) self.send(["/g_freeAll", 0]) self.send(["/clearSched"]) self.sync() self._teardown_proxies() self._teardown_allocators() self._setup_allocators() self._setup_proxies() self._setup_default_groups() self._setup_system_synthdefs() self.sync() self._sync_id = 0 return self def sync(self, sync_id: Optional[int] = None) -> "Server": if not self.is_running: return self if sync_id is None: sync_id = self.next_sync_id request = SyncRequest(sync_id=sync_id) request.communicate(server=self) return self ### PUBLIC PROPERTIES ### @property def audio_input_bus_group(self) -> Optional[AudioInputBusGroup]: return self._audio_input_bus_group @property def audio_output_bus_group(self) -> Optional[AudioOutputBusGroup]: return self._audio_output_bus_group @property def default_group(self) -> Optional[Group]: return self._default_group @property def meters(self): return self._meters @property def recorder(self): return self._recorder @property def root_node(self) -> Optional[RootNode]: return self._root_node
josiah-wolf-oberholtzer/supriya
supriya/realtime/servers.py
Python
mit
35,554
0.001041
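A quick-start for the synchronous Server, condensed from the doctests above; it assumes a local scsynth installation that supriya can find on the PATH:

import supriya

server = supriya.Server().boot()
synth = server.add_synth(frequency=440.0, amplitude=0.1)  # controls of the default SynthDef
print(server.query())
server.quit()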
# Copyright (C) 2012 Balazs Ankes (bank@inf.u-szeged.hu) University of Szeged # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions # are met: # 1. Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # 2. Redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in the # documentation and/or other materials provided with the distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. """Unit test for png.py.""" import unittest from png import PNGChecker from webkitpy.common.system.filesystem_mock import MockFileSystem from webkitpy.common.system.systemhost_mock import MockSystemHost class PNGCheckerTest(unittest.TestCase): """Tests PNGChecker class.""" def test_init(self): """Test __init__() method.""" def mock_handle_style_error(self): pass checker = PNGChecker("test/config", mock_handle_style_error, MockSystemHost()) self.assertEqual(checker._file_path, "test/config") self.assertEqual(checker._handle_style_error, mock_handle_style_error) def test_check(self): errors = [] def mock_handle_style_error(line_number, category, confidence, message): error = (line_number, category, confidence, message) errors.append(error) fs = MockFileSystem() file_path = "foo.png" fs.write_binary_file(file_path, "Dummy binary data") errors = [] checker = PNGChecker(file_path, mock_handle_style_error, MockSystemHost(os_name='linux', filesystem=fs)) checker.check() self.assertEqual(len(errors), 0) file_path = "foo-expected.png" fs.write_binary_file(file_path, "Dummy binary data") errors = [] checker = PNGChecker(file_path, mock_handle_style_error, MockSystemHost(os_name='linux', filesystem=fs)) checker.check() self.assertEqual(len(errors), 1) self.assertEqual(errors[0], (0, 'image/png', 5, 'Image lacks a checksum. Generate pngs using run-webkit-tests to ensure they have a checksum.'))
hujiajie/chromium-crosswalk
third_party/WebKit/Tools/Scripts/webkitpy/style/checkers/png_unittest.py
Python
bsd-3-clause
3,001
0.001666
import os from sublime import active_window from sublime import find_resources from sublime import load_settings from sublime import save_settings import sublime_plugin def _load_preferences(): return load_settings('Preferences.sublime-settings') def _save_preferences(): return save_settings('Preferences.sublime-settings') class ClearWindowCommand(sublime_plugin.WindowCommand): def run(self): if self.window.is_sidebar_visible(): self.window.set_sidebar_visible(False) if self.window.is_minimap_visible(): self.window.set_minimap_visible(False) if self.window.is_menu_visible(): self.window.set_menu_visible(False) if self.window.is_status_bar_visible(): self.window.set_status_bar_visible(False) self.window.run_command('resize_groups_almost_equally') preferences = _load_preferences() preferences.set('indent_guide_options', []) preferences.set('line_numbers', False) preferences.set('draw_white_space', 'selection') preferences.set('rulers', []) _save_preferences() self.window.run_command('sort_user_settings') class EnableColorSchemeCommand(sublime_plugin.ApplicationCommand): def run(self): self.color_schemes = [] for color_scheme in find_resources('*.tmTheme'): ignore = False for exclude in ['(SL)', 'Color Highlighter', 'tests']: if exclude in color_scheme: ignore = True if not ignore: self.color_schemes.append(color_scheme) if len(self.color_schemes) > 1: color_scheme = _load_preferences().get('color_scheme') if color_scheme not in self.color_schemes: self.color_schemes.insert(0, color_scheme) self.window = active_window() self.window.show_quick_panel( self.color_schemes, self.on_done, 0, self.color_schemes.index(color_scheme), self.on_select ) def on_select(self, index): if index == -1: return color_scheme = self.color_schemes[index] for group in range(0, self.window.num_groups()): active_view_in_group = self.window.active_view_in_group(group) if active_view_in_group: active_view_in_group.settings().set('color_scheme', color_scheme) def on_done(self, index): if index == -1: for view in self.window.views(): view.settings().erase('color_scheme') return color_scheme = self.color_schemes[index] preferences = _load_preferences() preferences.set('color_scheme', color_scheme) _save_preferences() for view in self.window.views(): view.settings().erase('color_scheme') class EnableThemeCommand(sublime_plugin.ApplicationCommand): def run(self): self.themes = [] for theme in find_resources('*.sublime-theme'): ignore = False for exclude in ['Addon', 'tests']: if exclude in theme: ignore = True if not ignore: self.themes.append(os.path.basename(theme)) if len(self.themes) > 1: active_window().show_quick_panel(self.themes, self.on_done) def on_done(self, index): if index == -1: return theme = self.themes[index] preferences = _load_preferences() preferences.set('theme', theme) _save_preferences() class OverlayOpenFileCommand(sublime_plugin.WindowCommand): """Open File; Inspired by Vim CtrlP (https://kien.github.io/ctrlp.vim).""" def run(self, tab=None, split=None, vsplit=None): """ Open file from overlay. :param tab: Open the selected file in a new tab :param split: Open the selected file in a horizontal split :param vsplit: Open the selected file in a vertical split Defaults to opening in a new tab. 
""" transient_view = self.window.transient_view_in_group(self.window.active_group()) if not transient_view: return fname = transient_view.file_name() if not fname: return if vsplit: self.open_file_in_vertical_split(fname) elif split: self.open_file_in_horizontal_split(fname) elif tab: self.open_file_in_tab(fname) else: self.open_file_in_tab(fname) self.window.run_command('hide_overlay') def is_enabled(self): view = self.window.active_view() if view: return bool(view.settings().get('polyfill.experimental_features')) return False def open_file_in_vertical_split(self, fname): self.window.open_file(fname) self.window.run_command('create_pane_with_file', {'direction': 'right'}) def open_file_in_horizontal_split(self, fname): self.window.open_file(fname) self.window.run_command('create_pane_with_file', {'direction': 'down'}) def open_file_in_tab(self, fname): self.window.open_file(fname) class PolyfillSetLayoutCommand(sublime_plugin.WindowCommand): def run(self, cols, rows, cells): num_groups_before = self.window.num_groups() active_group_before = self.window.active_group() self.window.run_command('set_layout', { 'cols': cols, 'rows': rows, 'cells': cells }) if num_groups_before == self.window.num_groups(): # Fix issue where group focus moves when it probably shouldn't. # When the layout is not changed then the focus shouldn't change # either. Previously, if the active view before the layout change # is transient ST would move the cursor focus to a group with a # non-transient view. This can be disorienting and interrupt flow # because where the cursor focus has moved to is not always clear. self.window.focus_group(active_group_before) return if len(self.window.views_in_group(active_group_before)) < 2: # Only move the active view before layout change to the new group # if it doesn't leave the previous group without any views. return view = self.window.active_view_in_group(active_group_before) self.window.set_view_index(view, self.window.active_group(), 0) class ResetWindowCommand(sublime_plugin.WindowCommand): def run(self): self.window.run_command('reset_font_size') view = self.window.active_view() font_size = view.settings().get('font_size_default') if view else None if font_size: preferences = _load_preferences() preferences.set('font_size', font_size) _save_preferences() if not self.window.is_sidebar_visible(): self.window.set_sidebar_visible(True) if not self.window.is_minimap_visible(): self.window.set_minimap_visible(True) if not self.window.is_menu_visible(): self.window.set_menu_visible(True) if not self.window.is_status_bar_visible(): self.window.set_status_bar_visible(True) self.window.run_command('resize_groups_almost_equally') class ResizeGroupsAlmostEquallyCommand(sublime_plugin.WindowCommand): """ Resize groups equally. Make all groups (almost) equally high and wide, but use 'winheight' and 'winwidth' for the current window. Windows with 'winfixheight' set keep their height and windows with 'winfixwidth' set keep their width. @xxx winheight option @xxx winwidth option @xxx winfixheight option @xxx winfixwidth option """ def run(self): layout = self.window.layout() col_count = len(layout['cols']) row_count = len(layout['rows']) def equalise(count): size = round(1.0 / (count - 1), 2) vals = [0.0] for i in range(1, count - 1): vals.append(round(size * i, 2)) vals.append(1.0) return vals if col_count > 2: layout['cols'] = equalise(col_count) if row_count > 2: layout['rows'] = equalise(row_count) if col_count > 2 or row_count > 2: self.window.set_layout(layout)
gerardroche/sublime-polyfill
ui.py
Python
bsd-3-clause
8,584
0.000349
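The equalise helper in ResizeGroupsAlmostEquallyCommand above reduces to evenly spaced split points on the unit interval. A minimal standalone sketch of that arithmetic, copied out of the class so it runs without the sublime module:

def equalise(count):
    # Mirrors ResizeGroupsAlmostEquallyCommand.equalise: produce `count`
    # evenly spaced split points between 0.0 and 1.0, rounded to 2 places.
    size = round(1.0 / (count - 1), 2)
    vals = [0.0]
    for i in range(1, count - 1):
        vals.append(round(size * i, 2))
    vals.append(1.0)
    return vals

# A layout with 4 column edges yields three (almost) equal columns;
# rounding leaves the last gap slightly wider, hence "almost equally".
print(equalise(4))  # [0.0, 0.33, 0.66, 1.0]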
from django.conf.urls import url from api.applications import views app_name = 'osf' urlpatterns = [ url(r'^$', views.ApplicationList.as_view(), name=views.ApplicationList.view_name), url(r'^(?P<client_id>\w+)/$', views.ApplicationDetail.as_view(), name=views.ApplicationDetail.view_name), url(r'^(?P<client_id>\w+)/reset/$', views.ApplicationReset.as_view(), name=views.ApplicationReset.view_name), ]
icereval/osf.io
api/applications/urls.py
Python
apache-2.0
417
0.007194
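The two parameterized routes above capture the OAuth client id with a named group. A quick standalone check of that pattern (the regex is copied from the detail route; the sample id is illustrative):

import re

# Same capture group as the (?P<client_id>\w+) routes above.
m = re.match(r'^(?P<client_id>\w+)/$', 'abcd1234/')
print(m.group('client_id'))  # 'abcd1234'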
#encoding:utf-8 subreddit = 'ikeahacks' t_channel = '@r_IKEAhacks' def send_post(submission, r2t): return r2t.send_simple(submission)
Fillll/reddit2telegram
reddit2telegram/channels/~inactive/r_ikeahacks/app.py
Python
mit
141
0.007092
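The channel module above is the full contract this repo expects: module-level subreddit and t_channel values plus a send_post(submission, r2t) hook. A hypothetical variant sketching where filtering would slot in (the score threshold and the submission.score attribute are illustrative, not part of the original channel):

#encoding:utf-8


subreddit = 'example'
t_channel = '@r_example'


def send_post(submission, r2t):
    # Hypothetical: skip low-scoring posts instead of forwarding everything.
    if submission.score < 50:
        return False
    return r2t.send_simple(submission)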
from datetime import datetime class Schedule(object): WEEK_ONE = datetime(2014, 9, 2, 9) def week(self): time_difference = datetime.now() - self.WEEK_ONE return (time_difference.days/7)+1 def season_year(self): return 2014
FenrirUnbound/greg-ball
libraries/schedule.py
Python
gpl-2.0
262
0.003817
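Schedule.week above is whole-weeks-since-kickoff arithmetic, one-indexed. A quick standalone check (the sample date is illustrative; // mirrors the Python 2 integer division the method relies on):

from datetime import datetime

WEEK_ONE = datetime(2014, 9, 2, 9)

# 15 days after kickoff: 15 // 7 == 2 whole weeks elapsed, so week 3.
now = datetime(2014, 9, 17, 9)
print((now - WEEK_ONE).days // 7 + 1)  # 3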
"File-based cache backend" import glob import hashlib import os import pickle import random import tempfile import time import zlib from django.core.cache.backends.base import DEFAULT_TIMEOUT, BaseCache from django.core.files.move import file_move_safe from django.utils.encoding import force_bytes class FileBasedCache(BaseCache): cache_suffix = '.djcache' def __init__(self, dir, params): super().__init__(params) self._dir = os.path.abspath(dir) self._createdir() def add(self, key, value, timeout=DEFAULT_TIMEOUT, version=None): if self.has_key(key, version): return False self.set(key, value, timeout, version) return True def get(self, key, default=None, version=None): fname = self._key_to_file(key, version) try: with open(fname, 'rb') as f: if not self._is_expired(f): return pickle.loads(zlib.decompress(f.read())) except FileNotFoundError: pass return default def set(self, key, value, timeout=DEFAULT_TIMEOUT, version=None): self._createdir() # Cache dir can be deleted at any time. fname = self._key_to_file(key, version) self._cull() # make some room if necessary fd, tmp_path = tempfile.mkstemp(dir=self._dir) renamed = False try: with open(fd, 'wb') as f: expiry = self.get_backend_timeout(timeout) f.write(pickle.dumps(expiry, pickle.HIGHEST_PROTOCOL)) f.write(zlib.compress(pickle.dumps(value, pickle.HIGHEST_PROTOCOL))) file_move_safe(tmp_path, fname, allow_overwrite=True) renamed = True finally: if not renamed: os.remove(tmp_path) def delete(self, key, version=None): self._delete(self._key_to_file(key, version)) def _delete(self, fname): if not fname.startswith(self._dir) or not os.path.exists(fname): return try: os.remove(fname) except FileNotFoundError: # The file may have been removed by another process. pass def has_key(self, key, version=None): fname = self._key_to_file(key, version) if os.path.exists(fname): with open(fname, 'rb') as f: return not self._is_expired(f) return False def _cull(self): """ Removes random cache entries if max_entries is reached at a ratio of num_entries / cull_frequency. A value of 0 for CULL_FREQUENCY means that the entire cache will be purged. """ filelist = self._list_cache_files() num_entries = len(filelist) if num_entries < self._max_entries: return # return early if no culling is required if self._cull_frequency == 0: return self.clear() # Clear the cache when CULL_FREQUENCY = 0 # Delete a random selection of entries filelist = random.sample(filelist, int(num_entries / self._cull_frequency)) for fname in filelist: self._delete(fname) def _createdir(self): if not os.path.exists(self._dir): try: os.makedirs(self._dir, 0o700) except FileExistsError: pass def _key_to_file(self, key, version=None): """ Convert a key into a cache file path. Basically this is the root cache path joined with the md5sum of the key and a suffix. """ key = self.make_key(key, version=version) self.validate_key(key) return os.path.join(self._dir, ''.join( [hashlib.md5(force_bytes(key)).hexdigest(), self.cache_suffix])) def clear(self): """ Remove all the cache files. """ if not os.path.exists(self._dir): return for fname in self._list_cache_files(): self._delete(fname) def _is_expired(self, f): """ Takes an open cache file and determines if it has expired, deletes the file if it is has passed its expiry time. """ exp = pickle.load(f) if exp is not None and exp < time.time(): f.close() # On Windows a file has to be closed before deleting self._delete(f.name) return True return False def _list_cache_files(self): """ Get a list of paths to all the cache files. 
        These are all the files in the root cache dir that end with the
        cache_suffix.
        """
        if not os.path.exists(self._dir):
            return []
        filelist = [os.path.join(self._dir, fname) for fname
                    in glob.glob1(self._dir, '*%s' % self.cache_suffix)]
        return filelist
auready/django
django/core/cache/backends/filebased.py
Python
bsd-3-clause
4,845
0.000413
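set() and get() above share a simple record layout: a pickled expiry timestamp followed by the zlib-compressed pickle of the value, with pickle.load() consuming exactly the expiry bytes on the way back in. A minimal sketch of that framing, independent of Django (the sample value and TTL are illustrative):

import io
import pickle
import time
import zlib

# Write a record the way set() does: pickled expiry, then compressed value.
expiry = time.time() + 300
record = (pickle.dumps(expiry, pickle.HIGHEST_PROTOCOL) +
          zlib.compress(pickle.dumps({'answer': 42}, pickle.HIGHEST_PROTOCOL)))

# Read it back the way get()/_is_expired() do: pickle.load() stops at the end
# of the first pickle, leaving the compressed payload as the remainder.
f = io.BytesIO(record)
exp = pickle.load(f)
value = pickle.loads(zlib.decompress(f.read()))
print(exp > time.time(), value)  # True {'answer': 42}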
#!/usr/bin/python # # This is a free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This Ansible library is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this library. If not, see <http://www.gnu.org/licenses/>. ANSIBLE_METADATA = {'metadata_version': '1.0', 'status': ['stableinterface'], 'supported_by': 'curated'} DOCUMENTATION = ''' --- module: ec2_vpc_subnet short_description: Manage subnets in AWS virtual private clouds description: - Manage subnets in AWS virtual private clouds version_added: "2.0" author: Robert Estelle (@erydo) options: az: description: - "The availability zone for the subnet. Only required when state=present." required: false default: null cidr: description: - "The CIDR block for the subnet. E.g. 192.0.2.0/24. Only required when state=present." required: false default: null tags: description: - "A dict of tags to apply to the subnet. Any tags currently applied to the subnet and not present here will be removed." required: false default: null aliases: [ 'resource_tags' ] state: description: - "Create or remove the subnet" required: false default: present choices: [ 'present', 'absent' ] vpc_id: description: - "VPC ID of the VPC in which to create the subnet." required: false default: null extends_documentation_fragment: - aws - ec2 ''' EXAMPLES = ''' # Note: These examples do not set authentication details, see the AWS Guide for details. 
- name: Create subnet for database servers
  ec2_vpc_subnet:
    state: present
    vpc_id: vpc-123456
    cidr: 10.0.1.16/28
    resource_tags:
      Name: Database Subnet
  register: database_subnet

- name: Remove subnet for database servers
  ec2_vpc_subnet:
    state: absent
    vpc_id: vpc-123456
    cidr: 10.0.1.16/28
'''

import time

try:
    import boto.ec2
    import boto.vpc
    from boto.exception import EC2ResponseError
    HAS_BOTO = True
except ImportError:
    HAS_BOTO = False

from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.ec2 import AnsibleAWSError, connect_to_aws, ec2_argument_spec, get_aws_connection_info


class AnsibleVPCSubnetException(Exception):
    pass


class AnsibleVPCSubnetCreationException(AnsibleVPCSubnetException):
    pass


class AnsibleVPCSubnetDeletionException(AnsibleVPCSubnetException):
    pass


class AnsibleTagCreationException(AnsibleVPCSubnetException):
    pass


def get_subnet_info(subnet):
    subnet_info = {'id': subnet.id,
                   'availability_zone': subnet.availability_zone,
                   'available_ip_address_count': subnet.available_ip_address_count,
                   'cidr_block': subnet.cidr_block,
                   'default_for_az': subnet.defaultForAz,
                   'map_public_ip_on_launch': subnet.mapPublicIpOnLaunch,
                   'state': subnet.state,
                   'tags': subnet.tags,
                   'vpc_id': subnet.vpc_id
                   }
    return subnet_info


def subnet_exists(vpc_conn, subnet_id):
    filters = {'subnet-id': subnet_id}
    subnet = vpc_conn.get_all_subnets(filters=filters)
    if subnet and subnet[0].state == "available":
        return subnet[0]
    else:
        return False


def create_subnet(vpc_conn, vpc_id, cidr, az, check_mode):
    try:
        new_subnet = vpc_conn.create_subnet(vpc_id, cidr, az, dry_run=check_mode)
        # Sometimes AWS takes its time to create a subnet and so using
        # a new subnet's id to do things like create tags results in
        # exception. boto doesn't seem to refresh 'state' of the newly
        # created subnet, i.e.: it's always 'pending'.
        subnet = False
        while subnet is False:
            subnet = subnet_exists(vpc_conn, new_subnet.id)
            time.sleep(0.1)
    except EC2ResponseError as e:
        if e.error_code == "DryRunOperation":
            subnet = None
        elif e.error_code == "InvalidSubnet.Conflict":
            raise AnsibleVPCSubnetCreationException("%s: the CIDR %s conflicts with another subnet with the VPC ID %s."
% (e.error_code, cidr, vpc_id)) else: raise AnsibleVPCSubnetCreationException( 'Unable to create subnet {0}, error: {1}'.format(cidr, e)) return subnet def get_resource_tags(vpc_conn, resource_id): return dict((t.name, t.value) for t in vpc_conn.get_all_tags(filters={'resource-id': resource_id})) def ensure_tags(vpc_conn, resource_id, tags, add_only, check_mode): try: cur_tags = get_resource_tags(vpc_conn, resource_id) if cur_tags == tags: return {'changed': False, 'tags': cur_tags} to_delete = dict((k, cur_tags[k]) for k in cur_tags if k not in tags) if to_delete and not add_only: vpc_conn.delete_tags(resource_id, to_delete, dry_run=check_mode) to_add = dict((k, tags[k]) for k in tags if k not in cur_tags or cur_tags[k] != tags[k]) if to_add: vpc_conn.create_tags(resource_id, to_add, dry_run=check_mode) latest_tags = get_resource_tags(vpc_conn, resource_id) return {'changed': True, 'tags': latest_tags} except EC2ResponseError as e: raise AnsibleTagCreationException( 'Unable to update tags for {0}, error: {1}'.format(resource_id, e)) def get_matching_subnet(vpc_conn, vpc_id, cidr): subnets = vpc_conn.get_all_subnets(filters={'vpc_id': vpc_id}) return next((s for s in subnets if s.cidr_block == cidr), None) def ensure_subnet_present(vpc_conn, vpc_id, cidr, az, tags, check_mode): subnet = get_matching_subnet(vpc_conn, vpc_id, cidr) changed = False if subnet is None: subnet = create_subnet(vpc_conn, vpc_id, cidr, az, check_mode) changed = True # Subnet will be None when check_mode is true if subnet is None: return { 'changed': changed, 'subnet': {} } if tags != subnet.tags: ensure_tags(vpc_conn, subnet.id, tags, False, check_mode) subnet.tags = tags changed = True subnet_info = get_subnet_info(subnet) return { 'changed': changed, 'subnet': subnet_info } def ensure_subnet_absent(vpc_conn, vpc_id, cidr, check_mode): subnet = get_matching_subnet(vpc_conn, vpc_id, cidr) if subnet is None: return {'changed': False} try: vpc_conn.delete_subnet(subnet.id, dry_run=check_mode) return {'changed': True} except EC2ResponseError as e: raise AnsibleVPCSubnetDeletionException( 'Unable to delete subnet {0}, error: {1}' .format(subnet.cidr_block, e)) def main(): argument_spec = ec2_argument_spec() argument_spec.update( dict( az=dict(default=None, required=False), cidr=dict(default=None, required=True), state=dict(default='present', choices=['present', 'absent']), tags=dict(default={}, required=False, type='dict', aliases=['resource_tags']), vpc_id=dict(default=None, required=True) ) ) module = AnsibleModule(argument_spec=argument_spec, supports_check_mode=True) if not HAS_BOTO: module.fail_json(msg='boto is required for this module') region, ec2_url, aws_connect_params = get_aws_connection_info(module) if region: try: connection = connect_to_aws(boto.vpc, region, **aws_connect_params) except (boto.exception.NoAuthHandlerFound, AnsibleAWSError) as e: module.fail_json(msg=str(e)) else: module.fail_json(msg="region must be specified") vpc_id = module.params.get('vpc_id') tags = module.params.get('tags') cidr = module.params.get('cidr') az = module.params.get('az') state = module.params.get('state') try: if state == 'present': result = ensure_subnet_present(connection, vpc_id, cidr, az, tags, check_mode=module.check_mode) elif state == 'absent': result = ensure_subnet_absent(connection, vpc_id, cidr, check_mode=module.check_mode) except AnsibleVPCSubnetException as e: module.fail_json(msg=str(e)) module.exit_json(**result) if __name__ == '__main__': main()
andreaso/ansible
lib/ansible/modules/cloud/amazon/ec2_vpc_subnet.py
Python
gpl-3.0
8,929
0.001568
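The reconciliation inside ensure_tags() above comes down to two dictionary diffs; here is the same logic on plain dicts, outside boto (tag names and values are illustrative):

current = {'Name': 'old', 'Env': 'prod', 'Stale': 'x'}
desired = {'Name': 'Database Subnet', 'Env': 'prod'}

# Tags on the resource but absent from the request are deleted...
to_delete = dict((k, current[k]) for k in current if k not in desired)
# ...and tags that are new or changed are (re)created.
to_add = dict((k, desired[k]) for k in desired
              if k not in current or current[k] != desired[k])

print(to_delete)  # {'Stale': 'x'}
print(to_add)     # {'Name': 'Database Subnet'}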
# Generated by the pRPC protocol buffer compiler plugin. DO NOT EDIT! # source: service.proto import base64 import zlib from google.protobuf import descriptor_pb2 # Includes description of the service.proto and all of its transitive # dependencies. Includes source code info. FILE_DESCRIPTOR_SET = descriptor_pb2.FileDescriptorSet() FILE_DESCRIPTOR_SET.ParseFromString(zlib.decompress(base64.b64decode( 'eJzlvX10ZFd1J0pVqaTSbbV0VN2222o3fV3+aKktVbvbxoY2xqOW5LZMd6unpIaYGSyuqq6kcp' 'fqirpVLcuBlWQyhI98vEcAY+eBSfgyMQFCgCQDi5dhArMWvIQk7zFkzcKTYQWcQAjmw7OAISHv' '7d8++5x7bpXaNoTO/PG8Elq177n77LPPPvvsvc8++3r/UPB2x2HrQr0aljdbUTsqDtbqcTW6EL' 'a2x/y1KFprhEf4wUpn9UgtjKut+mY7aunGpX6v7yVRvVZqe2qWn62ElTDejJpxWJz1dpn29ai5' 'L+NnxncdK5U1zrLBWb6z3ghnLd7FsF1xXyuOeQWhL96X9XPjgxX7+9hJb3DWEFs87hUMCcWRsh' '1EGfSN7XcA3YSWnnP3N/u9gupTz1H7Vcb7WKYwxD+Kxx7L+DPR5narvrbe9o/dePQWf2k99E+d' 'm5n3pzvt9agVl/3pRsPnBrHfCkFaWCt7/rk49KNVv71ej/046rSqoV+NaqFPP9dARjOs+Z1mLW' 'xRk9Cf3gyqQEyjIoIm/ZeErZgG7x8r3+hRg6DtV4OmvxL6qxG95Neb/Nap+Zm5M4tz/ipxsOx5' 'hUJW9YNs+jNXeI4q0N+HvalCP/29i/4eUZmxg75lmF8TPhCBwlBCMoTmhQy9UFCjXsS/soRgWG' 'XVTWPLvmEejbXdaTVjP/Ab9biNsRosfkAkBr4jKz1TbAYFQmKMZcMPGg3qXpkOiYRhGs2wA8kS' 'RKm9DiRHkIPqiHeo0EckFjHGsf0+Jhx8Jo6FG5vtbX8jjONgLeTR9THqIo1uyLuZfmXUZXrax6' '73uwXjYhyit/i9gtrntfgXOLSPOHTz2Io/mwgvDbDZDurEJUeiY/CKRmtxToIBdRKE7U3DvEaD' 'ONQKmnG9Xb8AOjZDkpVmtc4UKNNnoZ97vcaBZAhyLbEkgeQIckzd5J0QSEaN0TsHx475i3bCWq' 'Ghxm8GxC6fVueFeo1kdGWbppKluuX0nCn0MRYXkifILpKZBIKeimrMgeQIckA9d6WfV/9N3lt/' 'K+c9o5IpjnSpjNJpb7RHpIrP9/qwEEjP5EjPXPsMeuYswBV+o/TVPm/PDk+LRa8P3GDNNVjhv4' 'v7vAFaq+dJnEgbAWx+Fp/reXaetvflWFU5kOIN3uhmZ6VRry47zTxqlq8o/WA2aXzIG9kKg/Nu' '013cdBhgp+GMNyTivQz52dfHo/d7Rt898l3y1hK9VJz2BsNmZ0NjyF+Ef3PUohtLAa8JigGRoH' '39jOBQDwIRt24c5j0aymB4f5sUIHaLAUZy3Q6zGDZq3SiS94q3eAORXmb7CrzhXLWjICzoNhXT' 'uDjvKa2ml6Gml+vN1WjfICM42DsQbjhD7eapWWU4Tv0uXu71x9u07u/fN8QSIr9K/6HfG3k2In' 'abl1/FKHm7e9Y80O+kmdj/EzJx2tvVDON2WNMSkXuWMuXpl3pFqu8nEqmf8UYsScukDNeMbB55' 'JkrKc+a9Cl6rDIep32SZeFEzjFZpeVUbJCc7c2kBTXq4FGlotVF8QSJqAxeRlNN6kfVI2zlv2N' 'gKMrJBJqL8jCOryGt6YLtb7s/iNZ4FLLNYeayFhgzwDMHGHvCG0+wp7vXycTtotVkK8xX9o6i8' 'HCkZ1nL5Cv4s/qtkwDke8PW9M5rC3D3usVu93akBPNuuS6/yLtsRNQnJ3k6z3myHrU2ySGjcuq' 't9fztwEZk757bWWCp7Or3Aw4OFrw2on6f/sqVP9Xt7d1ozOy5fWv4kwSthi5mUr8gvWhH5RrAS' 'Nmg1ZMaHj93wrFZl+RReqeg3iy/y+kRFA8PhZ4cBa6nC7xX3e4P4V8tGP9NcAAByAUObl0ktNF' 'ub/Q3BqoWrQafRXr4QNDohCzwJlgBfAljxoLdLr6o6vXM/a898RS+0eUDQ/X0xrWURTe4CAO7+' '1m7FfWDn4fWsJdoqtTUhUx809o0SgkJlWIMXBFr6/azXx4plxNu1dM/ZueXZhXMnTs2pTHHY8x' 'hw56mF6SWVtb/nzyzdcrPK2RfOaUCf2+CmYypPAjukEcz/zNwstehPQ6jNQHG3N8iQEwsLp1TB' '4lxcqsyfOakGLc6TlYVzZ5VnMZyeW1ycPjmndtkWJ+5ZmltUQymyqIvdtou5M+dOq+HiqLdbd2' 'GIGOkCEaUqIURjGU0BqEWxNOPlWQxJ3IdPTZ+YO7W8cHZpfuHM9CninYVV5v71ufnK3Czxz4Gd' 'nZteIliuVPX27qRQd1xCjixkLyILjKtbFkpfyXp7dthUduzkDi+vZVlvsxM77k4s2T1bLb/nmh' 'q5i5gaQNEjsC/vUf56f7zl2eyPDPvxNoH8DpvAbd5oD6JnrYx/MePtuxhznkElZlMq8bZuDl59' '8UnometHM97lO5uUO9LwIq9/IySX3ZhVvXvXaX7cPdnylrvb5y5mF2pqeih9fda7bEfkOxJ6wP' 'Pqzc1OW5tOWhMPMoSVF7Rsp22f5/i5p0Hc4PkJoX1M6HMvMtIewbzRU9VGPWy2l+N2Kww26s01' '3moKx/OrQSMOKyP68aJ5ije0b+i80Z96Qz+2b5TeOOjtcgzw4tXe0H3BhWDZOFWaE7sAOyuO1Y' '3eXm5CY6SOqo0gjplpBW5axLMFPJoxT4rP8/bwGxu0N9U3G+Ey3LyYtxxL2ShanJYGoCgms/AA' 'v7YWNsNW0A6Xw1d2qO0yueTL60G8vm8vEJzI7stUrkTDk9JujptNN2t3UaPice9yxkIcoQEvV9' 'fD6vnlTnv1+fv2u/0zhYvcZgZNzlGL4qI3hMnYqD9ANEct3kOHd1BNDgfLC/LCafI/jucXz87N' 'zVZ2GSx3Ri0I1FpkGbxLC9RaZNhLzKpW9ZjJN7WxNpViVrV6UjcwMQNaD5clzHJfHO0ZZfer1O' 'Pmdu+LxVSPm9vdr93q7d1c3+x977D7XpGadL94HXvmrbBKc1Xbd4Xb3HlQLJP4V5fDZrBCEhO0' 
'6I9430Fu3NdudciLqFbn+OE0Pyse9kajlfuqWiKXCc1q/f591zJ7R/CA5fEsg4sThDteD1qbrJ' 'Jjmoxw33W6qYafMWCsiHirvto2GA/pFcEwwTbuKXAi1fE4NxsmuNsvbQZomXQ6oQ03AiY93uxd' 'jkak6IJa0A6c1pPcGmw/LQ9TdLY6K9tWsKY0nYAZ0bpkxnnpuDfkyn1x0NOSTwYJGUEzC7MwX1' '42R7YImVGn5pfmlivnzizNn55TOcewv7uvcL06VPqLrDec9tSKL/SuMGGVOGwvb9VbvCA3Ar05' 'WvnZK60Ww/ZLqc2d3KR4yjvYjEgBkOIIWrXlJKC1HFRJIONIb4QWy1XNaFEaJzvEtDTtEt/cxc' 'SXrOuNYJPkt93aZvu8UCkQYA6//0XcJOJmQQ3S/w4qr/TXOW/Itdfh/lR5x8qwTrvmaa378gy2' 'suP92jiu6DdhRkDYQm2MFCryq3jS678vZtz9jHun2J+D++5FRj549+LymYXK6elTFXm9eKXX1w' 'ge2E5vegx6tpNAGBCgS281DLqEi+GIl2d+FT1POKaeUyx4fTMLFSwIWgEaunx2fm6G1kTpeV6/' 'ZgIWi2UDvaR/Co6MeXru9Im5isqmp7pP5UsxrULHDv+Xccb/Y8bb5djVMIiCRiPaWg4a9SAW0f' 'AYNA3Is526f6Elklf9pUcynuo2bLvIzPyvJLP0tow3nLZmu8i7+n8peV/OertTNuyzpe6V3mi9' 'Fm5sRm0Ez5cb4YWwsa/ESqM3qJjqoTyfvHcKrx3fMz87d/rswtLcmZl7ls+defGZhZeeqah6V7' 'NLuOzPeqqbqOIV3k5k0cre442cWaA9kTbGuTvvnJtZWtRxD9t6KbXAS7+R8/bsQAmpce2xaCdq' '6tlQX4bNcJZcSXFwyBYiLjXb9dU62fPaB9duzEgC1yGlSa+4GelDsGXCK43h1vRVlHky32zb1s' '1wLehqDWWeqyjzxLYm+6UWdWDr6XbYOzKVXRpmm4gVn0S9hsgUY5hucsgbCdbWWkBuEGm/ZNiC' 'ueHY3V7B8AFbNThBphM721kEwprmIXVaj5eTIH6Wnhcqu+qxDYCWHiWDJX0IQb5LoRGRkOuTds' 'zN+DOcW5RPSfuKfXPs8xmvYMC03fZtBu11Rpc/kVWZCv8GnCzAJouAwPEb89oIgxo7PdHGBs1k' 'bOZV4DMCxllYuxXUG6m2fdxWmQe28XHvSoO3RlYoOVS15KV+Dm5cIQ1m5bl5t/QXGW/UuGk1y6' 'zTnhc0m1HbZVevKPe8V562L1UcBGMbnpc8uSjbaJ+SEyY+ptSOvadB8OcQflkJ1+pNiRvrHyb8' '0mfDLyf+twx5bNFGN70nVFd0Ib4r87IXrdXb652VMrU/shY1guZacs7Kf1SnyJ+aWoucU9fbkj' '9/kMm8M5s7efbEY9mxk7q7s4Y9lXC1EVYx5Lv//ImsN6gOqeeoXxlQGe+xkcIQ/yoe+9SQzy9U' 'o4Z/orO6iuP+KV+jOhT78Ch8VhjVdYSgfG1je6msixufLy/4881q2b9IwsV6u70ZHz9C4yA9GG' '1SR4ZBGPumEDG1ook44nl+JazVsZRXOnxWj4P3ThwiuUISNgBZqTeD1jbTFU/6W8RNP2rxv1GH' '6NyIaqSu9IKZ5HN06nmj3iapSc7QOdcBCRurEawSklMkBtTqOhuAXiI8Yfs4kYT/DncRxvkCbg' 'rJRiduI/0ikDSQYCW6gEfCMc8nUaSNe1Inn5jsDLfHZq2LHCTHNIL6Bg75L0IEdebwwhBBY6x1' 'iDBLh5cQ8s+iwzNJLrWo2sFaDswkHSH+R/Sk5ZOkkJ9Pm3zCap4geuj5LvV2UGfCOr8JxFC4IM' 'iVrWaUPGO+19uxx1kcjCoi6d0ItpGIQ5JCxEc+LUiChhAKImKD9mJf86SNpI8WbTg1f5UeeCYV' 'aLW9BTERCfLjzbAKCaK36hCsFmSnqaUojpl2z1+6a37RX1y4c+ml05U5n/4+W1l4Ce3cs/6Je+' 'jhnD+zcPaeyvzJu5b8uxZOzc5VFv3pM7MEPUOW/IlzSwuVRc8vTS/SqyV+Mn3mHn/uZ85W5hYX' '/YWKP3/67Kl5wkboK9PkJs8tTvrzZ2ZOnZslP2DSJwz+mYUlzz81f5pc6Vl/aWGSu+19z1+40z' '89V5m5i35On5gnz/se7vDO+aUz6OzOhYrnT/tnpytL8zPnTk1X/LPnKmcXFud8jGx2fnHm1DR5' '6bNl6p/69OdeQpaJv3jX9KlT6YF6Ptk1cxVQ7w7TPzFHVE6fODWHrnics/MVMncwoOSvGWIeEX' 'hq0vMXz87NzNNfxA+yhIiieyYF6eLcvz5HreihPzt9evokjW78mbhCEzNzrjJ3GlQTKxbPnVhc' 'ml86tzTnn1xYmGVmL85VXjI/M7d4m39qYZEZdm5xjgiZnV6a5q4JB7GLntPfJ84tzjPj5s8szV' 'Uq5/jcZYJm+aXEGaJymt6dZQ4vnMFoIStzC5V7gBZ84BmY9F961xzBK2Aqc2sabICbN7PkNqMO' 'iYk0pGSc/pm5k6fmT5I1OYfHC0Dz0vnFuQmasPlFNJjnjkkGqNNzPGpMFNHl6b8d0Z3k+fTn7/' 'SnZ18yD8qlNUnA4ryIC7Nt5i7hedk79qWsJNsd98+TIoia/ypR7P74ixnkvyRo1YIJWucnAqxM' 'gkSkhEhhNXo3INrf6mukzrap+WLQvI9W9Mn1cCPYCtqT/t3h6qo/GwZQ56SfWNPEvAhJL0i0J9' 'bKidYztnKbJqb1Vbhab4qCs9l6epPm1oSLFgBZivWaCzYJfpz11YDVQRqM3JZ2YxtqJvB3SFDy' 'rBYJmtuiE5G4gi0UynI8LK+VbZuWNpGg0vw6eQ2tdjwhWYMTtFfvo78K6gb66xyAhV3yN6CT9N' 'ckQzPyN6BT9NdRhpq/8VeZ/rqVodfJ34Aeob+uZui18jegN9JfBxl6UP4G9Gb66yrv5zP096D+' 'MdZ2swk103gDIoaZSHcN+xLUKZLWyN3DVt6yJsrUCsTC84PGGslFe32DpCBqHmr7W1HrvF/rwK' 'D3V6KoTZtGsLlJv4g1DU4rfD5RcFxlxu5lCTCGBG0KG5s0JS2eOH380jNLi2HbpAACP8RDU+9p' 'UaC58MkHiG1eIfL3nq8K6kpvmH8hr/AFKqvGOYdOJz32McRzIP0E2aWe60AyBDkoeYEmVfIF6n' 'p1yDvKOY+305heRmO6xuY8IpETQ2qQ3+bKpZPweDsRtt97oU14fBGRsb80qcUXO+YkMbzBThZE' 'tkXcdEyXdisM3dTFPn7fheQJ4iYQYhgvUkV1eSqZ8UXqSjXmTdlkxjsIy3NLB3yW9dJqFBFF+K' 'e8ErToz7Bd7cpbvKMnb/GOnrzFO6jbK1N5i3eoq9QB71aBZNUJwnL12CH/jLEUZEJ5Yem8Sasg' 
'HAKyRMCJFAFZIuBEigCM6gQRcJUDyRHkoPK9JYHk1CzEYmzW59QKTQLkS2cSJnQIWWJL2bxBbY' '6xxeZQlyPqZlPU5Yi6WaJOOZAMQUZVyYGAmutIuF4tkD51krBcP7bRTR0Coc+ONtKSd9J61sbZ' 'FLsI0OUb9bWWVm5Rs7Fd9mcjWJmwxpxh9AkBLiRPEHcYWAUnaRi+A8kR5Bp1nfcCgeTV3YRlcm' 'yCXY52tDnFAaOUinc3AoeEPJFwd4qEPC3Tu4mEMQeSIch+4lsCyRHksLqBlz8g/erFhGXKtugn' 'vC9O4e3nNmb5a0iGIAdFaWhIjiA3kN42eAfUKcJSti0GCO+pFN4BwnuK8F7tQDIEKakJB5IjyC' 'TRZ/AW1OkU3gLhPZ3CWyC8pwnvQQeSIYjv4C0Q3tMpvINqIZXoPEh4F1J4BwnvAuG93IFkCHKF' 'w5lBwrugribZ/X5GQJ46R2iOjP1NhtQ7z2PYqCV52yalKLWv0rLpaIfO2hiOf0aSe0/UYT8hDl' 'ZD2r9b4QY8NJYTxLfI/NfdmK15PWjhcNxvdejpBjkvq51mVXdcb2+bxZNsgeRDTzHIpYqQN2nz' 'Y+OHOoV1I74w3CcS36jhJo97xMFzKQ56xMFzxMErHUiGIGPqsAPJEWSKZvgVAtml7oEGHjvL26' 'NOdbXKyNlG9OPOpqx9DpXpzPMSNztWYntL/7ip5FC6iyi9J0XpLlqa96R05i6i9B7SmfscSI4g' '+0lpTxAkq15OW96racvbn9ryxKDj3HvZ6qB+X05b3RUsfFne6u7FVseos7J53WsJysrmda8lKC' 'ub171288rK5nUvb14Gb0a9grActi2wO70ihTdDU/IKu1iysju9ghbLdQ4kR5BxWj4Gb1YFdhFm' 'ZdMJUnihWIIUXlAT2EWYlU0nsItQ/64SlhtsC2wX1RTeHOGtWiWXle2iStNwvQMBngkat8Hbp2' 'pWyWVFf9dSePsIb80quazo75pVclnR3zVWchqCKzCrhOW8yiUQemuVNoF9nm8hmN811acOlIYQ' 'BGh04jpvinvdFkQR2gx1QfME3a2KXdAMQfdQH2lojqD7aVN3e86odcK6n3qeu3/nniET6z09w2' 'pZ7+k5w/j2kMyloTmCQu6KDjSr7iOsR1ItMRP39fQFWbmP+ip1QTMEvYZmMQ3NERTqwcxtXjVS' 'soiNsZGaW2yMDat2NCRDkDFHFrExNlKy2K+a0Nu2BTbGZgpvP7dxZRwbY5Nk/LADyRHEpXdAbc' 'J0sS2wMW6m8GJj3EzRi41xk+i92oHkCHItrdIPZoQ9GdUhNPer3Nj/kfE53w5a0gQw/Xaw5uus' 'uLjsV3aA8oYCW8cEn6DgeRuJEXyy5wU+pxRasypGlMtcgEoQSyPCyeFCHxexGsFm2bNLJcMUe6' 'QGfQvBUrnwNEvFWPYXUkJkrPsLKYE1Fv6F1FIxVv6F1FLRlv7W0ywVY9xv9fSMpbLV03OG8blL' 'xRj6WykVXVDbMAHtxMKe2U6JA+yZbRKHfQ4kQ5Arxa7TkBxBYNe9JiOgQfUqQnPdWCeZE20TcO' 'hx0t9ar1fXd5hzM+U7TS8CC2vEl6aObeJlbQqEiZxFzWpYTqiHHfWq1HgGiV+vSm1lsKNeRVvZ' 'QQeSI0hJXevtZrX7c7S3/lJGZXj/hKj/HO2fB7wK/4LQ/EKG+jg+Nq1daHi85KqTIRMaC4td4V' 'oUxvDJW2E1WmuST++vhy2S2sUwtK7KqMFJhDNWF9QPEHaJBJQB6KC6yQHlALpFvcD7twzKq9cA' 'z5Vjp/0ZTm+M2aVnI58sug7RuZFQ2UxWWmwveNr15VI6orHT6An/bpKPUQMgohg06oCyAF1GrZ' '7H29/rM8TSJ4mlY9el7JXEbiRKAudmIyYCeyK9WKCVU+SfCF+8AYP77QxtgqMGRs0IOqB2eT9j' 'QZilX83Q6tk7NuPfCGPSigziKmGrhcuunr/Qwp1VerwV1lv6GXGAprJeJTO4FQYxDss97zIXM3' 'XIuEe6wFmAi2qPt8cBZ9Svoe2eVNuMAQ93gbMAj9LqfpUDzqo3MoqxNf8MrZ6X1ddeRoqUfEsy' 'z2tl3z8jZ8FWt7aD86F/9EZaX+2Q9C/f3nVy4P36KqlJ85Jjujbq58nA7xouhvDGXlqFLNDqDj' 'enfp3ZnmqLNfTrvRwj+QUYHHOH26fe8lMc7k3HfrzhQure0jtc2Nxv6R1uXj2Itpel2mLDZ7Dq' 'AmcB3kPMcVH0q7f2osDe/tZeFP2E4q29KAbUQ2hbTLXFNs7g3V3gLMCKlquLoqAe7p03qP6He+' 'cNMdaH9bz9bcaBD6pH9JL7C/I+g7WpWtiob9ThHtnkAVpyJ1tRZ5M9FKy7JJuF/SXsDolXRUpB' '+09l/65oi7y/1qQOf9/kIezSCO1JWuzH5HqSLonb9UYDWwQyWxG3Y/XMm8sad7zF/iq7megRF5' 'ND+lM/lN0oiP1O83wz2moKpEtGsIU80isjg8SXRyAje7xjDthT70Dby0sH/FNhc629vjNjUqjg' 'qr6jd/496uEdmP/LvHEHvEu9UzN+Dy2OLbDtgr1xn8YLx/KdvZTvIrzv1JS7QjGk3tUrmkOE4l' '29QjFEKN4FoUiL5m717l71t5tQvLtXNHcTindDNNNrbFi9B22vSLUdJhQMHu0CZwHeS5aQi2JE' 'vbcXxQiheG8vihFC8V6NYtIBK/Uo86J0BfRLnFJLOr7uIlGE+9FeJinC/ahmkot7VL3vx8A9Sr' 'jf14t7lHC/T+M222VGvR/b5e+622VGQwfIrjxiQdguP8AcGhu76HaZUGGM3Q+ktU5GdsEPYOtP' 'JkCbux9MT4CxWj/YiwK74Ad7UWTVh3pRAPOHelFIa6AYYSAG+HsZDnuMGgBtNL+XmFx9Ytb/Xo' 'atxQSUAQiRjwSUAwh2tUGeUR8BpqtsGxjvH0kjh+X+kQxHbRMQvzhKg0pAOYDGiFCDPKs+mqYc' 'W+RH08ixjXw0jRxUfRTIL3dAOYBA+TsyAsupP9T24q9m/PlV316ohBDEYVvSHRCKM6Y7QWmTpb' 'YrEWcn1CX1wbzpSQEG86492mqyj2iv9E367oVAeH3JhcFyMjSEZf4wPVrEZf4Qox1xQBmAUFAj' 'AfHYriAr9ONZgfWpPwIqf+x9WQ7Im4AZBoCb2rpqhRBej1OZFfibA2s8ePPE8/leWaxzNQL/UP' 'nQJKx/BF87jcb2FG7TIAcQpssCDjW36qiJMnPDDVMwQPy4GuGAzvNbnYYYJiYbg0z2mu3WH6+X' 'qe/VeivW0Vp9PV3qbIgNDbq9ZFQ8D0ELOWRclqOZtGPHhbbPSRwuY0OOGIhjLs+3VyYmnInoM9' 'xzQXmA3AUDrfNHWDD7HVAOoOeSy/ULRuzy6o8zXLdjk+ch8UKenvckR4gsy01esHQerPagr6LG' 
'hbCW9iSDZpPMe9rOrXA640Hs5o/T48lrutzxwJj74wwX/khAOYAOkGf2USNY/epzQHXt2Du1YJ' 'Es4dKOkScbgU+F2ds0Ds46ImVrUoJksGymyqnRShQ1wgCsKeHmTglLpcTZwCVpoVM4u/uR68y6' 'Gzxhb3Acy5h8sGBTcwuH7VvB9oTpDEZ0F6IZ216TpZPWuKX/otv9o8eez6ImjRAcX5hdGNfZDR' 'PHdRLDFPkd2oa/I+E3wlyfS09BP03B59JTAGP4cxl22RNQDqCSusZ7rRGpAfX5DB9jXsD6ZP2D' '8EEsRwu1kAP5gc9XuI2cuOfZNFWHYj8ppeDpozvfPUqp6zA7HA89VWQ78huOYCHI9vn0qAZoVJ' '9P62fY55+Hfr7KAeUAwsnoj8yoCuoLelTfyPh3Ly6ccZaEIarMcQieGtHaiKz0HO2XRW150C/k' '5wa6aeCX7DX2kkQGoBMS/JMyTH5EHOKekK2hLW1oq7Kr2upt5EKGyKTT2WI6LQ6o6AHTT/QR58' 'lKbbPqq+NAx6/Sk8ZMELuLFBGqL6R5WSBefiEtIXBUvpBWOghSfQFKx7cb6aD6Iq9R2wbhoi+m' 'kePc7YtAfoUDygC0zxE/RIy+qMXv9wcF5qlvZjgG9u5BZjSt1ESXBeLI+CVzBFcq+y+FQrZPrH' 'yZFmAk102qnkdKIHREFaGvtaBVo51Cjunxkhz3aIRd1QNYkya06BfMlrwSNiJIfmSXBZI5kE4E' '7RT5UaNmyKtKMIlFwVLDyPnaEhuKaZmEXpbFFvvanQrAihKO1dbDdr1a0s8nJXeyhz4k95Be54' 'xSXnLjYVBdNyTZIeqX1kKkAtH/oyPbhe5houwvGogQFdMugqwBe2BvjiM5yqhJqunzeHODlnXl' '9Nn5nZBZKwcRJXh2q21sPbRESuRgNmSknHrlGh7av5zsmTUTPKzCO9UdxyHtZxiRTi6exERhDp' 'pRc4o2kZCd6DRe6p9Uu8yRnTXrTWPdscOM9NQa8n5FW9qukKFeb0DStmi4JteU9cdWC9mhiTxj' 'VyJm8dFoPTYJYNSUzCAWjhbSx6Zop+KEJCd9SB+pMltIKcQ0KZw3sUXTDgXeM6buyaVRxtGkHy' 'JUjBPotXUxwDB3rfCVHXL54ehHPXxYkuUZct4qTda2pHPQ6G3aVTsJ8tKgDmFNhjSt9VUbwKy7' 'Z680CpRqIwNgkmQXlLBoyEyY+fS6BwHB4NjGsS65jo0UQlnrNdII1iZd8rYJO/Lhtu00egkStk' 'tfkS508QpHw+Lk/JtpJeiRhv1m2r5GQOKbsK8POqAcQIibX8uW4HcQ5f0fiPLuTUV5zfgQ1IU5' '9R0d1B3hn/DMvpv4N3nxzL6bUJQXz+y7ic7Pi2f23cQzy4tn9t3EM8uzD/RUovPz4pk9lUaOY+' 'mnEp2fF8/sqUTn58Uze0rrfBzB96vvY7hvzNJwr3SH20wcCRkz7JfvZzjdbIR/Ysw/SMbcL2P+' 'QUJWv4z5B8mY+2XMP0jG3C9j/kEy5n4e8w8zfEpp2mDMP0wjx5h/COTXOCB+8Vo14YByAOGk0i' 'DPqn/McO6KaQOn6h/TyGEO/2OGs1cSUAagK+Q0o1+8UQIhf+UjfCbOZ9z/PkuoXptVubFHsjsc' 'LBq7WodxnSNAievudKyIO8T1rjNEzNGOB4hd54cIQYfmyoNWFkjFJ8Xb1qbN01a00l3Su9jJ8b' 'Y5ZfTjTnXdPGJdGWySLtxs1Wnxco6NOAVMsWTZ1Jvtm455pA42yGItG3brNABim0fidLUFQcZe' 'k93hdPMytwnNHTca6gLnAd4tUZUEnAEYB5xpcA5gnHC63WfUL2XliPNi3UMuf6m3e0RKfqm3+4' 'xGiVPONDgHMBbABzIiWjn1KxCjG3c+nL6oDKUfdMuSB6uV17Y875YnH05DoBP6LipbniNcZiUg' 'xMEEu6B+gHaJd9AvIQ4CHVA3OCAeaVkd8SIB9an/PcuO6L0JBQnRFz2PbYXajN/xyNXb6czV0N' 'BnunRBeYBczYWwAIGMD9cvYQECQaHeRD8G1JuzpFAfzvYk8mrKzSmh1q+iWuFE0WsFsfIHWOzf' 'krWqdUCE/C0JgQMi4G9JCBwQ4X5L1qrWARHst2Stah1goX4wawN9AyLCD6aRQ3wfzFpfb0BEl0' 'Am0DcgYksgE+gbYNX6UJazo00bzOZDaeRQrQ8B+X4HlAHoKtHJA6JaCYQM6esLqGPwNrD2t8Da' 'y1OsNbWSNTfhRr0N3Hwu01Rgbr494WZBuPn2hKaCcPPtCTcLws23J9wsCDffnnCzwHQ/kuV8BN' 'MG3HwkjRwb1SNA7jsgfvFqGl0CygGEnASDPKt+M8u5L6YNuPmbaeTg5m9mOfslAWUAQvpLAsoB' 'hPwXnOsMqneDm78Hbpa60u9wxVpHGdKchQ/57iynEYzwT3D2PQlnB4Wz70noGxTOvifh7KBw9j' '0JZweFs+/RnP2NjMAy6rezHDZ4XYbUL2fz4/qHTuznSBprAg5u0MLm+M2OMTSyWslrI6dlp2J8' 'NqxG5minbW7RrcJNiHrTEQ29mOXfTg8Ua+a30wPN6DEUZbENyiwT6KCs9kGe5ceAqWTbYJYfSy' 'NHtPGxNHIw6DEgP+CAcgD5NPEGeU79DjBdZ9tAUf9OGjkU9e9kOWMmAWUAulJEdlAUNYGuIUP0' 'LgH1qQ8B061jt/rz5p54DK9Ce9q+LkEFx1IXdTJwc2knIaHP4HJBeYCMBT8oGphAyqEKGvhDmq' 'oEVADoOnWLAxoA6Eb1PEt7Xn14Z9p1faxe2gXeSzuCsB9O057X6F3a4TV8OE07grAfTtOeJ9o/' 'nKY9T7R/WNP+YSRBeeq/YO3+eU5ljp3xb//n/+f5UlHBO/bnu/05RCdsfnOSVq/vtGI7XQ8uWK' 'c5LvlBW9/wdteh59/HDqO9gx0727X2+fjiLC0x2pZNbKLmxw1cUyXjsVbHxTCaFuzciDUE2ybO' '6dMW2o7ZHdc4YIGSr1/f7DTY+7dRQzNVsIFNIhEQ7ZxIFMQ9iUShf1hY4+CSUTptjQ2yHbIdIm' 'EifZGIwwR1aJc46nLD2ZLinKskXG4ssg0cCzTjNnnEOsLBmVlclp3TjMRpb6aITE5NqkGjweXs' 'e8tclN1QMGs7nk+rLOX4q9qK4phjP70s8F8a6pMY7btbbEhViPQs6CCtw6MtPrkJScfWG+la9J' 'NglZaOzYjo5U55DmNN2koYNj3NN51hQZxEGwc7VD+zHVNtqzLY7BqWlfRsy6Xh6noUhxzT01ee' '4+Oef5jDOKahpowDxSZ1j70ym/GFKSb1QM5PQ66dk8DienPUWiOpfEBuohNKPoe+f5Psc076aJ' 'g+JpmRkmwoJD/vRvoPWNrrLQ4HvQD/mcMiOe3ajjr6xnPcaclCAKhmr+DoafBlZhk1U2GVmlBQ' 
'fppRr8gVpnhd2M/D17ea+FaDxYaecEihrw212edr12Grh1MbiMGZQgdTa41oJWhM2RmcaoVruB' '2+7Vwk5cFHxmZ30mltUu4iEmi2zVVzDJ2vfN8XkmPLxj9ftwSehRUAySuYmvE3G521enOCh5J6' 'ZStciettHFKS8XOB6EX+zYRc7GjhXKUZARkfJIPt9282WB9FW8x2rLUmPxHO0yI+F3c4JIfnLD' '78ctRkXnUPqczXVHSqI7ywsGeepNwBEOEo2iJgsvT04tik025LeFLURdxZmUqlRPKRmF4RZnnH' '+l4maT4tdnzRPxahDZCqhMXh/7hlHYBEQ6+JjJqS02dE3vmLFXx5h7cCrT+gEzAd9hMIRhARP+' 'xsimQEHSKfVleVORzEfIVYDn+MjGjTFZG//wLTdY++ZuSx7fol7NnXjD2RIY60ddD4bpp3X8rn' 'QWW1YLIQZtK9ySYlx0FEvcR+hTrSUY2gyhvhiW1zcjnpXLsxiLF+Yxxnr3SSkEy02oaaqzedKI' 'oNkKbet9HUJlLAMHY+vteysUJttoJWLTZBFjGStW3iiY3+pcRc8cRG/1JiXXpio38J1uVzHVAO' 'IAS7/lNWYBn1FaC6Yex3s/bEErqZzyWFc/b4zL1czRUXcLFXTnBFjXo+1xTV79raEY5Db44yWd' '1iS4x0Dgcb80c9WvKv7NRpK9UXrPRZNanV0tQULZhlro/JX5gQQdFqQILW4zzjtPkZSpOjB30D' 'PNY+gWh8plFP4rZcL9sKuFuPdUO9WocpQ1tMJCF4PSoMY8KZEPgSX0lPCHyJr6QnJKN5XRRv0B' 'Nf4itZvrjz3pzAsuobQHXb2JtzMiH26CxsJjLOlpOINFdPoTlLjojotc2A/yyDY/putNYEsA6T' 'e5eiUyblrIA3Z1my2siyNop7y5rnsKO/shKHNvsDtgaJ9WEsdEkMqZN2q2neOXKhkzs4936nMr' 'SE3qZadr2pezEJlp6zqHF2K/alPunlajhrYTvxF8cnjHccINmaUOAosLntOQxxbJIuBsh1VDN3' 'cJ6+kZ50+CTfSJwGT3y8b8BpuMYB5QC6XmIsGlQAaEIdd0ADAN2sXsBZeh6/9iT6mxvbr1eQaE' 'qc0kGbrOssPYMAbuKTafqQmP9kmj64iU+CvikHxB3dqI45oAJAN6lZzoYTkG53s5rxvm60SZ/6' 'Hrp80dhfZp9GeI89vfRap8Bj22lLn2fcXw31eGGI8sEhAtbtNr5HBPW7EWh3RLaRgI8kzy3dOf' 'V8j7NEiJhXdvi8WKsAzlaRc2LPl1qlTnkDTVQtsoY1tbJSGTh3FsTu0zKJYqSb7TjpPN13bE4m' 'Ze6Q/9AkQ4R22ipyOLSVbc1T7rsBk0ausTrz22eY7YLyALnzC4f7e5jf6x1QDqAJCeF64nATaF' 'Ld7oAGALpVvdA7yyBcfPgh+vt/cS7yQt+W8rLbhJy27lQ9wrh6McHtGPS1CcI5QNv6vAVhZ/8H' 'BOSHSzfbXpJCCowHmMU5m4TCqgcNY6fr6L5FRT0wssEucBbgIbXbO+WAM+pHaFsc69dFDkpHOI' '8/Kcu1sMk7iM37FgWotYrJ0LbYDL7dXeAswEh+d/vOqn/Kcv7tre6ga3BWIKgioKfq7dAWeeqS' 'DLcTjIXxqS4wd4Pc/RGZ6Lz6+Rz2GzvziIYwyHNA/QCZw0lPoiE/j+9gXeuAcgAdchQboiEEch' 'UboiEEgmJ7wqiNfvVadFga+3+yiSl3Muoy5GjJcg2oH8eQIyUU6dphkz1IxSR1S0qla2extTzF' 'iU2odeLmAVpLkE/qfVSjm0TsyVTNKutXnYQk9y0y99paflKbK28xgoYP3nbG0k1L6mVz/92qJ0' 'dlINPttemZRabba3MpOwUnxQQyMU8NygF0UF3t/WWfwAbUgyw3Y5/p8xf1vQcpI558tS4VGsI9' 'LxgfqJjXIS7c4fslKStesq/ovF3ONDBVvKCum9tY7uQpwLfwK2dn/Hib7IsNHbLa5peSnrhQCP' 'J1YAykt5q4hwwyGk3Fn5quokZ+vXYxUczHP9k9qK1Q4j3sxZ1HemzypT7TE5R7coeED4CDFjWX' 'OxEcnuoujujt0JnmxpbOSdFJPc5tFRrLanAh4ixArSaEcPkYYHpLdTmq/eaLs5T2waiF9B6t4G' 'wWJ4kZcj6RRsw1HWqhRBp00ACZi71ioK/FpGxqnUdC3i0nxHTcjQ2Ziw+mpRSZiw/mUhsbDt0I' '5BpWyFwkkGtYDZD+eTCtfwa05EL/GBVYUG9Ff8nmh4S/t6ZJQMLfW9Mk4KTqrSDhOgeUA2icDP' 'oExOhvoI00AQ0AdAspXUPCoHoorYWRFvhQmgRcI30oTQKOdB5KcwFpgQ+luTBIJDyU5sIg7mul' 'ueCph9FfQiaSch5Ok4CknIfTJMA1fxgkXOuAcgAdkiQODSoAdNgZoUckEOh5RNX3jE+/S70DHR' '4b+2rGn49T9cCM0N/h+fpbfRD3SKtP8p7J0IfSb+NWlaQZVvlrlfU22if3u2xA29P+EL7bybaz' 'a+dztK/etnuEsT6Qm7VGXntYu800x/ue3wiDuO2mWvLtLmOUcE9mCNrsbKRcetTjeEea1SjI8Y' '40q3Fx6h1g9ZgDygF0QI60NagAkK+OOqABgG5QN3r/zrB6SL0rx8cnr/T1Jxhik1bHZ4f8PQbJ' 'pzThix1LtZVT7ryntSI1XuuxD4Fs5oYbnGEP0bDflR72EA37Xelh82WvnD140aAcQNc4S2+Ihv' '0uyP0tDmgAoKPqed5bzLB3q/ehw4mxX3SiRpGJLvpVcTP1FyBEt9X5NwdF2c9EfMR5xdtprF2W' 'SVkrVZ0QaNSpw4ndxIn3pTmxmzjxvvTWjDtr78PWfI0DygGE5X5KQMPq/cA0Pnabbz8twczvIf' 'M2Q0lsQi1ioTiUDRNl709TNkyUvT9NGa7CvR+UlRxQDiAUsXqzMfRG1IdznBjy77JOiM1fxIc4' '3E2a1x1fqe0NvsH6nTFZ2pzURHND5h7W1aHyIe048XeA4yqOZkwRWp0/FZmpNbc54iPx9sZK1E' 'C8TTv8khTdTvy0WIuANu0mdbYjk2jPTyTxXR/4eE/Xje0l4ecIzh/TLB7B+WOaxbgq+OFcKqA3' 'gvPHnLqa5OEXjIAr9TE9+5uJfG+ubz5buUbTHjnxdpDnWZk7ZK7ik8DOeBSN52Pp8Sgaz8fS48' 'H1xI+lRUbReD6mReZTZjyj6uM5Tmn/UIadMWdaOOajixybezpa2EmB7TgOS7WXkN0z2fZR2mZn' 'i9u9boAlBZlpo66sLUZoiXD4Qf/qQbigPEAuP3Cl8uM5m52kQTmAkN36p4YfRfVHQFUe+8Q/gx' '/muzSWMV7vfD4jY5JYsMsbzzLnWfGmiAtdad4UcaErzZsiLnSBN+MOKAfQDWrK+4zhzR71aa1e' 
'PvJMvDGzipS8DvkLP7moSFb0TyQs3HWvyt1DPPl0mid7iCefTvNkD/Hk02l9sId48mmtD35OQH' 'vVZ3NcKKT5ExUK8exhU7potDEMSgsmi8CcPrlVRZgAGsxn04PZSw7/Z3O2qogGZQAyVUU0KAcQ' 'qoq8Xk9wXv1pjq+JPvDPLivyk49Lm8uoQULEmBokntQgYdCoA8oChBok+hRrUP0ZRjAsWAYJy5' '+BEbvllUHG0gPKGhDSR3ap/zunnqN+vU9lGCusQoIU1OXel/L8GxG0J3Ich/1sHrsAu1jOuWZy' 'p+aoCSyhlVuWYDV1IdKELZ1tkFvYw6opWMjEwpU6l9ezwcsu7J6gJ/8RJ6VSLyE5665rKzGpva' 'urnxzHqfmh2MeNIw/RUvIi+doo4qer4RYOxcOg3WnhqBr7PWYaez/b7XwZodZVb9jelTFR/vD+' 'gCsDpzIJfNv8zijyf1bXPJe1f5GPWfm3M7dv020dEbwZE7AR3M9PXp1O6g6dxA94KDpvAmww5O' 'lrGLc5DI0lLZabulPl8c3ItPCzv88pd2bcCBew62+s+tt0vVc2bkxyy4okfMf6gChmv6j7ng93' 'ecLeBBEPyJzkasWok4/aW5wO0G7Vq7ZQP89+iKKLVYmU2M0ldXFQqw8WbtIoTyQaRYPyABmvYZ' 'dEfp+A13DIAeUAOiyRbw0qAGQi3xo0ABAi309mBJZRf4cO7xx7POPP1uPEXXLCPRKNM58o80s1' '5+Cp5JvPlJm8Z2Ixl8FfJflsm4vb+ijBYDLZPObIlPWXPqAkQSLj2JZuFQNNljStmXrYus1vhl' 'sS+dHrLLgQ1Y0kyQmcQ2TJYTFONP8uzWKcaP5dmsUZzReljjigHEDHRI9rUAGgm9WcAxoA6A41' '633HsDirvo0Oj47998T1N4viknn/zsr7MV1+8fi9Z+3yO4vFsAHnZd9Ocxnx+G+nuQzp+3bi9W' 'tQDqADsoFqUAGgg+TiJ6ABgA7T9LyrX2A59do+hdqib+yHNWNv0xlO65WbTsxwzMZgky+ybGtV' 'IhxEoHXTXLk0VwstRKtRH5AXvjjcxifSJn3+fg/+fBHgy1rsb/eP3uYlVkrNvQ7ZiKLzMRdLMu' 'iE4NPBJmcF8zf5jIZ2tbT5fl9aLyctgoYvZPnnw20hoqeJJVg8vdv9Y9Ls1fofqxTTBHWNzvPn' 'u0oGcV6kTjCAInQCJ3peDPm38xZuZXUFX0qBvg1oIXMYAnNTTy0IG+EmpOvY8fGMxqhLZcjFE8' '1w59hz+uw8G098OainwBEfcpr8KS70jrzs+qpvb1frtbDzXVC+y7SwNHfcFJmWMLA1p7vK+tNG' 'xvkbxmxhqdL1cD3jbOuL54JAFJ3J86tvpALO+lxAHBezsUhiorvB4Oicl4kLygPkrkscnRNISd' '6+BvH6Qlnz6xk0qF4PPLtLV3CiAw7slu1xJW0i2pzcxYbg64F+SHBpQ7AHlDWgqwX9GzT6IqNv' 'Bs1oOYiX0U2COYNGLprMTqCsAVVkLH3q1/p+mhUGGafB6oL6AdrlqDKcWRPooLOH4MyaQKbCIG' 'Kjb+y7VBUGd7F1T/iNdb9LrHsGjTqgLECw7mGXD6k395Fd/i1jlyNsSZCC2uu9N8u/YZc/3MfO' '+5uzzFX+CGci/eaEk5P7brihO01CDPggSa72LlK5Q9J5yd/FFNk0sy2sDrImjO9jToQ8yypcxt' '8y9kbayUVytNUHSVpMRLux1OoGNdgWQ6RckqnT2iZjJzyEUF6LY8J89bAW8s1oToLuwGY3h2JX' 'a6kYEhPv4URQNKgfIHOjaUhMPAKZ6kJDYuIR6ErZLIfExCPQfklxGRITj0DXq0kuU8WfiVC/gf' '5+q0/KVJlPRxAUZaqutSBM4tv6UPlqbMQGTTa4SDkftdtWuMeEdt3gLMC7yQfc44Az6pE+mylg' 'gQZc6AJnAcZidVFk1W/22UJpFoibRH02DyABc2vkAfw3I5sZ9Sg4sH/s/8rKiueSCiIEktyhP+' 'WsnT+r4zdbqCSHTUhsS05pZv2GCzhwxKzP1iOwWorgQJX9SiAGCXVmsMPFwSecTMwE9XtNqack' 'oy3UYqmdDbmqGrRatLlygXgu28hblU3+a3SXwVtpRCtlf94Ur5jUu4g5s8QG0tbfeuH8QD4G1c' 'aiNqvl/FUzzamaZmQOJvWjaZGGSf1osqkMyZQ/ik3lcgeUAwgi/fa8wLLqo0B109gv53mu9Md2' 'bUaYhJnCJCd2kQ0pzTQbn5MLC5FUZJGaGe5+Cn/ffnyN+YH3brnZX+El3A7J/2jwdKzW7zd1oD' 'x/nB7dcvOk35F/Y/mXGzFA/ppAGR+nAqsZiP1irqfrwInI8By649FZXMal4okgKzHSdUlge9c5' 'Q0snk0GE15GwKmlWAVlIZCglRWukGowEw/3VRqRNd32rIekW0SPWnNt4aj/naz0KGQQ7xCk3kT' 'A7xMsFGs+aN86zks5/LpliX5yDEpKrX49azuUeVj56rjzffnWYb3mnLDf7KZC2DoOklLutFhDJ' 'ZLuZEDq3U+d2ExjHHzonH9kLZXIS1svzkANrQA2JY/PRtKxzcbg+G8AcEt1EoKLsskPi2Hy0j6' '9xJ6ACQAckHXFIHBsCjaujVn1n1MfQ3//pqu+Mhg6QVp20IKjvP+jjJK8rJEbspDfoO4KXua0J' 'yR+kVbMpMfgHfZzEVXbAGfVxjftKxt0jqXEX9ox5Y3cXOAtwN/as+uRFsCfZ8i4a0PPJXuyCCN' 'i/OyQszanH+zgN/MtDJpvHuRe0Yl2yRvBAvbF9h++fCh7Ytjnc5sxXTKop8NFUUtc3XhC3kLo9' 'WyYVVKf1Op4nX1VjQ0X3Nqn1Up0LqEm7Q3FSRYy1r9wdF/qQyy2OgbZVdd6R3gB09FFWUwqrKP' 'JqW1/2SPAxsVJJj4ve6TCOCSdK5j+GH/C1Noy2y2dCjGu1FYb6BII9PVvHhg065AatoZRXC+zd' 'trfO0lWW2ra8ljFgzXUwm5bq2U0qdfABxzHurK2FsSmdlIqwBfwhOFh+9VBXKgvYtwSeFD2pel' 'xcrzpqSZjXURgr5KmfD0NdThBlBtYxFyQREk2QD6qksijrPWrJJDX7gU6NZYrlO0y4Ebcqx1oI' 'kTpnNzTLt3l8jimJ3lxGigPD+GRM4FwqJr7d2WlhGmCgQNRQvWYK35ex34fxnM7qLj3O16g0wb' 'dxhlbbZIKazoCNVTzGrmN09kaQ7pDFudpp6auSvJM1dDmlNEIIfb2Jkmd8pYrrCSEpWkp5aLEk' 'Lrpx5W7vvTfWUF0Pq+dteSJjvumbcR5vkDT/qbtHNEvAjOp8NCSIxXw8r9ctcn7HJ4xFl1rdHv' 'fdCnFbSAskF0qSuEF6KSKmzx/q5CWQosw5hmiGGDC+nWqu2EQmS3IHnOz4yD3JSCeI6vVdb+pi' 
'WbL58XdY+aIHGDOJjSEMrOm52WltRjo/BozxzMqAEdPs3nElysvsjp+W356NydvKU235IFK97X' 'LcHI04eXvO3BhtmSaDUcsnWg/r6zCHhYw6f+03RQoHQQ9zev1h7+mapXWT0We6+lYqoLgOoxsX' 'QIXjjpWAMMvjaSsBYZbH0xYxwiyP99miqkMSZiHQFY7hgBsKj8NIPuyABgBCKaFvZATWp/66j4' 'PM/9XNL4M6u2QhZhP3j3+yALOvq6k+q5wyqRJuGNBnhuuC8gC5/IXt9Nd9Nrw8JNGWv+6z4WUN' 'KgBkwssaNAAQwssnBJRXT6C/ybGjP/6X5gxapJ8/kaY6rxG7VCP9/Im0VCD9/AlIxZUOqADQmB' 'z4aNAAQNeRoFQE1K++/lONdDFOGsXX06Po1x3tcriKVOuvJ5EuDcoBZCJdSIf7+0sW6RriSNff' 'J5GuIYl0/X0S6RqSSNff60jXcxk0qJ7UgccR+RJbzb+PjT5Bi3jmk0mIcUjimT2grAEhgLZbfQ' 'cBtO+bABqy3b6jA2gV/gnb/amf6lTtlljTU8lU7ZZY01PJVO0WP+CpZKp2S6zpqWSqkK/3vUs2' 'Vbt5qr6XTNVumarvJVO1W6bqe0lQclj9T/D0V/LCU+Tp/c8+vkhb5Z/g6Y9AtT9W0akC6ZwZkz' 'kQ4EgcAfhN/VFxCUDqzynxBzvbyVUpz5QOZrKGhck/Spg8LGe2P0pW9bAw+UdY1fsdUA4g1G3+' 'WkZgGfWLedbiX0y0uNS+uoTnhPqG5KXV4XzI7rANQSMeqwvKA+SyLaM5YlT4sASNCGRU+LCcwx' 'LIqPBhOYclEFT41QwaVK/JP+2BwjAv7dfk7ToelqXdA8oaUEU6y6rX5X+aa3dYAg2vS/MH+vx1' 'ebt2h8UPfl3ert1hCTQQyKxdZLT+cv5Srd1hXruE36zdYVm7DBp1QFmAzNodUb+Wp7X7G2btIg' 'GUIAV6/MMM/8bifZNeCl/rWgraVbzkC0L3c6nPzp2CdzL5I6JT3pRM/ojolDcli2NEdMqbksUx' 'IjrlTcniGJFDgjcli2NEDgnepBdHRUAZ9eBPVYRHZIk/mB4Fyo09mIjwiCzxBxMRHpEl/mAiws' 'gYfviSifAIi/DDiQiPiAg/nIjwiIjww4kIK/U2iPAHjQgj5/dtec5VezzHvyHCj+b5zoeT+JHc' 'Qb+E8iudXGrhNdegyt4xWqTwzo/7upKZrRVz1DdFYm46ZqqgJV8+0Wb0odi3hnTl7AyyDlZbtN' 'fiEJ6cxJeigE3UiNYgbfwJsYgcNPFcY+c7VRF55iS2jQthLGkEPsr68K02U8FXB3/4fhbX5l7h' '22BoVgurdQnfmLO+sxJIAqITumaIiLeSRfpoIt5KFumjySJVskgfTRapkkX6aN7ebFGySAlkbr' 'YoWaQEws2WioAy6rGf6iJVskgfS48Ci/SxZJEqWaSPJYtUySJ9LFmkSIP/wCVbpIoX6QeSRapk' 'kX4gWaRKFukHkkU6qn4Xi/RPzSJFIvrvYpFe5v3XHP/GIv2EXqSPu9lZHGK7xMlZ6OPS52bJ/e' '3/v63QUVmhn0hke1RW6CeSFToqK/QTyQodlRX6iWSFjsoK/USyQkdlhX5Cr9D/kWEYDtv/Izr8' 'dF7l0ul+ErOthVO6JMIUB87HUUcAkVSa47uWls5iTTeCZjWc0IJRCzc2I0TNJrnUXFOHu+7QbX' 'Fbusb3W7sjY0k09OTcEgRnRVcsoJ48IxI6nfjsOed50p0NzpoTh66DubMLi0uW0TqdgMY9oK7g' 'c3sNwtL6VF71qav4jMYCqS2Dr+gCZwFGwdYJB5xR/wlt95X26pQnXNGzVHopDBnTeE8XOAvw5d' 'TfCx1wVv0xty0dcrms62yaQoJc4EVPV5zuC4Tx+8NdYEaLzxgWRUgy6jMQiP+clzoVo6JzP5OW' 'S+jcz+Rt4dlRGQ+BrpLsjlHRuQQyBUxGxfch5P1yX3BUfJ//DCqu5Z1jlMn67E915xgVD+Wz6V' 'HAQ/lssnOMCqc+m+wco+KhfDbZOXBh6E8u2c4xyjvHnyQ7x6jsHH+S7ByjsnP8id45fhVbQ1F9' 'EVvH39HWMfaPWX/ahn3tkT3UVGDjCQlX7QGPZaJcJNVJ7TimD/RtfTMkqSioP15gLvnbNL7jx8' '9KXUbc3+H7TLZMbBQ1TF3ZWJQtn+txKUMQOOt8d4Pvdcbl1CX4LhLqzdSXOvQbuladnHFo+hK0' 'x48LivEJraMIk/40TVezmWhzeykan5iQw00udMPL7JxbCtLWizTFJnWZNFyT+mKei/z/WZZ/o5' 'r94xCb/w5d+3Gd2eMWj0hVmEyOFLmIqNTIsXOpizWvSeEKnA/VovaUKTVVM7nq9Xg5KY5T119+' '8eurq87bLsqmU2bSH6+FJBSm/I3+OhgmLCUJSFuLu5NFUathjmZg8mf9f1NajaLSpM7Refkk/V' '4JWuWV4AGCgRgGvbJzv23iv9qhyPPxenlc3pkoo6Ws6KJUuieWevKFyKKtdP/foOpKrOoskFY/' 'g4e6wHmAd4sSTsAZgPeqA13gHMAoCux2mFF/BczXpNpCaf5Vb4cIGv2VXsppMCPBFbI0OAcw6s' 'SNMBij+zKk6IBwQY/sy4leK0rU9MvQa3scUAagvaJVijIaAiHFg7//V+ShfBWoDuH7f0vpc+6d' 'JXQSs7+1TlKH9cHZNmxsRudDqJKWh+1KlwTm8qtB7Nc6LZ2gJUd2c3LfR74IqNWC5A/LBwaToY' 'GtX02PFiz9at4mtBSFnV/N2xutRWElga6jfc2wMqueAKYJ2wZbxBNp5HyUkkYOLj0B5Nc6oBxA' 'KLhjkOfU3wDTuG2DQ7a/SSPHIdvf5G0mpQZlABqVi+UaxLhQvt0g71N/C0xJmz4D8hxQHiCXcp' 'ww/W3eXlTUoBxAroTl1dfytog2Awj519LI87qVSzkOgr4Gyg84oBxApoh2kTfcrwPT9bYNn8+k' 'kaMUztfTlPP5DCi/2gHlAEId9schvnvUd7ADfrefdsD7/LlmNdiMpYxxvalvhMntwY6kupsP7+' 'mcWanMh8wASWJDmfNG2FXl3N8KnMJH5Ki84qdZODqhhnPSQLjeUHDH9Ds6XvOaUf7N1fj7Faq6' 'P6Vowz8VScncelKDO/A366HOz0ijTQoz8qh5wLgE1SLVuxk1a1Ke0TnfTgpY20tQDlfrsdRslU' '8rJd96oh/zs3P8DcGafHgvxBFs+rZnUnFASh/WN+rUK3BFDfuZLCmeOkmeAT4IJXfz9BDsjZSL' '3hMEb6XWHD0y10te7fmnQr7SGEXnUT+Zy20nqdvJuBn706G6V+6q3Huv/Qf/d++9eBjIw5Uq/0' 'O88Fd9f2297sEftYWjbckrokfPp762E2+ShelzeSs//Z+7X/r+vwkm6xP0j3/zpH/jpH+M/td/' 
'ObeDOt9ajxq9AyvLiytdL076N+NdvNgIVsIGuX8y+gn9SnWy1vPK88wr+iulmk3SPpxc7Wl/1L' 'TXZYaJn9J4bXK9p/FNtrGu0Dt+dMJ8lQdsmqJlYNgmeS726wM2R1qSptrk16/KV1QlJ4QLUPqu' '0OvvREp96np7wrn/1zFJaboCIl9roWUm6c+xLt3u+4gy6HSrsFltRHG6RqtcCtS2GPKgXCHnbN' 'B2vZUUOObU6Op5f3wziuP6SsMWcufQiUlnSmw4p+i8NmO56LC+0CppQZZdWyjfreWLuWaPEUuJ' '+1KyXOSQik0W5ppdTc2tMqbhtKHFCnHipdoLlejLMFRnA8cmHdhUsLX8cz+mw1+13Gyxm4+OdX' '11O3wuOCefsvA3opijNtHKhXrUiQ1zzQdl9dhqJeFrsIZUMVOl2hQ2d2tyu9OQ/uQPPtyL6q1S' '89+p+r3DqNOieijWy9sktumrVlxqWqQKmT/aBJe2WlY0RSIuznhCFOVdTdfGdBlofDqdgAgsKy' 'FthSxGYut1c0Zf5Y7Xg5Z2lbqqxptENV3tmt/hQd6t86l0Xliw04jdYcbRhmSPdbcEZuuoIoXV' 'N1/LYhRwAonaYIdF5JfWWlFnsyTuOStJLnMcaA2FkTkfAbArM/X1pqTIbiLRQJRsmHX94cy2UX' 'w6Ex9IpUZkvcUeMhm5NmHWfvqJGDWTFHDTnyfjSzpibTvLSOxi2rtXghWdIEuDr681OdDIZeM5' 'DktdRqbUjhMo0dV4cDF6EqY43wjRieVIpbO9VHWZOF9/6amKxLyk+i5fFRKTfI84IGyIuKB+gI' 'wDskccEALtlYTyPeKAEAgfcCkyCD7yI8D0nX7JHt8jbh5B4ea9bsDCYP78fj/5V5Nj3+13vzsi' '3z9AGW+R5YtZceZGv3zNzbMM4E8fOJdZJHLtxKHlu45a40npQ05ZDLQKhRmp78/jE9o7UmDqPE' 'FdSV1NCVkmdhavcWeF4xYIoWeLgHfGm3hnvJW3US9R3Me1fm6EZVMbAZM8fhNtqEeO8HvmZm2Z' 'RzV+64S1J6gBUNoG2JfHk8fc4GiS/mkW9w5DTHWuK0S4LLyZqbT7cjd/Ui/f7t+MdOJmTzNNfy' '/yY2nkO32vyDfXgY8J6h0/asRmSQ/6ozsag9xWrhcnGkNXXGCxSL5AbffdKPlwqERt5ldh20o2' 'lxaiRoDcUS2MPZOPmbei0bV5pw2+ZAcYtzVWreb3zHLRtp5RZnx/yZyfbtSrUSNqTsj1hj1OcI' 'XX4lAXOA+w+XLfHie4QuA9ElHf4wRXCIyIehpcAPgqdYN3RRpM/js9OKAOe3+ZdZ5k1Ce1WvhU' '1txaXucP0ugoA5K9Q/0JkU7LmmvHpfB/gxTCpPxNQ+1sNCfxxcgaP0js30knxzmI4w4KOPDujq' '9oW0QTk/yqxmO/doMzK7mcRgreKYKvZ4kv0skdveo2iUOSRQ6cWlI1SntQBZQPhK1oSh+xwICx' 'Wf6op8+7jRTUxw0BD6dokbZV5LyqVo9JE23XzfefO/oitjsTCL18sneWEX75ZO8sZ/REdM8ywj' 'Cf7J1lHAt8sneWMzzLn9Sz/NiQ8ySrngQpE2NvHrJfwFhkFxc76Ty5puloqa1a7awCUzU/4Np0' '2/B/N8R4ZwOpnhznsZXA33DWOjpZSRIr4BdMSrv5jpNZRK4tgaIwXChb9lw+MujtFxc9aScOqx' '2+zolmsa4pjLKFLH2ejrJ1vaX3fdteH9O2JVTPH9y2rgd3CoIaZEsTcBnfdwbdy5YgRoBLJNhQ' 'g7VWsLnOZNsGLJiaAM8waxynUjDUaARNfUejHU3oQwJ9v8Ksu7LeZi1uvjhjgtj4viaqfXUPJk' 'o4lmzQ2jZJ3BNb5WCBb0StJ6/IpSW3gtJt9uFG0DqPFaWPEI4cmdB+XMzfqQ7Z4RALU9vFhg+T' 'hoeQh7YUe2OhwReRSG7q8Xkv+faKQderhdmH5KrIEIwouYWrIxJku5EiORNuMU9YcuUqd3INnL' '/qp7/eZL41k9qsuI6QDVzwfj4jw3cFF6F3z7/oYwTld0BLYL2j7rQ/rgQP0MObbntatA+YXqeb' '4gqAEz1tngbHKzv3C45nwmRaOh9a76zQ2iC4NjkEwawsDCsn+pNprTXnq+EQeCMEyJloBXW+lW' 'NERFDpXn3zvvvh7pZWRSuNoHleC71ZDXLdWVuVjAYuTPmZyUuWln+svOOc6Ga3+8/Ts3LYP+EK' 'tuUWm4OH9bc9eNj+KRmrEe9YmhghFwOm7B8+8rSYxW2hN4lO1GKVF7oESz8kQm+xsyIZJ36ta/' 'hx18aFePmTvRsXIt1P9tuznwScAXivurYLnAMYIf49DjinvgXMh1NtEer/Vm+HCPd/q7dDhPy/' 'hQ6v6wIz7nE1keqwT30bmI+l2vYZ8FAXOA9wd4c4Bvg2OpzqAucAvlEd9b6NgPpe9f1+ZKwPqA' 'xyQ5JPm2pN29C+4np9k2a7vYWLRen7fzpogOpo6UC7+XLNdPLlCrtnu9WP4jiq1gN7BGk/1WV7' '8dzIfZILYT4+w5Ywf+sDYpskystLqfo2OsiO2oc05oK60vtZ/gkn84f9XPT0Ptwsm7aZUWZvi3' 'XQgqMZsJ/D+3kz6uIEBxfNZuWZiI4pWUaLyCFGPOu9Ylf/MPGs94pn/UN41pc5oAxAl0tV5r1i' 'SxMIn8MsMgie9T8A0y8PiGe9Vzzrf4Bnvdd7c8bCMOh/0ib0z7mONWcap/fb7iMUdxjOxxE541' '4fFrAxYGJhQbfNVjaHGGUdJmP5tFQRN/4pEfIEnAfY2J8JOAOwsT8TcA5gY38m4ALAxv50wPhO' 'h7Y/l5wHGfULA0TJtWN3dHOI5Ynr92t/zHw5a0dOdY0QFjbjHeoC5wE2yzgBMxV71cEucA5gfE' 'j7VQ44q14DzAfG1ropZodFmx6rCMTR3OJTOTa8mZZl+aABr//krolzyKuzMLpGBr31mt6RQQW/' 'ZqBn7sBfAptPyyfgHMA4sP6yK6459Qag3j/2uUyPvEoK5LMZma/vWz/NyBiLLnMVNs1v56ga2m' 'kziNuO047MvwvwvPjrQ+Py5VJd8sQ43Kw0bmeUU9oSnOhiHzaUN/SyDxvKG3rZhw3lDWDf5V1g' 'ZtSVauz/A3VwEtY='))) _INDEX = { f.name: { 'descriptor': f, 'services': {s.name: s for s in f.service}, } for f in FILE_DESCRIPTOR_SET.file } DiscoveryServiceDescription = { 'file_descriptor_set': FILE_DESCRIPTOR_SET, 'file_descriptor': _INDEX[u'service.proto']['descriptor'], 'service_descriptor': 
_INDEX[u'service.proto']['services'][u'Discovery'], }
luci/luci-py
appengine/components/components/prpc/discovery/service_prpc_pb2.py
Python
apache-2.0
29,794
0.012016
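The FILE_DESCRIPTOR_SET literal above is an embedding trick rather than anything proto-specific: the serialized descriptor set is zlib-compressed and base64-encoded so it fits in source. A minimal round-trip of that encoding (the payload is a stand-in, not real descriptor bytes):

import base64
import zlib

raw = b'\x0a\x0dservice.proto'  # stand-in for serialized FileDescriptorSet bytes
blob = base64.b64encode(zlib.compress(raw))

# Decoding mirrors the generated module: base64 -> zlib -> proto bytes.
assert zlib.decompress(base64.b64decode(blob)) == raw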
#!/usr/bin/env python import roslib; roslib.load_manifest('master_monitor') import rospy from rosgraph_msgs.msg import Log from syscall import runCmd, runCmdOutput from move_base_msgs.msg import MoveBaseGoal, MoveBaseAction from geometry_msgs.msg import PoseStamped import actionlib import numpy as np class ControlModel: current_goal = None level = ["DEBUG", "INFO", "WARN", "ERROR", "FATAL"] kinect_recov_launched = False move_base_client = None control_model = ControlModel() def main(): rospy.init_node('master_monitor') global move_base_client global control_model move_base_client = actionlib.SimpleActionClient('move_base', MoveBaseAction) current_goal_sub = rospy.Subscriber("/move_base_node/current_goal", PoseStamped, current_goal_callback) rospy.Subscriber("/rosout", Log, callback) rospy.Subscriber("/rosout_agg", Log, callback) print "Master Listener Node Launched" while not rospy.is_shutdown(): rospy.sleep(0.2) check_system() def callback(data): global level # print "Level: "+level[int(np.sqrt(data.level))]+", From node " + data.name + ", a message: "+data.msg.strip() if str(data.msg).strip() == str("Connectivity Error: Could not find a common time /base_link and /map.") and data.name == "/move_base_node": kinect_reconfiguration() # if data.name == "/sicklms": # print "Level: "+level[int(np.sqrt(data.level))]+", From node " + data.name + ", a message: "+data.msg.strip() if data.name == "/sicklms" and str(data.msg).strip() == "woah! error!": kinect_reconfiguration() def current_goal_callback(data): global control_model control_model.current_goal = data rospy.loginfo("Current goal received and stored") def kinect_reconfiguration(): global kinect_recov_launched global move_base_client global control_model if kinect_recov_launched: rospy.logwarn("Kinect Reconfiguration has been launched") return kinect_recov_launched = True while not move_base_client.wait_for_server(rospy.Duration(1.0)) and not rospy.is_shutdown(): rospy.loginfo("Waiting for the move_base action server to come up") if rospy.is_shutdown(): return rospy.loginfo("Canceling all active goals") move_base_client.cancel_all_goals() rospy.loginfo("Launching Kinect Reconfiguration!") #Kill SICK laser node runCmd("rosnode kill /sicklms &") #Launch kinect node to replace laser_scan runCmd("roslaunch master_monitor kinect_reconfiguration.launch &") rospy.loginfo("Kinect Reconfiguration launcher has been launched.") rospy.sleep(10.0) rospy.loginfo("Re establishing model state...") restablish_state() rospy.loginfo("Model state loaded...") def restablish_state(): global control_model if control_model.current_goal != None: goal = MoveBaseGoal() goal.target_pose = control_model.current_goal goal.target_pose.header.stamp = rospy.Time.now() move_base_client.send_goal(goal) rospy.loginfo("Last active goal re-established") def check_system(): global kinect_recov_launched if kinect_recov_launched: return # print "Rosnode list" rosnode_list = runCmdOutput("rosnode list") node_to_check="sicklms" if rosnode_list.find(node_to_check) == -1: rospy.logerr("Problem: "+node_to_check+" node not found. Launching reconfiguration") kinect_reconfiguration() if __name__ == '__main__': main()
aslab/rct
higgs/trunk/code/ROS/metacontrol/master_monitor/src/master_monitor.py
Python
gpl-3.0
3,374
0.039123
# This file is part of eventmq.
#
# eventmq is free software: you can redistribute it and/or modify it under the
# terms of the GNU Lesser General Public License as published by the Free
# Software Foundation, either version 2.1 of the License, or (at your option)
# any later version.
#
# eventmq is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with eventmq. If not, see <http://www.gnu.org/licenses/>.
"""
:mod:`client` -- Client Utilities
=================================
This module contains utilities that can be used when acting as a client in
eventmq. (e.g. one who requests jobs)

.. toctree::
   :maxdepth: 2

   client/messages
   client/jobs

"""
com4/eventmq
eventmq/client/__init__.py
Python
lgpl-2.1
933
0
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Tests lesson 04 task 05."""


# Import Python libs
import unittest
import mock
import random


class Lesson04Task05TestCase(unittest.TestCase):
    """
    Test cases for lesson 04 task 05.
    """

    def test_blood_pressure_status(self):
        """
        Tests that the correct ``BP_STATUS`` is returned.

        This test will try random numbers in each of the target ranges.
        """
        levels = {'low': [-256, 89],
                  'ideal': [90, 119],
                  'warning': [120, 139],
                  'high': [140, 159],
                  'emergency': [160, 256]
                  }

        for key, value in levels.iteritems():
            systolic = random.randint(value[0], value[1])
            with mock.patch('__builtin__.raw_input', side_effect=[systolic]):
                try:
                    task_05 = reload(task_05)
                except NameError:
                    import task_05

                self.assertEqual(task_05.BP_STATUS.lower(), key)


if __name__ == '__main__':
    unittest.main()
Logan213/is210-week-04-warmup
tests/test_task_05.py
Python
mpl-2.0
1,085
0.000922
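The test above leans on two idioms: mock.patch with side_effect to feed raw_input() one canned value per call, and reload() so the module's top-level input handling runs again under each patch. The patching half in isolation (Python 2; the values are illustrative):

import mock

# Each raw_input() call pops the next value from side_effect.
with mock.patch('__builtin__.raw_input', side_effect=['120', '80']):
    print(raw_input())  # '120'
    print(raw_input())  # '80'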
#!/usr/bin/python2.7 # Copyright 2010 Google Inc. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Create an API definition by interpreting a discovery document. This module interprets a discovery document to create a tree of classes which represent the API structure in a way that is useful for generating a library. For each discovery element (e.g. schemas, resources, methods, ...) there is a class to represent it which is directly usable in the templates. The instances of those classes are annotated with extra variables for use in the template which are language specific. The current way to make use of this class is to create a programming language specific subclass of Api, which adds annotations and template variables appropriate for that language. TODO(user): Refactor this so that the API can be loaded first, then annotated. """ __author__ = 'aiuto@google.com (Tony Aiuto)' import json import logging import operator import urlparse from googleapis.codegen import data_types from googleapis.codegen import template_objects from googleapis.codegen import utilities from googleapis.codegen.api_exception import ApiException from googleapis.codegen.schema import Schema from googleapis.codegen.utilities import convert_size _DEFAULT_SERVICE_HOST = 'www.googleapis.com' _DEFAULT_OWNER_DOMAIN = 'google.com' _DEFAULT_OWNER_NAME = 'Google' _RECOGNIZED_GOOGLE_DOMAINS = ( 'google.com', 'googleapis.com', 'googleplex.com' ) # Recognized names of request and response fields used for paging. _PAGE_TOKEN_NAMES = ('pageToken', 'nextPageToken') _LOGGER = logging.getLogger('codegen') class Api(template_objects.CodeObject): """An API definition. This class holds a discovery centric definition of an API. It contains members such as "resources" and "schemas" which relate directly to discovery concepts. It defines several properties that can be used in code generation templates: name: The API name. version: The API version. versionNoDots: The API version with all '.' characters replaced with '_'. This is typically used in class names. versionNoDash: The API version with all '-' characters replaced with '_'. This is typically used in file names where '-' has meaning. authScopes: The list of the OAuth scopes used by this API. dataWrapper: True if the API definition contains the 'dataWrapper' feature. methods: The list of top level API methods. models: The list of API data models, both from the schema section of discovery and from anonymous objects defined in method definitions. 
parameters: The list of global method parameters (applicable to all methods) resources: The list of API resources """ def __init__(self, discovery_doc, language=None): super(Api, self).__init__(discovery_doc, self, wire_name=discovery_doc['name']) name = self.values['name'] self._validator.ValidateApiName(name) if name != 'freebase': self._validator.ValidateApiVersion(self.values['version']) canonical_name = self.values.get('canonicalName') or name if not self.values.get('canonicalName'): self.values['canonicalName'] = canonical_name self._class_name = self.ToClassName(canonical_name, self) # Guard against language implementor not taking care of spaces self._class_name = self._class_name.replace(' ', '') self._NormalizeOwnerInformation() self._language = language self._template_dir = None self._surface_features = {} self._schemas = {} self._methods_by_name = {} self._all_methods = [] self.SetTemplateValue('className', self._class_name) self.SetTemplateValue('versionNoDots', self.values['version'].replace('.', '_')) self.SetTemplateValue('versionNoDash', self.values['version'].replace('-', '_')) self.SetTemplateValue('dataWrapper', 'dataWrapper' in discovery_doc.get('features', [])) self.values.setdefault('title', name) self.values.setdefault('exponentialBackoffDefault', False) if not self.values.get('revision'): self.values['revision'] = 'snapshot' self._NormalizeUrlComponents() # Information for variant subtypes, a dictionary of the format: # # { 'wireName': {'discriminant': discriminant, 'value': value, # 'schema': schema}, # ... } # # ... where wireName is the name of variant subtypes, discriminant # the field name of the discriminant, value the discriminant value # for this variant, and schema the base schema. # # This information cannot be stored in the referred schema at # reading time because at the time we read it from the base # schema, the referenced variant schemas may not yet be loaded. So # we first store it here, and after all schemas have been loaded, # update the schema template properties. 
self._variant_info = {} # Build data types and methods self._SetupModules() self.void_type = data_types.Void(self) self._BuildSchemaDefinitions() self._BuildResourceDefinitions() self.SetTemplateValue('resources', self._resources) # Make data models part of the api dictionary self.SetTemplateValue('models', self.ModelClasses()) # Replace methods dict with Methods self._top_level_methods = [] method_dict = self.values.get('methods') or {} for name in sorted(method_dict): self._top_level_methods.append(Method(self, name, method_dict[name])) self.SetTemplateValue('methods', self._top_level_methods) # Global parameters self._parameters = [] param_dict = self.values.get('parameters') or {} for name in sorted(param_dict): parameter = Parameter(self, name, param_dict[name], self) self._parameters.append(parameter) if name == 'alt': self.SetTemplateValue('alt', parameter) self.SetTemplateValue('parameters', self._parameters) # Auth scopes self._authscopes = [] if (self.values.get('auth') and self.values['auth'].get('oauth2') and self.values['auth']['oauth2'].get('scopes')): for value, auth_dict in sorted( self.values['auth']['oauth2']['scopes'].iteritems()): self._authscopes.append(AuthScope(self, value, auth_dict)) self.SetTemplateValue('authscopes', self._authscopes) @property def all_schemas(self): """The dictionary of all the schema objects found in the API.""" return self._schemas def _SetupModules(self): """Compute and set the module(s) which this API belongs under.""" # The containing module is based on the owner information. path = self.values.get('modulePath') or self.values.get('packagePath') self._containing_module = template_objects.Module( package_path=path, owner_name=self.values.get('owner'), owner_domain=self.values.get('ownerDomain')) self.SetTemplateValue('containingModule', self._containing_module) # The API is a child of the containing_module base = self.values['name'] # TODO(user): Introduce a breaking change where we always prefer # canonicalName. if self.values.get('packagePath'): # Lowercase the canonical name only for non-cloud-endpoints Google APIs. # This is to avoid breaking changes to existing Google-owned Cloud # Endpoints APIs. if self.values.get('rootUrl').find('.googleapis.com') > 0: base = self.values.get('canonicalName').lower() or base else: base = self.values.get('canonicalName') or base if self.values.get('version_module'): base = '%s/%s' % (base, self.values['versionNoDots']) self._module = template_objects.Module(package_path=base, parent=self._containing_module) self.SetTemplateValue('module', self._module) # The default module for data models defined by this API. self._model_module = template_objects.Module(package_path=None, parent=self._module) def _BuildResourceDefinitions(self): """Loop over the resources in the discovery doc and build definitions.""" self._resources = [] def_dict = self.values.get('resources') or {} for name in sorted(def_dict): resource = Resource(self, name, def_dict[name], parent=self) self._resources.append(resource) def _BuildSchemaDefinitions(self): """Loop over the schemas in the discovery doc and build definitions.""" schemas = self.values.get('schemas') if schemas: for name in sorted(schemas): def_dict = schemas[name] # Upgrade the string format schema to a dict. if isinstance(def_dict, unicode): def_dict = json.loads(def_dict) self._schemas[name] = self.DataTypeFromJson(def_dict, name) # Late bind info for variant types, and mark the discriminant # field and value. 
    for name, info in self._variant_info.iteritems():
      if name not in self._schemas:
        # The error will be reported elsewhere
        continue
      schema = self._schemas[name]
      for prop in schema.values.get('properties'):
        if prop.values['wireName'] == info['discriminant']:
          # Filter out the discriminant property as it is already
          # contained in the base type.
          schema.SetTemplateValue(
              'properties',
              [p for p in schema.values.get('properties') if p != prop])
          break
      else:
        logging.warn("Variant schema '%s' for base schema '%s' "
                     "does not have the expected discriminant property '%s'.",
                     name, info['schema'].values['wireName'],
                     info['discriminant'])
      schema.SetTemplateValue('superClass', info['schema'].class_name)
      # TODO(user): baseType is for backwards compatibility only. It should
      # have always been a different name. When the old Java generators roll
      # off, remove it.
      schema.SetTemplateValue('baseType', info['schema'].class_name)
      schema.SetTemplateValue('discriminantValue', info['value'])

  def _NormalizeOwnerInformation(self):
    """Ensure that owner and ownerDomain are set to sane values."""
    owner_domain = self.get('ownerDomain', '')
    if not owner_domain:
      root_url = self.get('rootUrl')
      if root_url:
        owner_domain = urlparse.urlparse(root_url).hostname
        # Normalize google domains.
        if any(owner_domain.endswith(d) for d in _RECOGNIZED_GOOGLE_DOMAINS):
          owner_domain = 'google.com'
    if owner_domain:
      owner_domain = utilities.SanitizeDomain(owner_domain)
    else:
      owner_domain = _DEFAULT_OWNER_DOMAIN

    self.SetTemplateValue('ownerDomain', owner_domain)
    if not self.get('ownerName'):
      if owner_domain == _DEFAULT_OWNER_DOMAIN:
        owner_name = _DEFAULT_OWNER_NAME
      else:
        owner_name = owner_domain.replace('.', '_')
      self.SetTemplateValue('ownerName', owner_name)
    if not self.get('owner'):
      self.SetTemplateValue('owner', self['ownerName'].lower())

  def _NormalizeUrlComponents(self):
    """Sets template values concerning the path to the service.

    Sets rootUrl and servicePath from the values given or defaults based on
    what is available. Verifies them for safety. The hierarchy of the
    possible inputs is:
      use rootUrl + servicePath as the best choice if it exists (v1new)
      or rpcPath
      or use baseUrl (v1)
      or use basePath (v1)
      or restBasePath (v0.3)
      or default to 'api/version'

    Raises:
      ValueError: if the values available are inconsistent or disallowed.
    """
    # If both rootUrl and servicePath exist, they equal what is in baseUrl.
    root_url = self.values.get('rootUrl')
    service_path = self.values.get('servicePath')
    rpc_path = self.values.get('rpcPath')
    if root_url:
      # oauth2 has a servicePath of "". This is weird but OK for that API, but
      # it means we must explicitly check against None.
      if service_path is not None:
        base_url = root_url + service_path
      elif rpc_path:
        base_url = rpc_path
      else:
        raise ValueError('Neither servicePath nor rpcPath is defined.')
    else:
      base_url = self.values.get('baseUrl')

    # If we have a full path ('https://superman.appspot.com/kryptonite/hurts'),
    # then go with that, otherwise just use the various things which might
    # hint at the servicePath.
    best_path = (base_url
                 or self.values.get('basePath')
                 or self.values.get('restBasePath')
                 or '/%s/%s/' % (self.values['name'], self.values['version']))
    if best_path.find('..') >= 0:
      raise ValueError('api path must not contain ".." (%s)' % best_path)
    # And let urlparse do the grunt work of normalizing and parsing.
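    # For example (hypothetical values), rootUrl='https://www.googleapis.com/'
    # with servicePath='storage/v1/' gives
    # best_path='https://www.googleapis.com/storage/v1/', which parses into
    # scheme='https', netloc='www.googleapis.com' and path='/storage/v1/'.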
    url_parts = urlparse.urlparse(best_path)
    scheme = url_parts.scheme or 'https'
    service_host = url_parts.netloc or _DEFAULT_SERVICE_HOST
    base_path = url_parts.path
    if not root_url:
      self._api.SetTemplateValue('rootUrl',
                                 '%s://%s/' % (scheme, service_host))
    if service_path is None:
      self._api.SetTemplateValue('servicePath', base_path[1:])

    # Make sure template writers do not revert
    self._api.DeleteTemplateValue('baseUrl')
    self._api.DeleteTemplateValue('basePath')
    self._api.DeleteTemplateValue('serviceHost')

  def ModelClasses(self):
    """Return all the model classes."""
    ret = set(
        s for s in self._schemas.itervalues()
        if isinstance(s, Schema) or isinstance(s, data_types.MapDataType))
    return sorted(ret, key=operator.attrgetter('class_name'))

  def TopLevelModelClasses(self):
    """Return the models which are not children of another model."""
    return [m for m in self.ModelClasses() if not m.parent]

  def DataTypeFromJson(self, type_dict, default_name, parent=None,
                       wire_name=None):
    """Returns a schema object represented by a JSON Schema dictionary.

    Evaluate a JSON schema dictionary and return an appropriate schema object.
    If a data type is defined in-line, then create the schema dynamically. If
    the schema is a $ref to another, return the previously created schema or
    a lazy reference.

    If the type_dict is None, a blank schema will be created.

    Args:
      type_dict: A dict of the form expected of a request or response member
        of a method description. See the Discovery specification for more.
      default_name: The unique name to give the schema if we have to create
        it.
      parent: The schema where I was referenced. If we cannot determine that
        this is a top level schema, set the parent to this.
      wire_name: The name which will identify objects of this type in data on
        the wire.

    Returns:
      A Schema object.
    """
    # new or not initialized, create a fresh one
    schema = Schema.Create(self, default_name, type_dict or {}, wire_name,
                           parent)
    # Only put it in our by-name list if it is a real object
    if isinstance(schema, Schema) or isinstance(schema,
                                                data_types.MapDataType):
      # Use the path to the schema as a key. This means that an anonymous
      # class for the 'person' property under the schema 'Activity' will have
      # the unique name 'Activity.person', rather than 'ActivityPerson'.
      path = '.'.join(
          [a.values.get('wireName', '<anon>') for a in schema.full_path])
      _LOGGER.debug('DataTypeFromJson: add %s to cache', path)
      self._schemas[path] = schema
    return schema

  def AddMethod(self, method):
    """Add a new method to the set of all methods."""
    self._all_methods.append(method)
    self._methods_by_name[method.values['rpcMethod']] = method

  def MethodByName(self, method_name):
    """Find a method by name.

    Args:
      method_name: (str) the full RPC name of a method defined by this API.

    Returns:
      Method object or None if not found.
    """
    return self._methods_by_name.get(method_name)

  def SchemaByName(self, schema_name):
    """Find a schema by name.

    Args:
      schema_name: (str) name of a schema defined by this API.

    Returns:
      Schema object or None if not found.
    """
    return self._schemas.get(schema_name, None)

  def SetVariantInfo(self, ref, discriminant, value, schema):
    """Sets variant info for the given reference."""
    if ref in self._variant_info:
      logging.warning("Base type of '%s' changed from '%s' to '%s'. "
                      "This is an indication that a variant schema is used "
                      "from multiple base schemas and may result in an "
                      "inconsistent model.", ref,
                      self._variant_info[ref]['schema'].values['wireName'],
                      schema.values['wireName'])
    self._variant_info[ref] = {'discriminant': discriminant, 'value': value,
                               'schema': schema}

  def VisitAll(self, func):
    """Visit all nodes of an API tree and apply a function to each.

    Walks a tree and calls a function on each element of it. This should be
    called after the API is fully loaded.

    Args:
      func: (function) Method to call on each object.
    """
    _LOGGER.debug('Applying function to all nodes')
    func(self._containing_module)
    func(self._module)
    func(self._model_module)
    for resource in self.values['resources']:
      self._VisitResource(resource, func)
    # Top level methods
    for method in self.values['methods']:
      self._VisitMethod(method, func)
    for parameter in self.values['parameters']:
      func(parameter)
      func(parameter.data_type)
    for schema in self._schemas.values():
      self._VisitSchema(schema, func)
    for scope in self.GetTemplateValue('authscopes') or []:
      func(scope)

  def _VisitMethod(self, method, func):
    """Visit a method, calling a function on every child.

    Args:
      method: (Method) The Method to visit.
      func: (function) Method to call on each object.
    """
    func(method)
    for parameter in method.parameters:
      func(parameter)

  def _VisitResource(self, resource, func):
    """Visit a resource tree, calling a function on every child.

    Calls down recursively to sub resources.

    Args:
      resource: (Resource) The Resource to visit.
      func: (function) Method to call on each object.
    """
    func(resource)
    for method in resource.values['methods']:
      self._VisitMethod(method, func)
    for r in resource.values['resources']:
      self._VisitResource(r, func)

  def _VisitSchema(self, schema, func):
    """Visit a schema tree, calling a function on every child.

    Args:
      schema: (Schema) The Schema to visit.
      func: (function) Method to call on each object.
    """
    func(schema)
    func(schema.module)
    for prop in schema.values.get('properties', []):
      func(prop)
    for child in self.children:
      func(child)

  # Do not warn about unused arguments, pylint: disable=unused-argument
  def ToClassName(self, s, element, element_type=None):
    """Convert a name to a suitable class name in the target language.

    This default implementation camel cases the string, which is appropriate
    for some languages. Subclasses are encouraged to override this.

    Args:
      s: (str) A rosy name of data element.
      element: (object) The object we are making a class name for.
      element_type: (str) Deprecated. The kind of object we are making a
        class name for. E.g. resource, method, schema.
        TODO(user): replace type in favor of class of element, but that will
        require changing the place where we call ToClassName with no element.

    Returns:
      A name suitable for use as a class in the generator's target language.
    """
    return utilities.CamelCase(s).replace(' ', '')

  def NestedClassNameForProperty(self, name, schema):
    """Returns the class name of an object nested in a property."""
    # TODO(user): This functionality belongs in the language model, but
    # because of the way the api is bootstrapped, that isn't available when we
    # need it. When language model is available from the start, this should be
    # moved.
    return '%s%s' % (schema.class_name, utilities.CamelCase(name))

  @property
  def class_name(self):
    return self.values['className']

  @property
  def model_module(self):
    return self._model_module

  @property
  def containing_module(self):
    return self._containing_module

  @property
  def all_methods(self):
    """All the methods in the entire API."""
    return self._all_methods

  @property
  def top_level_methods(self):
    """All the methods at the API top level (not in a resource)."""
    return self._top_level_methods


class Resource(template_objects.CodeObject):

  def __init__(self, api, name, def_dict, parent=None):
    """Creates a Resource.

    Args:
      api: (Api) The Api which owns this Resource.
      name: (string) The discovery name of the Resource.
      def_dict: (dict) The discovery dictionary for this Resource.
      parent: (CodeObject) The resource containing this method, if any. Top
        level resources have the API as a parent.
    """
    super(Resource, self).__init__(def_dict, api, parent=parent,
                                   wire_name=name)
    self.ValidateName(name)
    class_name = api.ToClassName(name, self, element_type='resource')
    self.SetTemplateValue('className', class_name)
    # Replace methods dict with Methods
    self._methods = []
    method_dict = self.values.get('methods') or {}
    for name in sorted(method_dict):
      self._methods.append(Method(api, name, method_dict[name], parent=self))
    self.SetTemplateValue('methods', self._methods)
    # Get sub resources
    self._resources = []
    r_def_dict = self.values.get('resources') or {}
    for name in sorted(r_def_dict):
      r = Resource(api, name, r_def_dict[name], parent=self)
      self._resources.append(r)
    self.SetTemplateValue('resources', self._resources)

  @property
  def methods(self):
    return self._methods

  @property
  def methods_dict(self):
    return {method['wireName']: method for method in self._methods}


class AuthScope(template_objects.CodeObject):
  """The definition of an auth scope.

  An AuthScope defines these template values
    value: The scope url
    name: a sanitized version of the value, transformed so it generally can
      be used as an identifier in code. Deprecated, use constantName
    description: the description of the scope.

  It also provides a template property which can be used after a language
  binding is set.
    constantName: A transformation of the value so it is suitable as a
      constant name in the specific language.
  """
  GOOGLE_PREFIX = 'https://www.googleapis.com/auth/'
  HTTPS_PREFIX = 'https://'

  def __init__(self, api, value, def_dict):
    """Construct an auth scope.

    Args:
      api: (Api) The Api which owns this scope.
      value: (string) The unique identifier of this scope, often a URL.
      def_dict: (dict) The discovery dictionary for this auth scope.
    """
    super(AuthScope, self).__init__(def_dict, api, wire_name=value)
    self._module = api.module

    self.SetTemplateValue('value', value)
    while value.endswith('/'):
      value = value[:-1]
    if 'description' not in self.values:
      self.SetTemplateValue('description', value)

    # Strip the common prefix to get a unique identifying name
    if value.startswith(AuthScope.GOOGLE_PREFIX):
      scope_id = value[len(AuthScope.GOOGLE_PREFIX):]
    elif value.startswith(AuthScope.HTTPS_PREFIX):
      # some common scopes are just a URL
      scope_id = value[len(AuthScope.HTTPS_PREFIX):]
    else:
      scope_id = value

    # We preserve the value stripped of the most common prefixes so we can
    # use it for building constantName in templates.
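    # For example, the scope value
    # 'https://www.googleapis.com/auth/drive.readonly' is stripped to the
    # scope_id 'drive.readonly', which the substitution below turns into the
    # name 'DRIVE_READONLY'.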
    self.SetTemplateValue('lastPart', scope_id)

    # replace all non-alphanumeric characters with '_' to form 'name'
    name = ''.join([(c if c.isalnum() else '_') for c in scope_id.upper()])
    self.SetTemplateValue('name', name)

  @property
  def constantName(self):  # pylint: disable=g-bad-name
    """Overrides default behavior of constantName."""
    return self._language_model.ApplyPolicy('constant', self,
                                            self.values['lastPart'])


class Method(template_objects.CodeObject):
  """The definition of a method."""

  def __init__(self, api, name, def_dict, parent=None):
    """Construct a method.

    Methods in REST discovery are inside of a resource. Note that the method
    name and id are calculable from each other. id will always be equal to
    api_name.resource_name[.sub_resource...].method_name. At least it should
    be, as that is the transformation Discovery makes from the API
    definition, which is essentially a flat list of methods, into a
    hierarchy of resources.

    Args:
      api: (Api) The Api which owns this Method.
      name: (string) The discovery name of the Method.
      def_dict: (dict) The discovery dictionary for this Method.
      parent: (CodeObject) The resource containing this Method, if any.

    Raises:
      ApiException: If the httpMethod type is not one we know how to
        handle.
    """
    super(Method, self).__init__(def_dict, api, parent=(parent or api))
    # TODO(user): Fix java templates to name vs. wireName correctly. Then
    # change the __init__ to have wire_name=def_dict.get('id') or name
    # then eliminate this line.
    self.SetTemplateValue('wireName', name)
    self.ValidateName(name)
    class_name = api.ToClassName(name, self, element_type='method')
    if parent and class_name == parent.values['className']:
      # Some languages complain when the collection name is the same as the
      # method name.
      class_name = '%sRequest' % class_name
    # The name is the key of the dict defining us. The id field is what you
    # have to use to call the method via RPC. The id is unique; the name
    # might not be.
    self.SetTemplateValue('name', name)
    # Fix up very old discovery, which does not have an id.
    if 'id' not in self.values:
      self.values['id'] = name
    self.SetTemplateValue('className', class_name)
    http_method = def_dict.get('httpMethod', 'POST').upper()
    self.SetTemplateValue('httpMethod', http_method)
    self.SetTemplateValue('rpcMethod',
                          def_dict.get('rpcMethod') or def_dict['id'])
    rest_path = def_dict.get('path') or def_dict.get('restPath')
    # TODO(user): if rest_path is not set, raise a good error and fail fast.
    self.SetTemplateValue('restPath', rest_path)

    # Figure out the input and output types and schemas for this method.
    expected_request = self.values.get('request')
    if expected_request:
      # TODO(user): RequestBody is only used if the schema is anonymous.
      # When we go to nested models, this could be a nested class off the
      # Method, making it unique without the silly name. Same for
      # ResponseBody.
      request_schema = api.DataTypeFromJson(expected_request,
                                            '%sRequestContent' % name,
                                            parent=self)
      self.SetTemplateValue('requestType', request_schema)

    expected_response = def_dict.get('response') or def_dict.get('returns')
    if expected_response:
      response_schema = api.DataTypeFromJson(expected_response,
                                             '%sResponse' % name,
                                             parent=self)
      if self.values['wireName'] == 'get':
        response_schema.values['associatedResource'] = parent
      self.SetTemplateValue('responseType', response_schema)
    else:
      self.SetTemplateValue('responseType', api.void_type)
    # Make sure we can handle this method type and do any fixups.
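    # WebDAV-style verbs such as 'REPORT' are accepted; anything unrecognized
    # raises an ApiException. A GET request cannot carry a body, so any
    # request type attached to a GET method is discarded.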
    if http_method not in ['DELETE', 'GET', 'OPTIONS', 'PATCH', 'POST', 'PUT',
                           'PROPFIND', 'PROPPATCH', 'REPORT']:
      raise ApiException('Unknown HTTP method: %s' % http_method, def_dict)
    if http_method == 'GET':
      self.SetTemplateValue('requestType', None)

    # Replace parameters dict with Parameters. We try to order them by their
    # position in the request path so that the generated code can track the
    # more human-readable definition, rather than the order of the parameters
    # in the discovery doc.
    order = self.values.get('parameterOrder', [])
    req_parameters = []
    opt_parameters = []
    for name, def_dict in self.values.get('parameters', {}).iteritems():
      param = Parameter(api, name, def_dict, self)

      if name == 'alt':
        # Treat the alt parameter differently
        self.SetTemplateValue('alt', param)
        continue

      # Standard params are part of the generic request class
      # We want to push all parameters that aren't declared inside
      # parameterOrder after those that are.
      if param.values['wireName'] in order:
        req_parameters.append(param)
      else:
        # optional parameters are appended in the order they're declared.
        opt_parameters.append(param)
    # pylint: disable=g-long-lambda
    req_parameters.sort(lambda x, y: cmp(order.index(x.values['wireName']),
                                         order.index(y.values['wireName'])))
    # sort optional parameters by name to avoid code churn
    opt_parameters.sort(lambda x, y: cmp(x.values['wireName'],
                                         y.values['wireName']))
    req_parameters.extend(opt_parameters)
    self.SetTemplateValue('parameters', req_parameters)

    self._InitMediaUpload(parent)
    self._InitPageable(api)
    api.AddMethod(self)

  def _InitMediaUpload(self, parent):
    media_upload = self.values.get('mediaUpload')
    if media_upload:
      if parent:
        parent.SetTemplateValue('isMedia', True)
      # Get which MIME Media Ranges are accepted for media uploads to this
      # method.
      accepted_mime_ranges = media_upload.get('accept')
      self.SetTemplateValue('accepted_mime_ranges', accepted_mime_ranges)
      max_size = media_upload.get('maxSize')
      self.SetTemplateValue('max_size', max_size)
      self.SetTemplateValue('max_size_bytes',
                            convert_size.ConvertSize(max_size))
      # Find which upload protocols are supported.
      upload_protocols = media_upload['protocols']
      for upload_protocol in upload_protocols:
        self._SetUploadTemplateValues(
            upload_protocol, upload_protocols[upload_protocol])

  def _InitPageable(self, api):
    response_type = self.values.get('responseType')
    if response_type == api.void_type:
      return
    next_page_token_name = self.FindPageToken(
        response_type.values.get('properties'))
    if not next_page_token_name:
      return
    is_page_token_parameter = True
    page_token_name = self.FindPageToken(self.optional_parameters)
    if not page_token_name:
      # The page token may be a field of the request body instead of a query
      # parameter.
      is_page_token_parameter = False
      request_type = self.values.get('requestType')
      if request_type:
        page_token_name = self.FindPageToken(
            request_type.values.get('properties'))
    if not page_token_name:
      return
    self.SetTemplateValue('isPageable', True)
    self.SetTemplateValue('isPagingStyleStandard',
                          (is_page_token_parameter and
                           page_token_name == 'pageToken' and
                           next_page_token_name == 'nextPageToken'))

  def _SetUploadTemplateValues(self, upload_protocol, protocol_dict):
    """Sets upload specific template values.

    Args:
      upload_protocol: (str) The name of the upload protocol. E.g. 'simple'
        or 'resumable'.
      protocol_dict: (dict) The dictionary that corresponds to this upload
        protocol. It typically contains keys like 'path', 'multipart' etc.
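
    For example (hypothetical values), upload_protocol='simple' with
    protocol_dict={'path': '/upload/storage/v1/b', 'multipart': True} sets
    the template values 'simple_upload_supported', 'simple_upload_path' and
    'simple_upload_multipart'.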
""" self.SetTemplateValue('%s_upload_supported' % upload_protocol, True) upload_path = protocol_dict.get('path') if upload_path: self.SetTemplateValue('%s_upload_path' % upload_protocol, upload_path) self.SetTemplateValue('%s_upload_multipart' % upload_protocol, protocol_dict.get('multipart', False)) @property def media_upload_parameters(self): return self.values.get('mediaUpload') @property def parameters(self): return self.values['parameters'] @property def optional_parameters(self): return [p for p in self.values['parameters'] if not p.required] @property def required_parameters(self): return [p for p in self.values['parameters'] if p.required] @property def path_parameters(self): return [p for p in self.values['parameters'] if p.location == 'path'] @property def query_parameters(self): return [p for p in self.values['parameters'] if p.location == 'query'] @staticmethod def FindCodeObjectWithWireName(things, wire_name): """Looks for an element having the given wire_name. Args: things: (array of DataType) List of parameters or properties to search. wire_name: (str) The wireName we are looking to find. Returns: None or element with the given wire_name. """ if not things: return None for e in things: if e.values['wireName'] == wire_name: return e return None @staticmethod def FindPageToken(things): """Looks for an element with a wireName like a page token. Args: things: (array of DataType) List of parameters or properties to search. Returns: None or page token name found. """ for token_name in _PAGE_TOKEN_NAMES: if Method.FindCodeObjectWithWireName(things, token_name): return token_name return None # # Expose some properties with the naming convention we use in templates # def optionalParameters(self): # pylint: disable=g-bad-name return self.optional_parameters def requiredParameters(self): # pylint: disable=g-bad-name return self.required_parameters def pathParameters(self): # pylint: disable=g-bad-name return self.path_parameters def queryParameters(self): # pylint: disable=g-bad-name return self.query_parameters class Parameter(template_objects.CodeObject): """The definition of a method parameter.""" def __init__(self, api, name, def_dict, method): super(Parameter, self).__init__(def_dict, api, parent=method, wire_name=name) self.ValidateName(name) self.schema = api # TODO(user): Deal with dots in names better. What we should do is: # For x.y, x.z create a little class X, with members y and z. Then # have the constructor method take an X. self._repeated = self.values.get('repeated', False) self._required = self.values.get('required', False) self._location = (self.values.get('location') or self.values.get('restParameterType') or 'query') # TODO(user): Why not just use Schema.Create here? 
referenced_schema = self.values.get('$ref') if referenced_schema: self._data_type = (api.SchemaByName(referenced_schema) or data_types.SchemaReference(referenced_schema, api)) elif def_dict.get('type') == 'array': self._data_type = Schema.Create(api, name, def_dict, name, method) elif self.values.get('enum'): self._data_type = data_types.Enum(def_dict, api, name, self.values.get('enum'), self.values.get('enumDescriptions'), parent=method) self.SetTemplateValue('enumType', self._data_type) else: self._data_type = data_types.PrimitiveDataType(def_dict, api, parent=self) if self._repeated: self._data_type = data_types.ArrayDataType(name, self._data_type, parent=self) @property def repeated(self): return self._repeated @property def required(self): return self._required @property def location(self): return self._location @property def code_type(self): return self._data_type.code_type @property def data_type(self): return self._data_type
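
# A minimal usage sketch (hypothetical discovery document and file name;
# assumes this module's imports resolve in the surrounding package):
#
#   import json
#
#   with open('drive_v3_discovery.json') as f:
#     api = Api(json.load(f))
#   for method in api.all_methods:
#     print method.values['rpcMethod']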
bshaffer/google-api-php-client-services
generator/src/googleapis/codegen/api.py
Python
apache-2.0
37,026
0.005888
import collections
from supriya.enums import CalculationRate
from supriya.synthdefs import MultiOutUGen


class XFadeRotate(MultiOutUGen):
    """

    ::

        >>> source = supriya.ugens.In.ar(bus=0)
        >>> xfade_rotate = supriya.ugens.XFadeRotate.ar(
        ...     n=0,
        ...     source=source,
        ...     )
        >>> xfade_rotate
        XFadeRotate.ar()

    """

    ### CLASS VARIABLES ###

    __slots__ = ()

    # Map each ordered input name to its default value.
    _ordered_input_names = collections.OrderedDict([
        ('n', 0),
        ('source', None),
        ])

    _valid_calculation_rates = None

    ### INITIALIZER ###

    def __init__(
        self,
        calculation_rate=None,
        n=0,
        source=None,
        ):
        MultiOutUGen.__init__(
            self,
            calculation_rate=calculation_rate,
            n=n,
            source=source,
            )

    ### PUBLIC METHODS ###

    @classmethod
    def ar(
        cls,
        n=0,
        source=None,
        ):
        """
        Constructs an audio-rate XFadeRotate.

        ::

            >>> source = supriya.ugens.In.ar(bus=0)
            >>> xfade_rotate = supriya.ugens.XFadeRotate.ar(
            ...     n=0,
            ...     source=source,
            ...     )
            >>> xfade_rotate
            XFadeRotate.ar()

        Returns ugen graph.
        """
        calculation_rate = CalculationRate.AUDIO
        ugen = cls._new_expanded(
            calculation_rate=calculation_rate,
            n=n,
            source=source,
            )
        return ugen

    @classmethod
    def kr(
        cls,
        n=0,
        source=None,
        ):
        """
        Constructs a control-rate XFadeRotate.

        ::

            >>> source = supriya.ugens.In.ar(bus=0)
            >>> xfade_rotate = supriya.ugens.XFadeRotate.kr(
            ...     n=0,
            ...     source=source,
            ...     )
            >>> xfade_rotate
            XFadeRotate.kr()

        Returns ugen graph.
        """
        calculation_rate = CalculationRate.CONTROL
        ugen = cls._new_expanded(
            calculation_rate=calculation_rate,
            n=n,
            source=source,
            )
        return ugen

    # def newFromDesc(): ...

    ### PUBLIC PROPERTIES ###

    @property
    def n(self):
        """
        Gets `n` input of XFadeRotate.

        ::

            >>> source = supriya.ugens.In.ar(bus=0)
            >>> xfade_rotate = supriya.ugens.XFadeRotate.ar(
            ...     n=0,
            ...     source=source,
            ...     )
            >>> xfade_rotate.n
            0.0

        Returns ugen input.
        """
        # OrderedDict has no index() method; look up the position of the
        # input name within the ordered keys instead.
        index = tuple(self._ordered_input_names).index('n')
        return self._inputs[index]

    @property
    def source(self):
        """
        Gets `source` input of XFadeRotate.

        ::

            >>> source = supriya.ugens.In.ar(bus=0)
            >>> xfade_rotate = supriya.ugens.XFadeRotate.ar(
            ...     n=0,
            ...     source=source,
            ...     )
            >>> xfade_rotate.source
            OutputProxy(
                source=In(
                    bus=0.0,
                    calculation_rate=CalculationRate.AUDIO,
                    channel_count=1
                    ),
                output_index=0
                )

        Returns ugen input.
        """
        index = tuple(self._ordered_input_names).index('source')
        return self._inputs[index]
josiah-wolf-oberholtzer/supriya
dev/etc/pending_ugens/XFadeRotate.py
Python
mit
3,541
0.001977