# minqlx - Extends Quake Live's dedicated server with extra functionality and scripting.
# Copyright (C) 2015 Mino <mino@minomino.org>

# This file is part of minqlx.

# minqlx is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.

# minqlx is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.

# You should have received a copy of the GNU General Public License
# along with minqlx. If not, see <http://www.gnu.org/licenses/>.

# Since this isn't the actual module, we define it here and export
# it later so that it can be accessed with minqlx.__doc__ by Sphinx.

import minqlx
import minqlx.database
import collections
import subprocess
import threading
import traceback
import importlib
import datetime
import os.path
import logging
import shlex
import sys
import os

from logging.handlers import RotatingFileHandler

# em92: reasons not to support older than 3.5
# https://docs.python.org/3.5/whatsnew/3.5.html#whatsnew-ordereddict
# plugins already assume that they are running on python >= 3.5
if sys.version_info < (3, 5):
    raise AssertionError("Only python 3.5 and later is supported by minqlx")

# Team number -> string
TEAMS = collections.OrderedDict(enumerate(("free", "red", "blue", "spectator")))

# Game type number -> string
GAMETYPES = collections.OrderedDict([(i, gt) for i, gt in enumerate(
    ("Free for All", "Duel", "Race", "Team Deathmatch", "Clan Arena",
     "Capture the Flag", "One Flag", "", "Harvester", "Freeze Tag",
     "Domination", "Attack and Defend", "Red Rover")) if gt])

# Game type number -> short string
GAMETYPES_SHORT = collections.OrderedDict([(i, gt) for i, gt in enumerate(
    ("ffa", "duel", "race", "tdm", "ca", "ctf", "1f", "", "har", "ft",
     "dom", "ad", "rr")) if gt])

# Connection states.
CONNECTION_STATES = collections.OrderedDict(enumerate(("free", "zombie", "connected", "primed", "active")))

WEAPONS = collections.OrderedDict([(i, w) for i, w in enumerate(
    ("", "g", "mg", "sg", "gl", "rl", "lg", "rg", "pg", "bfg", "gh", "ng",
     "pl", "cg", "hmg", "hands")) if w])

DEFAULT_PLUGINS = ("plugin_manager", "essentials", "motd", "permission",
    "ban", "silence", "clan", "names", "log", "workshop")

# ====================================================================
# HELPERS
# ====================================================================

def parse_variables(varstr, ordered=False):
    """
    Parses strings of key-value pairs delimited by "\\" and puts
    them into a dictionary.

    :param varstr: The string with variables.
    :type varstr: str
    :param ordered: Whether it should use :class:`collections.OrderedDict` or not.
    :type ordered: bool
    :returns: dict -- A dictionary with the variables added as key-value pairs.
    """
    if ordered:
        res = collections.OrderedDict()
    else:
        res = {}
    if not varstr.strip():
        return res

    vars = varstr.lstrip("\\").split("\\")
    try:
        for i in range(0, len(vars), 2):
            res[vars[i]] = vars[i + 1]
    except IndexError:
        # Log and return incomplete dict.
        logger = minqlx.get_logger()
        logger.warning("Uneven number of keys and values: {}".format(varstr))

    return res

main_logger = None

def get_logger(plugin=None):
    """
    Provides a logger that should be used by your plugin for debugging, info
    and error reporting. It will automatically output to both the server console
    as well as to a file.

    :param plugin: The plugin that is using the logger.
    :type plugin: minqlx.Plugin
    :returns: logging.Logger -- The logger in question.
    """
    if plugin:
        return logging.getLogger("minqlx." + str(plugin))
    else:
        return logging.getLogger("minqlx")

def _configure_logger():
    logger = logging.getLogger("minqlx")
    logger.setLevel(logging.DEBUG)

    # File
    file_path = os.path.join(minqlx.get_cvar("fs_homepath"), "minqlx.log")
    maxlogs = minqlx.Plugin.get_cvar("qlx_logs", int)
    maxlogsize = minqlx.Plugin.get_cvar("qlx_logsSize", int)
    file_fmt = logging.Formatter("(%(asctime)s) [%(levelname)s @ %(name)s.%(funcName)s] %(message)s", "%H:%M:%S")
    file_handler = RotatingFileHandler(file_path, encoding="utf-8", maxBytes=maxlogsize, backupCount=maxlogs)
    file_handler.setLevel(logging.DEBUG)
    file_handler.setFormatter(file_fmt)
    logger.addHandler(file_handler)
    logger.info("============================= minqlx run @ {} ============================="
        .format(datetime.datetime.now()))

    # Console
    console_fmt = logging.Formatter("[%(name)s.%(funcName)s] %(levelname)s: %(message)s", "%H:%M:%S")
    console_handler = logging.StreamHandler()
    console_handler.setLevel(logging.INFO)
    console_handler.setFormatter(console_fmt)
    logger.addHandler(console_handler)

def log_exception(plugin=None):
    """
    Logs an exception using :func:`get_logger`. Call this in an except block.

    :param plugin: The plugin that is using the logger.
    :type plugin: minqlx.Plugin
    """
    # TODO: Remove plugin arg and make it automatic.
    logger = get_logger(plugin)
    e = traceback.format_exc().rstrip("\n")
    for line in e.split("\n"):
        logger.error(line)

def handle_exception(exc_type, exc_value, exc_traceback):
    """A handler for unhandled exceptions."""
    # TODO: If exception was raised within a plugin, detect it and pass to log_exception()
    logger = get_logger(None)
    e = "".join(traceback.format_exception(exc_type, exc_value, exc_traceback)).rstrip("\n")
    for line in e.split("\n"):
        logger.error(line)

_init_time = datetime.datetime.now()

def uptime():
    """Returns a :class:`datetime.timedelta` instance of the time since initialized."""
    return datetime.datetime.now() - _init_time

def owner():
    """Returns the SteamID64 of the owner. This is set in the config."""
    try:
        sid = int(minqlx.get_cvar("qlx_owner"))
        if sid == -1:
            raise RuntimeError
        return sid
    except:
        logger = minqlx.get_logger()
        logger.error("Failed to parse the Owner Steam ID. Make sure it's in SteamID64 format.")

_stats = None

def stats_listener():
    """Returns the :class:`minqlx.StatsListener` instance used to listen for stats."""
    return _stats

def set_cvar_once(name, value, flags=0):
    if minqlx.get_cvar(name) is None:
        minqlx.set_cvar(name, value, flags)
        return True
    return False

def set_cvar_limit_once(name, value, minimum, maximum, flags=0):
    if minqlx.get_cvar(name) is None:
        minqlx.set_cvar_limit(name, value, minimum, maximum, flags)
        return True
    return False

def set_plugins_version(path):
    args_version = shlex.split("git describe --long --tags --dirty --always")
    args_branch = shlex.split("git rev-parse --abbrev-ref HEAD")
    # We keep environment variables, but remove LD_PRELOAD to avoid a warning the OS might throw.
    env = dict(os.environ)
    del env["LD_PRELOAD"]
    try:
        # Get the version using git describe.
        p = subprocess.Popen(args_version, stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=path, env=env)
        p.wait(timeout=1)
        if p.returncode != 0:
            setattr(minqlx, "__plugins_version__", "NOT_SET")
            return
        version = p.stdout.read().decode().strip()

        # Get the branch using git rev-parse.
        p = subprocess.Popen(args_branch, stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=path, env=env)
        p.wait(timeout=1)
        if p.returncode != 0:
            setattr(minqlx, "__plugins_version__", version)
            return
        branch = p.stdout.read().decode().strip()
    except (FileNotFoundError, subprocess.TimeoutExpired):
        setattr(minqlx, "__plugins_version__", "NOT_SET")
        return

    setattr(minqlx, "__plugins_version__", "{}-{}".format(version, branch))

def set_map_subtitles():
    # We save the actual values before setting them so that we can retrieve them in Game.
    setattr(minqlx, "_map_title", minqlx.get_configstring(3))
    setattr(minqlx, "_map_subtitle1", minqlx.get_configstring(678))
    setattr(minqlx, "_map_subtitle2", minqlx.get_configstring(679))

    cs = minqlx.get_configstring(678)
    if cs:
        cs += " - "
    minqlx.set_configstring(678, cs + "Running minqlx ^6{}^7 with plugins ^6{}^7."
        .format(minqlx.__version__, minqlx.__plugins_version__))
    cs = minqlx.get_configstring(679)
    if cs:
        cs += " - "
    minqlx.set_configstring(679, cs + "Check ^6http://github.com/MinoMino/minqlx^7 for more details.")

# ====================================================================
# DECORATORS
# ====================================================================

def next_frame(func):
    def f(*args, **kwargs):
        minqlx.next_frame_tasks.append((func, args, kwargs))

    return f

def delay(time):
    """Delay a function call a certain amount of time.

    .. note::
        It cannot guarantee that it will be called right as the timer
        expires, but unless some plugin is for some reason blocking, then
        you can expect it to be called practically as soon as it expires.

    :param func: The function to be called.
    :type func: callable
    :param time: The number of seconds before the function should be called.
    :type time: int
    """
    def wrap(func):
        def f(*args, **kwargs):
            minqlx.frame_tasks.enter(time, 0, func, args, kwargs)
        return f
    return wrap

_thread_count = 0
_thread_name = "minqlxthread"

def thread(func, force=False):
    """Starts a thread with the function passed as its target. If a function decorated
    with this is called within a function also decorated, it will **not** create a second
    thread unless told to do so with the *force* keyword.

    :param func: The function to be run in a thread.
    :type func: callable
    :param force: Force it to create a new thread even if already in one created by this decorator.
    :type force: bool
    :returns: threading.Thread
    """
    def f(*args, **kwargs):
        if not force and threading.current_thread().name.endswith(_thread_name):
            func(*args, **kwargs)
        else:
            global _thread_count
            name = func.__name__ + "-{}-{}".format(str(_thread_count), _thread_name)
            t = threading.Thread(target=func, name=name, args=args, kwargs=kwargs, daemon=True)
            t.start()
            _thread_count += 1
            return t

    return f

# ====================================================================
# CONFIG AND PLUGIN LOADING
# ====================================================================

# We need to keep track of module instances for use with importlib.reload.
_modules = {}

class PluginLoadError(Exception):
    pass

class PluginUnloadError(Exception):
    pass

def load_preset_plugins():
    plugins_temp = []
    for p in minqlx.Plugin.get_cvar("qlx_plugins", list):
        if p == "DEFAULT":
            plugins_temp += list(DEFAULT_PLUGINS)
        else:
            plugins_temp.append(p)

    plugins = []
    for p in plugins_temp:
        if p not in plugins:
            plugins.append(p)

    plugins_path = os.path.abspath(minqlx.get_cvar("qlx_pluginsPath"))
    plugins_dir = os.path.basename(plugins_path)

    if os.path.isdir(plugins_path):
        plugins = [p for p in plugins if "{}.{}".format(plugins_dir, p)]
        for p in plugins:
            load_plugin(p)
    else:
        raise(PluginLoadError("Cannot find the plugins directory '{}'."
            .format(os.path.abspath(plugins_path))))

def load_plugin(plugin):
    logger = get_logger(None)
    logger.info("Loading plugin '{}'...".format(plugin))
    plugins = minqlx.Plugin._loaded_plugins
    plugins_path = os.path.abspath(minqlx.get_cvar("qlx_pluginsPath"))
    plugins_dir = os.path.basename(plugins_path)

    if not os.path.isfile(os.path.join(plugins_path, plugin + ".py")):
        raise PluginLoadError("No such plugin exists.")
    elif plugin in plugins:
        return reload_plugin(plugin)
    try:
        module = importlib.import_module("{}.{}".format(plugins_dir, plugin))
        # We add the module regardless of whether it fails or not, otherwise we can't reload later.
        global _modules
        _modules[plugin] = module

        if not hasattr(module, plugin):
            raise(PluginLoadError("The plugin needs to have a class with the exact name as the file, minus the .py."))

        plugin_class = getattr(module, plugin)
        if issubclass(plugin_class, minqlx.Plugin):
            plugins[plugin] = plugin_class()
        else:
            raise(PluginLoadError("Attempted to load a plugin that is not a subclass of 'minqlx.Plugin'."))
    except:
        log_exception(plugin)
        raise

def unload_plugin(plugin):
    logger = get_logger(None)
    logger.info("Unloading plugin '{}'...".format(plugin))
    plugins = minqlx.Plugin._loaded_plugins
    if plugin in plugins:
        try:
            minqlx.EVENT_DISPATCHERS["unload"].dispatch(plugin)

            # Unhook its hooks.
            for hook in plugins[plugin].hooks:
                plugins[plugin].remove_hook(*hook)

            # Unregister commands.
            for cmd in plugins[plugin].commands:
                plugins[plugin].remove_command(cmd.name, cmd.handler)

            del plugins[plugin]
        except:
            log_exception(plugin)
            raise
    else:
        raise(PluginUnloadError("Attempted to unload a plugin that is not loaded."))

def reload_plugin(plugin):
    try:
        unload_plugin(plugin)
    except PluginUnloadError:
        pass

    try:
        global _modules
        if plugin in _modules:  # Unloaded previously?
            importlib.reload(_modules[plugin])
        load_plugin(plugin)
    except:
        log_exception(plugin)
        raise

def initialize_cvars():
    # Core
    minqlx.set_cvar_once("qlx_owner", "-1")
    minqlx.set_cvar_once("qlx_plugins", ", ".join(DEFAULT_PLUGINS))
    minqlx.set_cvar_once("qlx_pluginsPath", "minqlx-plugins")
    minqlx.set_cvar_once("qlx_database", "Redis")
    minqlx.set_cvar_once("qlx_commandPrefix", "!")
    minqlx.set_cvar_once("qlx_logs", "2")
    minqlx.set_cvar_once("qlx_logsSize", str(3 * 10**6))  # 3 MB
    # Redis
    minqlx.set_cvar_once("qlx_redisAddress", "127.0.0.1")
    minqlx.set_cvar_once("qlx_redisDatabase", "0")
    minqlx.set_cvar_once("qlx_redisUnixSocket", "0")
    minqlx.set_cvar_once("qlx_redisPassword", "")

# ====================================================================
# MAIN
# ====================================================================

def initialize():
    minqlx.register_handlers()

def late_init():
    """Initialization that needs to be called after QLDS has finished
    its own initialization.
    """
    minqlx.initialize_cvars()

    # Set the default database plugins should use.
    # TODO: Make Plugin.database setting generic.
    if minqlx.get_cvar("qlx_database").lower() == "redis":
        minqlx.Plugin.database = minqlx.database.Redis

    # Get the plugins path and set minqlx.__plugins_version__.
    plugins_path = os.path.abspath(minqlx.get_cvar("qlx_pluginsPath"))
    set_plugins_version(plugins_path)

    # Initialize the logger now that we have fs_basepath.
    _configure_logger()
    logger = get_logger()
    # Set our own exception handler so that we can log them if unhandled.
    sys.excepthook = handle_exception

    # Add the plugins path to PATH so that we can load plugins later.
    sys.path.append(os.path.dirname(plugins_path))

    logger.info("Loading preset plugins...")
    load_preset_plugins()

    if bool(int(minqlx.get_cvar("zmq_stats_enable"))):
        global _stats
        _stats = minqlx.StatsListener()
        logger.info("Stats listener started on {}.".format(_stats.address))
        # Start polling. Not blocking due to decorator magic. Aw yeah.
        _stats.keep_receiving()

    logger.info("We're good to go!")
# ====================================================================
# Source of the file above: mgaertne/minqlx-plugin-tests
# Path: src/main/python/minqlx/_core.py
# Language: Python | License: bsd-3-clause | Size: 16,459 bytes
# ====================================================================
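# --------------------------------------------------------------------
# Illustrative sketch (added; not part of the original file): the
# backslash-delimited key/value format consumed by parse_variables()
# above. The real function needs the minqlx C extension for its
# logger, so this hypothetical standalone helper mirrors the same
# parsing logic in order to run outside a Quake Live server process.
def _demo_parse_variables(varstr):
    res = {}
    if not varstr.strip():
        return res
    pairs = varstr.lstrip("\\").split("\\")
    for i in range(0, len(pairs), 2):
        res[pairs[i]] = pairs[i + 1]
    return res

# A userinfo-style string of the kind the engine sends; the values
# here are made up for the example.
assert _demo_parse_variables("\\name\\Mino\\rate\\25000") == \
    {"name": "Mino", "rate": "25000"}
# --------------------------------------------------------------------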
# -*- coding: utf-8 -*-
#
#       Graph : graph package
#
#       Copyright or Copr. 2006 INRIA - CIRAD - INRA
#
#       File author(s): Jerome Chopard <jerome.chopard@sophia.inria.fr>
#
#       Distributed under the Cecill-C License.
#       See accompanying file LICENSE.txt or copy at
#           http://www.cecill.info/licences/Licence_CeCILL-C_V1-en.html
#
#       VPlants WebSite : https://gforge.inria.fr/projects/vplants/
#
"""This module provide a simple pure python implementation
for a graph interface

does not implement copy concept
"""

from id_dict import IdDict


class GraphError(Exception):
    """base class of all graph exceptions"""


class InvalidEdge(GraphError, KeyError):
    """exception raised when a wrong edge id is provided"""


class InvalidVertex(GraphError, KeyError):
    """exception raised when a wrong vertex id is provided"""


class Graph(object):
    """Directed graph with multiple links

    in this implementation :
        - vertices are tuple of edge_in, edge_out
        - edges are tuple of source, target
    """

    def __init__(self, graph=None, idgenerator="set"):
        """constructor

        if graph is not none make a copy of the topological structure of graph
        (i.e. don't use the same id)

        args:
            - graph (Graph): the graph to copy, default=None
            - idgenerator (str): type of idgenerator to use, default 'set'
        """
        self._vertices = IdDict(idgenerator=idgenerator)
        self._edges = IdDict(idgenerator=idgenerator)
        if graph is not None:
            self.extend(graph)

    # ##########################################################
    #
    # Graph concept
    #
    # ##########################################################
    def source(self, eid):
        """Retrieve the source vertex of an edge

        args:
            - eid (int): edge id

        return:
            - (int): vertex id
        """
        try:
            return self._edges[eid][0]
        except KeyError:
            raise InvalidEdge(eid)

    def target(self, eid):
        """Retrieve the target vertex of an edge

        args:
            - eid (int): edge id

        return:
            - (int): vertex id
        """
        try:
            return self._edges[eid][1]
        except KeyError:
            raise InvalidEdge(eid)

    def edge_vertices(self, eid):
        """Retrieve both source and target vertex of an edge

        args:
            - eid (int): edge id

        return:
            - (int, int): source id, target id
        """
        try:
            return self._edges[eid]
        except KeyError:
            raise InvalidEdge(eid)

    def edge(self, source, target):
        """Find the matching edge with same source and same target

        return None if it don't succeed

        args:
            - source (int): source vertex
            - target (int): target vertex

        return:
            - (int): edge id with same source and target
            - (None): if search is unsuccessful
        """
        if target not in self:
            raise InvalidVertex(target)
        for eid in self.out_edges(source):
            if self.target(eid) == target:
                return eid
        return None

    def __contains__(self, vid):
        """magic alias for `has_vertex`"""
        return self.has_vertex(vid)

    def has_vertex(self, vid):
        """test whether a vertex belong to the graph

        args:
            - vid (int): id of vertex

        return:
            - (bool)
        """
        return vid in self._vertices

    def has_edge(self, eid):
        """test whether an edge belong to the graph

        args:
            - eid (int): id of edge

        return:
            - (bool)
        """
        return eid in self._edges

    def is_valid(self):
        """Test the validity of the graph

        return:
            - (bool)
        """
        return True

    # ##########################################################
    #
    # Vertex List Graph Concept
    #
    # ##########################################################
    def vertices(self):
        """Iterator on all vertices

        return:
            - (iter of int)
        """
        return iter(self._vertices)

    def __iter__(self):
        """Magic alias for `vertices`"""
        return iter(self._vertices)

    def nb_vertices(self):
        """Total number of vertices in the graph

        return:
            - (int)
        """
        return len(self._vertices)

    def __len__(self):
        """Magic alias for `nb_vertices`"""
        return self.nb_vertices()

    def in_neighbors(self, vid):
        """Iterator on the neighbors of vid
        where edges are directed from neighbor to vid

        args:
            - vid (int): vertex id

        return:
            - (iter of int): iter of vertex id
        """
        if vid not in self:
            raise InvalidVertex(vid)
        neighbors_list = [self.source(eid) for eid in self._vertices[vid][0]]
        return iter(set(neighbors_list))

    def out_neighbors(self, vid):
        """Iterator on the neighbors of vid
        where edges are directed from vid to neighbor

        args:
            - vid (int): vertex id

        return:
            - (iter of int): iter of vertex id
        """
        if vid not in self:
            raise InvalidVertex(vid)
        neighbors_list = [self.target(eid) for eid in self._vertices[vid][1]]
        return iter(set(neighbors_list))

    def neighbors(self, vid):
        """Iterator on all neighbors of vid both in and out

        args:
            - vid (int): vertex id

        return:
            - (iter of int): iter of vertex id
        """
        neighbors_list = list(self.in_neighbors(vid))
        neighbors_list.extend(self.out_neighbors(vid))
        return iter(set(neighbors_list))

    def nb_in_neighbors(self, vid):
        """Number of in neighbors of vid
        where edges are directed from neighbor to vid

        args:
            - vid (int): vertex id

        return:
            - (int)
        """
        neighbors_set = list(self.in_neighbors(vid))
        return len(neighbors_set)

    def nb_out_neighbors(self, vid):
        """Number of out neighbors of vid
        where edges are directed from vid to neighbor

        args:
            - vid (int): vertex id

        return:
            - (int)
        """
        neighbors_set = list(self.out_neighbors(vid))
        return len(neighbors_set)

    def nb_neighbors(self, vid):
        """Total number of both in and out neighbors of vid

        args:
            - vid (int): vertex id

        return:
            - (int)
        """
        neighbors_set = list(self.neighbors(vid))
        return len(neighbors_set)

    # ##########################################################
    #
    # Edge List Graph Concept
    #
    # ##########################################################
    def _iter_edges(self, vid):
        """internal function that perform 'edges' with vid not None"""
        link_in, link_out = self._vertices[vid]
        for eid in link_in:
            yield eid
        for eid in link_out:
            yield eid

    def edges(self, vid=None):
        """Iterate on all edges connected to a given vertex.

        If vid is None (default), iterate on all edges in the graph

        args:
            - vid (int): vertex holdings edges, default (None)

        return:
            - (iter of int): iterator on edge ids
        """
        if vid is None:
            return iter(self._edges)

        if vid not in self:
            raise InvalidVertex(vid)

        return self._iter_edges(vid)

    def nb_edges(self, vid=None):
        """Number of edges connected to a given vertex.

        If vid is None (default), total number of edges in the graph

        args:
            - vid (int): vertex holdings edges, default (None)

        return:
            - (int)
        """
        if vid is None:
            return len(self._edges)

        if vid not in self:
            raise InvalidVertex(vid)

        return len(self._vertices[vid][0]) + len(self._vertices[vid][1])

    def in_edges(self, vid):
        """Iterate on all edges pointing to a given vertex.

        args:
            - vid (int): vertex target of edges

        return:
            - (iter of int): iterator on edge ids
        """
        if vid not in self:
            raise InvalidVertex(vid)

        for eid in self._vertices[vid][0]:
            yield eid

    def out_edges(self, vid):
        """Iterate on all edges away from a given vertex.

        args:
            - vid (int): vertex source of edges

        return:
            - (iter of int): iterator on edge ids
        """
        if vid not in self:
            raise InvalidVertex(vid)

        for eid in self._vertices[vid][1]:
            yield eid

    def nb_in_edges(self, vid):
        """Number of edges pointing to a given vertex.

        args:
            - vid (int): vertex target of edges

        return:
            - (int)
        """
        if vid not in self:
            raise InvalidVertex(vid)

        return len(self._vertices[vid][0])

    def nb_out_edges(self, vid):
        """Number of edges away from a given vertex.

        args:
            - vid (int): vertex source of edges

        return:
            - (int)
        """
        if vid not in self:
            raise InvalidVertex(vid)

        return len(self._vertices[vid][1])

    # ##########################################################
    #
    # Mutable Vertex Graph concept
    #
    # ##########################################################
    def add_vertex(self, vid=None):
        """Add a vertex to the graph.

        If vid is not provided create a new vid

        args:
            - vid (int): id to use. If None (default) will generate a new one

        return:
            - vid (int): id used for the new vertex
        """
        try:
            return self._vertices.add((set(), set()), vid)
        except KeyError:
            raise InvalidVertex(vid)

    def remove_vertex(self, vid):
        """Remove a specified vertex of the graph.

        Also remove all edge attached to it.

        args:
            - vid (int): id of vertex to remove
        """
        if vid not in self:
            raise InvalidVertex(vid)
        link_in, link_out = self._vertices[vid]
        for edge in list(link_in):
            self.remove_edge(edge)
        for edge in list(link_out):
            self.remove_edge(edge)
        del self._vertices[vid]

    def clear(self):
        """Remove all vertices and edges

        don't change references to objects
        """
        self._edges.clear()
        self._vertices.clear()

    # ##########################################################
    #
    # Mutable Edge Graph concept
    #
    # ##########################################################
    def add_edge(self, sid, tid, eid=None):
        """Add an edge to the graph.

        If eid is not provided generate a new one.

        args:
            - sid (int): id of source vertex
            - tid (int): id of target vertex
            - eid (int): id to use. If None (default) will generate a new one

        return:
            - eid (int): id used for new edge
        """
        if sid not in self:
            raise InvalidVertex(sid)
        if tid not in self:
            raise InvalidVertex(tid)

        try:
            eid = self._edges.add((sid, tid), eid)
        except KeyError:
            raise InvalidEdge(eid)

        self._vertices[sid][1].add(eid)
        self._vertices[tid][0].add(eid)

        return eid

    def remove_edge(self, eid):
        """Remove a specified edge from the graph.
        args:
            - eid (int): id of edge to remove
        """
        if not self.has_edge(eid):
            raise InvalidEdge(eid)

        sid, tid = self._edges[eid]

        self._vertices[sid][1].remove(eid)
        self._vertices[tid][0].remove(eid)

        del self._edges[eid]

    def clear_edges(self):
        """Remove all the edges of the graph

        don't change references to objects
        """
        self._edges.clear()
        for vid, (in_set, out_set) in self._vertices.iteritems():
            in_set.clear()
            out_set.clear()

    # ##########################################################
    #
    # Extend Graph concept
    #
    # ##########################################################
    def extend(self, graph):
        """Add the specified graph to self, create new vid and eid

        args:
            - graph (Graph): the graph to add

        return:
            - (dict of (int, int)): mapping between vertex id in graph
                and vertex id in extended self
            - (dict of (int, int)): mapping between edge id in graph
                and edge id in extended self
        """
        # vertex adding
        trans_vid = {}
        for vid in list(graph.vertices()):
            trans_vid[vid] = self.add_vertex()

        # edge adding
        trans_eid = {}
        for eid in list(graph.edges()):
            sid = trans_vid[graph.source(eid)]
            tid = trans_vid[graph.target(eid)]
            trans_eid[eid] = self.add_edge(sid, tid)

        return trans_vid, trans_eid

    def sub_graph(self, vids):
        """ """
        # NotImplemented is not an exception; raise NotImplementedError.
        raise NotImplementedError

        # from copy import deepcopy
        # vids = set(vids)
        #
        # result = deepcopy(self)
        # result._vertices.clear()
        # result._edges.clear()
        #
        # for key, edges in self._vertices.items():
        #     if key in vids:
        #         inedges, outedges = edges
        #         sortedinedges = set(
        #             [eid for eid in inedges if self.source(eid) in vids])
        #         sortedoutedges = set(
        #             [eid for eid in outedges if self.target(eid) in vids])
        #         result._vertices.add((sortedinedges, sortedoutedges), key)
        #         for eid in sortedoutedges:
        #             result._edges.add(self._edges[eid], eid)
        #
        # return result
# ====================================================================
# Source of the file above: revesansparole/oacontainer
# Path: src/openalea/container/graph.py
# Language: Python | License: mit | Size: 14,189 bytes
# ====================================================================
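# --------------------------------------------------------------------
# Illustrative sketch (added; not part of the original file): minimal
# use of the Graph class above, assuming the id_dict module it imports
# is available on the path. A directed edge is created between two
# fresh vertices, then queried through the "Graph concept" accessors.
def _demo_graph_usage():
    g = Graph()
    v0 = g.add_vertex()
    v1 = g.add_vertex()
    e = g.add_edge(v0, v1)
    assert g.source(e) == v0 and g.target(e) == v1
    assert list(g.out_neighbors(v0)) == [v1]
    assert g.nb_vertices() == 2 and g.nb_edges() == 1
    g.remove_vertex(v0)  # also removes the edge attached to it
    assert g.nb_edges() == 0
# --------------------------------------------------------------------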
""" Test functions for stats module """ from __future__ import division, print_function, absolute_import from numpy.testing import TestCase, run_module_suite, assert_equal, \ assert_array_equal, assert_almost_equal, assert_array_almost_equal, \ assert_allclose, assert_, assert_raises, rand, dec from numpy.testing.utils import WarningManager from nose import SkipTest import numpy import numpy as np from numpy import typecodes, array import scipy.stats as stats from scipy.stats.distributions import argsreduce import warnings def kolmogorov_check(diststr, args=(), N=20, significance=0.01): qtest = stats.ksoneisf(significance, N) cdf = eval('stats.'+diststr+'.cdf') dist = eval('stats.'+diststr) # Get random numbers kwds = {'size':N} vals = numpy.sort(dist.rvs(*args, **kwds)) cdfvals = cdf(vals, *args) q = max(abs(cdfvals - np.arange(1.0, N+1)/N)) assert_(q < qtest, msg="Failed q=%f, bound=%f, alpha=%f" % (q, qtest, significance)) return # generate test cases to test cdf and distribution consistency dists = ['uniform','norm','lognorm','expon','beta', 'powerlaw','bradford','burr','fisk','cauchy','halfcauchy', 'foldcauchy','gamma','gengamma','loggamma', 'alpha','anglit','arcsine','betaprime','erlang', 'dgamma','exponweib','exponpow','frechet_l','frechet_r', 'gilbrat','f','ncf','chi2','chi','nakagami','genpareto', 'genextreme','genhalflogistic','pareto','lomax','halfnorm', 'halflogistic','fatiguelife','foldnorm','ncx2','t','nct', 'weibull_min','weibull_max','dweibull','maxwell','rayleigh', 'genlogistic', 'logistic','gumbel_l','gumbel_r','gompertz', 'hypsecant', 'laplace', 'reciprocal','triang','tukeylambda', 'vonmises', 'pearson3'] # check function for test generator def check_distribution(dist, args, alpha): D,pval = stats.kstest(dist,'', args=args, N=1000) if (pval < alpha): D,pval = stats.kstest(dist,'',args=args, N=1000) #if (pval < alpha): # D,pval = stats.kstest(dist,'',args=args, N=1000) assert_(pval > alpha, msg="D = " + str(D) + "; pval = " + str(pval) + \ "; alpha = " + str(alpha) + "\nargs = " + str(args)) # nose test generator def test_all_distributions(): for dist in dists: distfunc = getattr(stats, dist) nargs = distfunc.numargs alpha = 0.01 if dist == 'fatiguelife': alpha = 0.001 if dist == 'erlang': args = (4,)+tuple(rand(2)) elif dist == 'frechet': args = tuple(2*rand(1))+(0,)+tuple(2*rand(2)) elif dist == 'triang': args = tuple(rand(nargs)) elif dist == 'reciprocal': vals = rand(nargs) vals[1] = vals[0] + 1.0 args = tuple(vals) elif dist == 'vonmises': yield check_distribution, dist, (10,), alpha yield check_distribution, dist, (101,), alpha args = tuple(1.0+rand(nargs)) else: args = tuple(1.0+rand(nargs)) yield check_distribution, dist, args, alpha def check_vonmises_pdf_periodic(k,l,s,x): vm = stats.vonmises(k,loc=l,scale=s) assert_almost_equal(vm.pdf(x),vm.pdf(x%(2*numpy.pi*s))) def check_vonmises_cdf_periodic(k,l,s,x): vm = stats.vonmises(k,loc=l,scale=s) assert_almost_equal(vm.cdf(x)%1,vm.cdf(x%(2*numpy.pi*s))%1) def test_vonmises_pdf_periodic(): for k in [0.1, 1, 101]: for x in [0,1,numpy.pi,10,100]: yield check_vonmises_pdf_periodic, k, 0, 1, x yield check_vonmises_pdf_periodic, k, 1, 1, x yield check_vonmises_pdf_periodic, k, 0, 10, x yield check_vonmises_cdf_periodic, k, 0, 1, x yield check_vonmises_cdf_periodic, k, 1, 1, x yield check_vonmises_cdf_periodic, k, 0, 10, x class TestRandInt(TestCase): def test_rvs(self): vals = stats.randint.rvs(5,30,size=100) assert_(numpy.all(vals < 30) & numpy.all(vals >= 5)) assert_(len(vals) == 100) vals = 
stats.randint.rvs(5,30,size=(2,50)) assert_(numpy.shape(vals) == (2,50)) assert_(vals.dtype.char in typecodes['AllInteger']) val = stats.randint.rvs(15,46) assert_((val >= 15) & (val < 46)) assert_(isinstance(val, numpy.ScalarType), msg=repr(type(val))) val = stats.randint(15,46).rvs(3) assert_(val.dtype.char in typecodes['AllInteger']) def test_pdf(self): k = numpy.r_[0:36] out = numpy.where((k >= 5) & (k < 30), 1.0/(30-5), 0) vals = stats.randint.pmf(k,5,30) assert_array_almost_equal(vals,out) def test_cdf(self): x = numpy.r_[0:36:100j] k = numpy.floor(x) out = numpy.select([k>=30,k>=5],[1.0,(k-5.0+1)/(30-5.0)],0) vals = stats.randint.cdf(x,5,30) assert_array_almost_equal(vals, out, decimal=12) class TestBinom(TestCase): def test_rvs(self): vals = stats.binom.rvs(10, 0.75, size=(2, 50)) assert_(numpy.all(vals >= 0) & numpy.all(vals <= 10)) assert_(numpy.shape(vals) == (2, 50)) assert_(vals.dtype.char in typecodes['AllInteger']) val = stats.binom.rvs(10, 0.75) assert_(isinstance(val, int)) val = stats.binom(10, 0.75).rvs(3) assert_(isinstance(val, numpy.ndarray)) assert_(val.dtype.char in typecodes['AllInteger']) def test_pmf(self): # regression test for Ticket #1842 vals1 = stats.binom.pmf(100, 100,1) vals2 = stats.binom.pmf(0, 100,0) assert_allclose(vals1, 1.0, rtol=1e-15, atol=0) assert_allclose(vals2, 1.0, rtol=1e-15, atol=0) class TestBernoulli(TestCase): def test_rvs(self): vals = stats.bernoulli.rvs(0.75, size=(2, 50)) assert_(numpy.all(vals >= 0) & numpy.all(vals <= 1)) assert_(numpy.shape(vals) == (2, 50)) assert_(vals.dtype.char in typecodes['AllInteger']) val = stats.bernoulli.rvs(0.75) assert_(isinstance(val, int)) val = stats.bernoulli(0.75).rvs(3) assert_(isinstance(val, numpy.ndarray)) assert_(val.dtype.char in typecodes['AllInteger']) class TestNBinom(TestCase): def test_rvs(self): vals = stats.nbinom.rvs(10, 0.75, size=(2, 50)) assert_(numpy.all(vals >= 0)) assert_(numpy.shape(vals) == (2, 50)) assert_(vals.dtype.char in typecodes['AllInteger']) val = stats.nbinom.rvs(10, 0.75) assert_(isinstance(val, int)) val = stats.nbinom(10, 0.75).rvs(3) assert_(isinstance(val, numpy.ndarray)) assert_(val.dtype.char in typecodes['AllInteger']) def test_pmf(self): # regression test for ticket 1779 assert_allclose(np.exp(stats.nbinom.logpmf(700, 721, 0.52)), stats.nbinom.pmf(700, 721, 0.52)) class TestGeom(TestCase): def test_rvs(self): vals = stats.geom.rvs(0.75, size=(2, 50)) assert_(numpy.all(vals >= 0)) assert_(numpy.shape(vals) == (2, 50)) assert_(vals.dtype.char in typecodes['AllInteger']) val = stats.geom.rvs(0.75) assert_(isinstance(val, int)) val = stats.geom(0.75).rvs(3) assert_(isinstance(val, numpy.ndarray)) assert_(val.dtype.char in typecodes['AllInteger']) def test_pmf(self): vals = stats.geom.pmf([1,2,3],0.5) assert_array_almost_equal(vals,[0.5,0.25,0.125]) def test_logpmf(self): # regression test for ticket 1793 vals1 = np.log(stats.geom.pmf([1,2,3], 0.5)) vals2 = stats.geom.logpmf([1,2,3], 0.5) assert_allclose(vals1, vals2, rtol=1e-15, atol=0) def test_cdf_sf(self): vals = stats.geom.cdf([1,2,3],0.5) vals_sf = stats.geom.sf([1,2,3],0.5) expected = array([0.5,0.75,0.875]) assert_array_almost_equal(vals,expected) assert_array_almost_equal(vals_sf,1-expected) class TestTruncnorm(TestCase): def test_ppf_ticket1131(self): vals = stats.truncnorm.ppf([-0.5,0,1e-4,0.5, 1-1e-4,1,2], -1., 1., loc=[3]*7, scale=2) expected = np.array([np.nan, 1, 1.00056419, 3, 4.99943581, 5, np.nan]) assert_array_almost_equal(vals, expected) def test_isf_ticket1131(self): vals = 
stats.truncnorm.isf([-0.5,0,1e-4,0.5, 1-1e-4,1,2], -1., 1., loc=[3]*7, scale=2) expected = np.array([np.nan, 5, 4.99943581, 3, 1.00056419, 1, np.nan]) assert_array_almost_equal(vals, expected) class TestHypergeom(TestCase): def test_rvs(self): vals = stats.hypergeom.rvs(20, 10, 3, size=(2, 50)) assert_(numpy.all(vals >= 0) & numpy.all(vals <= 3)) assert_(numpy.shape(vals) == (2, 50)) assert_(vals.dtype.char in typecodes['AllInteger']) val = stats.hypergeom.rvs(20, 3, 10) assert_(isinstance(val, int)) val = stats.hypergeom(20, 3, 10).rvs(3) assert_(isinstance(val, numpy.ndarray)) assert_(val.dtype.char in typecodes['AllInteger']) def test_precision(self): # comparison number from mpmath M = 2500 n = 50 N = 500 tot = M good = n hgpmf = stats.hypergeom.pmf(2, tot, good, N) assert_almost_equal(hgpmf, 0.0010114963068932233, 11) def test_precision2(self): """Test hypergeom precision for large numbers. See #1218.""" # Results compared with those from R. oranges = 9.9e4 pears = 1.1e5 fruits_eaten = np.array([3, 3.8, 3.9, 4, 4.1, 4.2, 5]) * 1e4 quantile = 2e4 res = [] for eaten in fruits_eaten: res.append(stats.hypergeom.sf(quantile, oranges + pears, oranges, eaten)) expected = np.array([0, 1.904153e-114, 2.752693e-66, 4.931217e-32, 8.265601e-11, 0.1237904, 1]) assert_allclose(res, expected, atol=0, rtol=5e-7) # Test with array_like first argument quantiles = [1.9e4, 2e4, 2.1e4, 2.15e4] res2 = stats.hypergeom.sf(quantiles, oranges + pears, oranges, 4.2e4) expected2 = [1, 0.1237904, 6.511452e-34, 3.277667e-69] assert_allclose(res2, expected2, atol=0, rtol=5e-7) class TestLogser(TestCase): def test_rvs(self): vals = stats.logser.rvs(0.75, size=(2, 50)) assert_(numpy.all(vals >= 1)) assert_(numpy.shape(vals) == (2, 50)) assert_(vals.dtype.char in typecodes['AllInteger']) val = stats.logser.rvs(0.75) assert_(isinstance(val, int)) val = stats.logser(0.75).rvs(3) assert_(isinstance(val, numpy.ndarray)) assert_(val.dtype.char in typecodes['AllInteger']) class TestPearson3(TestCase): def test_rvs(self): vals = stats.pearson3.rvs(0.1, size=(2, 50)) assert_(numpy.shape(vals) == (2, 50)) assert_(vals.dtype.char in typecodes['AllFloat']) val = stats.pearson3.rvs(0.5) assert_(isinstance(val, float)) val = stats.pearson3(0.5).rvs(3) assert_(isinstance(val, numpy.ndarray)) assert_(val.dtype.char in typecodes['AllFloat']) assert_(len(val) == 3) def test_pdf(self): vals = stats.pearson3.pdf(2, [0.0, 0.1, 0.2]) assert_allclose(vals, np.array([0.05399097, 0.05555481, 0.05670246]), atol=1e-6) vals = stats.pearson3.pdf(-3, 0.1) assert_allclose(vals, np.array([0.00313791]), atol=1e-6) vals = stats.pearson3.pdf([-3,-2,-1,0,1], 0.1) assert_allclose(vals, np.array([0.00313791, 0.05192304, 0.25028092, 0.39885918, 0.23413173]), atol=1e-6) def test_cdf(self): vals = stats.pearson3.cdf(2, [0.0, 0.1, 0.2]) assert_allclose(vals, np.array([0.97724987, 0.97462004, 0.97213626]), atol=1e-6) vals = stats.pearson3.cdf(-3, 0.1) assert_allclose(vals, [0.00082256], atol=1e-6) vals = stats.pearson3.cdf([-3,-2,-1,0,1], 0.1) assert_allclose(vals, [8.22563821e-04, 1.99860448e-02, 1.58550710e-01, 5.06649130e-01, 8.41442111e-01], atol=1e-6) class TestPoisson(TestCase): def test_rvs(self): vals = stats.poisson.rvs(0.5, size=(2, 50)) assert_(numpy.all(vals >= 0)) assert_(numpy.shape(vals) == (2, 50)) assert_(vals.dtype.char in typecodes['AllInteger']) val = stats.poisson.rvs(0.5) assert_(isinstance(val, int)) val = stats.poisson(0.5).rvs(3) assert_(isinstance(val, numpy.ndarray)) assert_(val.dtype.char in typecodes['AllInteger']) class 
TestZipf(TestCase): def test_rvs(self): vals = stats.zipf.rvs(1.5, size=(2, 50)) assert_(numpy.all(vals >= 1)) assert_(numpy.shape(vals) == (2, 50)) assert_(vals.dtype.char in typecodes['AllInteger']) val = stats.zipf.rvs(1.5) assert_(isinstance(val, int)) val = stats.zipf(1.5).rvs(3) assert_(isinstance(val, numpy.ndarray)) assert_(val.dtype.char in typecodes['AllInteger']) class TestDLaplace(TestCase): def test_rvs(self): vals = stats.dlaplace.rvs(1.5 , size=(2, 50)) assert_(numpy.shape(vals) == (2, 50)) assert_(vals.dtype.char in typecodes['AllInteger']) val = stats.dlaplace.rvs(1.5) assert_(isinstance(val, int)) val = stats.dlaplace(1.5).rvs(3) assert_(isinstance(val, numpy.ndarray)) assert_(val.dtype.char in typecodes['AllInteger']) def test_rvgeneric_std(): # Regression test for #1191 assert_array_almost_equal(stats.t.std([5, 6]), [1.29099445, 1.22474487]) class TestRvDiscrete(TestCase): def test_rvs(self): states = [-1,0,1,2,3,4] probability = [0.0,0.3,0.4,0.0,0.3,0.0] samples = 1000 r = stats.rv_discrete(name='sample',values=(states,probability)) x = r.rvs(size=samples) assert_(isinstance(x, numpy.ndarray)) for s,p in zip(states,probability): assert_(abs(sum(x == s)/float(samples) - p) < 0.05) x = r.rvs() assert_(isinstance(x, int)) class TestExpon(TestCase): def test_zero(self): assert_equal(stats.expon.pdf(0),1) def test_tail(self): # Regression test for ticket 807 assert_equal(stats.expon.cdf(1e-18), 1e-18) assert_equal(stats.expon.isf(stats.expon.sf(40)), 40) class TestGenExpon(TestCase): def test_pdf_unity_area(self): from scipy.integrate import simps # PDF should integrate to one assert_almost_equal(simps(stats.genexpon.pdf(numpy.arange(0,10,0.01), 0.5, 0.5, 2.0), dx=0.01), 1, 1) def test_cdf_bounds(self): # CDF should always be positive cdf = stats.genexpon.cdf(numpy.arange(0, 10, 0.01), 0.5, 0.5, 2.0) assert_(numpy.all((0 <= cdf) & (cdf <= 1))) class TestExponpow(TestCase): def test_tail(self): assert_almost_equal(stats.exponpow.cdf(1e-10, 2.), 1e-20) assert_almost_equal(stats.exponpow.isf(stats.exponpow.sf(5, .8), .8), 5) class TestSkellam(TestCase): def test_pmf(self): #comparison to R k = numpy.arange(-10, 15) mu1, mu2 = 10, 5 skpmfR = numpy.array( [4.2254582961926893e-005, 1.1404838449648488e-004, 2.8979625801752660e-004, 6.9177078182101231e-004, 1.5480716105844708e-003, 3.2412274963433889e-003, 6.3373707175123292e-003, 1.1552351566696643e-002, 1.9606152375042644e-002, 3.0947164083410337e-002, 4.5401737566767360e-002, 6.1894328166820688e-002, 7.8424609500170578e-002, 9.2418812533573133e-002, 1.0139793148019728e-001, 1.0371927988298846e-001, 9.9076583077406091e-002, 8.8546660073089561e-002, 7.4187842052486810e-002, 5.8392772862200251e-002, 4.3268692953013159e-002, 3.0248159818374226e-002, 1.9991434305603021e-002, 1.2516877303301180e-002, 7.4389876226229707e-003]) assert_almost_equal(stats.skellam.pmf(k, mu1, mu2), skpmfR, decimal=15) def test_cdf(self): #comparison to R, only 5 decimals k = numpy.arange(-10, 15) mu1, mu2 = 10, 5 skcdfR = numpy.array( [6.4061475386192104e-005, 1.7810985988267694e-004, 4.6790611790020336e-004, 1.1596768997212152e-003, 2.7077485103056847e-003, 5.9489760066490718e-003, 1.2286346724161398e-002, 2.3838698290858034e-002, 4.3444850665900668e-002, 7.4392014749310995e-002, 1.1979375231607835e-001, 1.8168808048289900e-001, 2.6011268998306952e-001, 3.5253150251664261e-001, 4.5392943399683988e-001, 5.5764871387982828e-001, 6.5672529695723436e-001, 7.4527195703032389e-001, 8.1945979908281064e-001, 8.7785257194501087e-001, 9.2112126489802404e-001, 
9.5136942471639818e-001, 9.7136085902200120e-001, 9.8387773632530240e-001, 9.9131672394792536e-001]) assert_almost_equal(stats.skellam.cdf(k, mu1, mu2), skcdfR, decimal=5) class TestLognorm(TestCase): def test_pdf(self): # Regression test for Ticket #1471: avoid nan with 0/0 situation with np.errstate(divide='ignore'): pdf = stats.lognorm.pdf([0, 0.5, 1], 1) assert_array_almost_equal(pdf, [0.0, 0.62749608, 0.39894228]) class TestBeta(TestCase): def test_logpdf(self): # Regression test for Ticket #1326: avoid nan with 0*log(0) situation logpdf = stats.beta.logpdf(0,1,0.5) assert_almost_equal(logpdf, -0.69314718056) logpdf = stats.beta.logpdf(0,0.5,1) assert_almost_equal(logpdf, np.inf) def test_logpdf_ticket_1866(self): alpha, beta = 267, 1472 x = np.array([0.2, 0.5, 0.6]) b = stats.beta(alpha, beta) assert_allclose(b.logpdf(x).sum(), -1201.699061824062) assert_allclose(b.pdf(x), np.exp(b.logpdf(x))) class TestBetaPrime(TestCase): def test_logpdf(self): alpha, beta = 267, 1472 x = np.array([0.2, 0.5, 0.6]) b = stats.betaprime(alpha, beta) assert_(np.isfinite(b.logpdf(x)).all()) assert_allclose(b.pdf(x), np.exp(b.logpdf(x))) class TestGamma(TestCase): def test_pdf(self): # a few test cases to compare with R pdf = stats.gamma.pdf(90, 394, scale=1./5) assert_almost_equal(pdf, 0.002312341) pdf = stats.gamma.pdf(3, 10, scale=1./5) assert_almost_equal(pdf, 0.1620358) def test_logpdf(self): # Regression test for Ticket #1326: cornercase avoid nan with 0*log(0) # situation logpdf = stats.gamma.logpdf(0,1) assert_almost_equal(logpdf, 0) class TestChi2(TestCase): # regression tests after precision improvements, ticket:1041, not verified def test_precision(self): assert_almost_equal(stats.chi2.pdf(1000, 1000), 8.919133934753128e-003, 14) assert_almost_equal(stats.chi2.pdf(100, 100), 0.028162503162596778, 14) class TestArrayArgument(TestCase): #test for ticket:992 def test_noexception(self): rvs = stats.norm.rvs(loc=(np.arange(5)), scale=np.ones(5), size=(10,5)) assert_equal(rvs.shape, (10,5)) class TestDocstring(TestCase): def test_docstrings(self): # See ticket #761 if stats.rayleigh.__doc__ is not None: self.assertTrue("rayleigh" in stats.rayleigh.__doc__.lower()) if stats.bernoulli.__doc__ is not None: self.assertTrue("bernoulli" in stats.bernoulli.__doc__.lower()) def test_no_name_arg(self): # If name is not given, construction shouldn't fail. See #1508. stats.rv_continuous() stats.rv_discrete() class TestEntropy(TestCase): def test_entropy_positive(self): # See ticket #497 pk = [0.5,0.2,0.3] qk = [0.1,0.25,0.65] eself = stats.entropy(pk,pk) edouble = stats.entropy(pk,qk) assert_(0.0 == eself) assert_(edouble >= 0.0) def test_entropy_base(self): pk = np.ones(16, float) S = stats.entropy(pk, base=2.) assert_(abs(S - 4.) < 1.e-5) qk = np.ones(16, float) qk[:8] = 2. S = stats.entropy(pk, qk) S2 = stats.entropy(pk, qk, base=2.) 
assert_(abs(S/S2 - np.log(2.)) < 1.e-5) def test_entropy_zero(self): # Test for PR-479 assert_almost_equal(stats.entropy([0, 1, 2]), 0.63651416829481278, decimal=12) def TestArgsreduce(): a = array([1,3,2,1,2,3,3]) b,c = argsreduce(a > 1, a, 2) assert_array_equal(b, [3,2,2,3,3]) assert_array_equal(c, [2,2,2,2,2]) b,c = argsreduce(2 > 1, a, 2) assert_array_equal(b, a[0]) assert_array_equal(c, [2]) b,c = argsreduce(a > 0, a, 2) assert_array_equal(b, a) assert_array_equal(c, [2] * numpy.size(a)) class TestFitMethod(object): skip = ['ncf'] @dec.slow def test_fit(self): def check(func, dist, args, alpha): if dist in self.skip: raise SkipTest("%s fit known to fail" % dist) distfunc = getattr(stats, dist) res = distfunc.rvs(*args, **{'size':200}) vals = distfunc.fit(res) vals2 = distfunc.fit(res, optimizer='powell') # Only check the length of the return # FIXME: should check the actual results to see if we are 'close' # to what was created --- but what is 'close' enough if dist in ['erlang', 'frechet']: assert_(len(vals)==len(args)) assert_(len(vals2)==len(args)) else: assert_(len(vals) == 2+len(args)) assert_(len(vals2)==2+len(args)) for func, dist, args, alpha in test_all_distributions(): yield check, func, dist, args, alpha @dec.slow def test_fix_fit(self): def check(func, dist, args, alpha): # Not sure why 'ncf', and 'beta' are failing # erlang and frechet have different len(args) than distfunc.numargs if dist in self.skip + ['erlang', 'frechet']: raise SkipTest("%s fit known to fail" % dist) distfunc = getattr(stats, dist) res = distfunc.rvs(*args, **{'size':200}) vals = distfunc.fit(res,floc=0) vals2 = distfunc.fit(res,fscale=1) assert_(len(vals) == 2+len(args)) assert_(vals[-2] == 0) assert_(vals2[-1] == 1) assert_(len(vals2) == 2+len(args)) if len(args) > 0: vals3 = distfunc.fit(res, f0=args[0]) assert_(len(vals3) == 2+len(args)) assert_(vals3[0] == args[0]) if len(args) > 1: vals4 = distfunc.fit(res, f1=args[1]) assert_(len(vals4) == 2+len(args)) assert_(vals4[1] == args[1]) if len(args) > 2: vals5 = distfunc.fit(res, f2=args[2]) assert_(len(vals5) == 2+len(args)) assert_(vals5[2] == args[2]) for func, dist, args, alpha in test_all_distributions(): yield check, func, dist, args, alpha def test_fix_fit_2args_lognorm(self): """Regression test for #1551.""" np.random.seed(12345) x = stats.lognorm.rvs(0.25, 0., 20.0, size=20) assert_allclose(np.array(stats.lognorm.fit(x, floc=0, fscale=20)), [0.25888672, 0, 20], atol=1e-5) class TestFrozen(TestCase): """Test that a frozen distribution gives the same results as the original object. Only tested for the normal distribution (with loc and scale specified) and for the gamma distribution (with a shape parameter specified). 
""" def test_norm(self): dist = stats.norm frozen = stats.norm(loc=10.0, scale=3.0) result_f = frozen.pdf(20.0) result = dist.pdf(20.0, loc=10.0, scale=3.0) assert_equal(result_f, result) result_f = frozen.cdf(20.0) result = dist.cdf(20.0, loc=10.0, scale=3.0) assert_equal(result_f, result) result_f = frozen.ppf(0.25) result = dist.ppf(0.25, loc=10.0, scale=3.0) assert_equal(result_f, result) result_f = frozen.isf(0.25) result = dist.isf(0.25, loc=10.0, scale=3.0) assert_equal(result_f, result) result_f = frozen.sf(10.0) result = dist.sf(10.0, loc=10.0, scale=3.0) assert_equal(result_f, result) result_f = frozen.median() result = dist.median(loc=10.0, scale=3.0) assert_equal(result_f, result) result_f = frozen.mean() result = dist.mean(loc=10.0, scale=3.0) assert_equal(result_f, result) result_f = frozen.var() result = dist.var(loc=10.0, scale=3.0) assert_equal(result_f, result) result_f = frozen.std() result = dist.std(loc=10.0, scale=3.0) assert_equal(result_f, result) result_f = frozen.entropy() result = dist.entropy(loc=10.0, scale=3.0) assert_equal(result_f, result) result_f = frozen.moment(2) result = dist.moment(2,loc=10.0, scale=3.0) assert_equal(result_f, result) def test_gamma(self): a = 2.0 dist = stats.gamma frozen = stats.gamma(a) result_f = frozen.pdf(20.0) result = dist.pdf(20.0, a) assert_equal(result_f, result) result_f = frozen.cdf(20.0) result = dist.cdf(20.0, a) assert_equal(result_f, result) result_f = frozen.ppf(0.25) result = dist.ppf(0.25, a) assert_equal(result_f, result) result_f = frozen.isf(0.25) result = dist.isf(0.25, a) assert_equal(result_f, result) result_f = frozen.sf(10.0) result = dist.sf(10.0, a) assert_equal(result_f, result) result_f = frozen.median() result = dist.median(a) assert_equal(result_f, result) result_f = frozen.mean() result = dist.mean(a) assert_equal(result_f, result) result_f = frozen.var() result = dist.var(a) assert_equal(result_f, result) result_f = frozen.std() result = dist.std(a) assert_equal(result_f, result) result_f = frozen.entropy() result = dist.entropy(a) assert_equal(result_f, result) result_f = frozen.moment(2) result = dist.moment(2, a) assert_equal(result_f, result) def test_regression_ticket_1293(self): # Create a frozen distribution. frozen = stats.lognorm(1) # Call one of its methods that does not take any keyword arguments. m1 = frozen.moment(2) # Now call a method that takes a keyword argument. s = frozen.stats(moments='mvsk') # Call moment(2) again. # After calling stats(), the following was raising an exception. # So this test passes if the following does not raise an exception. m2 = frozen.moment(2) # The following should also be true, of course. But it is not # the focus of this test. assert_equal(m1, m2) class TestExpect(TestCase): """Test for expect method. 
Uses normal distribution and beta distribution for finite bounds, and hypergeom for discrete distribution with finite support """ def test_norm(self): v = stats.norm.expect(lambda x: (x-5)*(x-5), loc=5, scale=2) assert_almost_equal(v, 4, decimal=14) m = stats.norm.expect(lambda x: (x), loc=5, scale=2) assert_almost_equal(m, 5, decimal=14) lb = stats.norm.ppf(0.05, loc=5, scale=2) ub = stats.norm.ppf(0.95, loc=5, scale=2) prob90 = stats.norm.expect(lambda x: 1, loc=5, scale=2, lb=lb, ub=ub) assert_almost_equal(prob90, 0.9, decimal=14) prob90c = stats.norm.expect(lambda x: 1, loc=5, scale=2, lb=lb, ub=ub, conditional=True) assert_almost_equal(prob90c, 1., decimal=14) def test_beta(self): #case with finite support interval ## >>> mtrue, vtrue = stats.beta.stats(10,5, loc=5., scale=2.) ## >>> mtrue, vtrue ## (array(6.333333333333333), array(0.055555555555555552)) v = stats.beta.expect(lambda x: (x-19/3.)*(x-19/3.), args=(10,5), loc=5, scale=2) assert_almost_equal(v, 1./18., decimal=13) m = stats.beta.expect(lambda x: x, args=(10,5), loc=5., scale=2.) assert_almost_equal(m, 19/3., decimal=13) ub = stats.beta.ppf(0.95, 10, 10, loc=5, scale=2) lb = stats.beta.ppf(0.05, 10, 10, loc=5, scale=2) prob90 = stats.beta.expect(lambda x: 1., args=(10,10), loc=5., scale=2.,lb=lb, ub=ub, conditional=False) assert_almost_equal(prob90, 0.9, decimal=13) prob90c = stats.beta.expect(lambda x: 1, args=(10,10), loc=5, scale=2, lb=lb, ub=ub, conditional=True) assert_almost_equal(prob90c, 1., decimal=13) def test_hypergeom(self): #test case with finite bounds #without specifying bounds m_true, v_true = stats.hypergeom.stats(20, 10, 8, loc=5.) m = stats.hypergeom.expect(lambda x: x, args=(20, 10, 8), loc=5.) assert_almost_equal(m, m_true, decimal=13) v = stats.hypergeom.expect(lambda x: (x-9.)**2, args=(20, 10, 8), loc=5.) assert_almost_equal(v, v_true, decimal=14) #with bounds, bounds equal to shifted support v_bounds = stats.hypergeom.expect(lambda x: (x-9.)**2, args=(20, 10, 8), loc=5., lb=5, ub=13) assert_almost_equal(v_bounds, v_true, decimal=14) #drop boundary points prob_true = 1-stats.hypergeom.pmf([5, 13], 20, 10, 8, loc=5).sum() prob_bounds = stats.hypergeom.expect(lambda x: 1, args=(20, 10, 8), loc=5., lb=6, ub=12) assert_almost_equal(prob_bounds, prob_true, decimal=13) #conditional prob_bc = stats.hypergeom.expect(lambda x: 1, args=(20, 10, 8), loc=5., lb=6, ub=12, conditional=True) assert_almost_equal(prob_bc, 1, decimal=14) #check simple integral prob_b = stats.hypergeom.expect(lambda x: 1, args=(20, 10, 8), lb=0, ub=8) assert_almost_equal(prob_b, 1, decimal=13) def test_poisson(self): #poisson, use lower bound only prob_bounds = stats.poisson.expect(lambda x: 1, args=(2,), lb=3, conditional=False) prob_b_true = 1-stats.poisson.cdf(2,2) assert_almost_equal(prob_bounds, prob_b_true, decimal=14) prob_lb = stats.poisson.expect(lambda x: 1, args=(2,), lb=2, conditional=True) assert_almost_equal(prob_lb, 1, decimal=14) def test_regression_ticket_1316(): # The following was raising an exception, because _construct_default_doc() # did not handle the default keyword extradoc=None. See ticket #1316. g = stats.distributions.gamma_gen(name='gamma') def test_regression_ticket_1326(): #adjust to avoid nan with 0*log(0) assert_almost_equal(stats.chi2.pdf(0.0, 2), 0.5, 14) def test_regression_tukey_lambda(): # Make sure that Tukey-Lambda distribution correctly handles non-positive lambdas. 
x = np.linspace(-5.0, 5.0, 101) olderr = np.seterr(divide='ignore') try: for lam in [0.0, -1.0, -2.0, np.array([[-1.0], [0.0], [-2.0]])]: p = stats.tukeylambda.pdf(x, lam) assert_((p != 0.0).all()) assert_(~np.isnan(p).all()) lam = np.array([[-1.0], [0.0], [2.0]]) p = stats.tukeylambda.pdf(x, lam) finally: np.seterr(**olderr) assert_(~np.isnan(p).all()) assert_((p[0] != 0.0).all()) assert_((p[1] != 0.0).all()) assert_((p[2] != 0.0).any()) assert_((p[2] == 0.0).any()) def test_regression_ticket_1421(): assert_('pdf(x, mu, loc=0, scale=1)' not in stats.poisson.__doc__) assert_('pmf(x,' in stats.poisson.__doc__) def test_nan_arguments_ticket_835(): assert_(np.isnan(stats.t.logcdf(np.nan))) assert_(np.isnan(stats.t.cdf(np.nan))) assert_(np.isnan(stats.t.logsf(np.nan))) assert_(np.isnan(stats.t.sf(np.nan))) assert_(np.isnan(stats.t.pdf(np.nan))) assert_(np.isnan(stats.t.logpdf(np.nan))) assert_(np.isnan(stats.t.ppf(np.nan))) assert_(np.isnan(stats.t.isf(np.nan))) assert_(np.isnan(stats.bernoulli.logcdf(np.nan, 0.5))) assert_(np.isnan(stats.bernoulli.cdf(np.nan, 0.5))) assert_(np.isnan(stats.bernoulli.logsf(np.nan, 0.5))) assert_(np.isnan(stats.bernoulli.sf(np.nan, 0.5))) assert_(np.isnan(stats.bernoulli.pmf(np.nan, 0.5))) assert_(np.isnan(stats.bernoulli.logpmf(np.nan, 0.5))) assert_(np.isnan(stats.bernoulli.ppf(np.nan, 0.5))) assert_(np.isnan(stats.bernoulli.isf(np.nan, 0.5))) def test_frozen_fit_ticket_1536(): np.random.seed(5678) true = np.array([0.25, 0., 0.5]) x = stats.lognorm.rvs(true[0], true[1], true[2], size=100) olderr = np.seterr(divide='ignore') try: params = np.array(stats.lognorm.fit(x, floc=0.)) finally: np.seterr(**olderr) assert_almost_equal(params, true, decimal=2) params = np.array(stats.lognorm.fit(x, fscale=0.5, loc=0)) assert_almost_equal(params, true, decimal=2) params = np.array(stats.lognorm.fit(x, f0=0.25, loc=0)) assert_almost_equal(params, true, decimal=2) params = np.array(stats.lognorm.fit(x, f0=0.25, floc=0)) assert_almost_equal(params, true, decimal=2) np.random.seed(5678) loc = 1 floc = 0.9 x = stats.norm.rvs(loc, 2., size=100) params = np.array(stats.norm.fit(x, floc=floc)) expected = np.array([floc, np.sqrt(((x-floc)**2).mean())]) assert_almost_equal(params, expected, decimal=4) def test_regression_ticket_1530(): # Check the starting value works for Cauchy distribution fit. np.random.seed(654321) rvs = stats.cauchy.rvs(size=100) params = stats.cauchy.fit(rvs) expected = (0.045, 1.142) assert_almost_equal(params, expected, decimal=1) def test_tukeylambda_stats_ticket_1545(): # Some test for the variance and kurtosis of the Tukey Lambda distr. # See test_tukeylamdba_stats.py for more tests. mv = stats.tukeylambda.stats(0, moments='mvsk') # Known exact values: expected = [0, np.pi**2/3, 0, 1.2] assert_almost_equal(mv, expected, decimal=10) mv = stats.tukeylambda.stats(3.13, moments='mvsk') # 'expected' computed with mpmath. expected = [0, 0.0269220858861465102, 0, -0.898062386219224104] assert_almost_equal(mv, expected, decimal=10) mv = stats.tukeylambda.stats(0.14, moments='mvsk') # 'expected' computed with mpmath. expected = [0, 2.11029702221450250, 0, -0.02708377353223019456] assert_almost_equal(mv, expected, decimal=10) def test_poisson_logpmf_ticket_1436(): assert_(np.isfinite(stats.poisson.logpmf(1500, 200))) def test_powerlaw_stats(): """Test the powerlaw stats function. This unit test is also a regression test for ticket 1548. 
The exact values are: mean: mu = a / (a + 1) variance: sigma**2 = a / ((a + 2) * (a + 1) ** 2) skewness: One formula (see http://en.wikipedia.org/wiki/Skewness) is gamma_1 = (E[X**3] - 3*mu*E[X**2] + 2*mu**3) / sigma**3 A short calculation shows that E[X**k] is a / (a + k), so gamma_1 can be implemented as n = a/(a+3) - 3*(a/(a+1))*a/(a+2) + 2*(a/(a+1))**3 d = sqrt(a/((a+2)*(a+1)**2)) ** 3 gamma_1 = n/d Either by simplifying, or by a direct calculation of mu_3 / sigma**3, one gets the more concise formula: gamma_1 = -2.0 * ((a - 1) / (a + 3)) * sqrt((a + 2) / a) kurtosis: (See http://en.wikipedia.org/wiki/Kurtosis) The excess kurtosis is gamma_2 = mu_4 / sigma**4 - 3 A bit of calculus and algebra (sympy helps) shows that mu_4 = 3*a*(3*a**2 - a + 2) / ((a+1)**4 * (a+2) * (a+3) * (a+4)) so gamma_2 = 3*(3*a**2 - a + 2) * (a+2) / (a*(a+3)*(a+4)) - 3 which can be rearranged to gamma_2 = 6 * (a**3 - a**2 - 6*a + 2) / (a*(a+3)*(a+4)) """ cases = [(1.0, (0.5, 1./12 , 0.0, -1.2)), (2.0, (2./3, 2./36, -0.56568542494924734, -0.6))] for a, exact_mvsk in cases: mvsk = stats.powerlaw.stats(a, moments="mvsk") assert_array_almost_equal(mvsk, exact_mvsk) def test_ksone_fit_freeze(): #Regression test for ticket #1638. d = np.array( [-0.18879233, 0.15734249, 0.18695107, 0.27908787, -0.248649, -0.2171497 , 0.12233512, 0.15126419, 0.03119282, 0.4365294 , 0.08930393, -0.23509903, 0.28231224, -0.09974875, -0.25196048, 0.11102028, 0.1427649 , 0.10176452, 0.18754054, 0.25826724, 0.05988819, 0.0531668 , 0.21906056, 0.32106729, 0.2117662 , 0.10886442, 0.09375789, 0.24583286, -0.22968366, -0.07842391, -0.31195432, -0.21271196, 0.1114243 , -0.13293002, 0.01331725, -0.04330977, -0.09485776, -0.28434547, 0.22245721, -0.18518199, -0.10943985, -0.35243174, 0.06897665, -0.03553363, -0.0701746 , -0.06037974, 0.37670779, -0.21684405]) olderr = np.seterr(invalid='ignore') warn_ctx = WarningManager() warn_ctx.__enter__() try: warnings.simplefilter('ignore', UserWarning) warnings.simplefilter('ignore', RuntimeWarning) stats.ksone.fit(d) finally: warn_ctx.__exit__() np.seterr(**olderr) def test_norm_logcdf(): # Test precision of the logcdf of the normal distribution. # This precision was enhanced in ticket 1614. x = -np.asarray(list(range(0, 120, 4))) # Values from R expected = [-0.69314718, -10.36010149, -35.01343716, -75.41067300, -131.69539607, -203.91715537, -292.09872100, -396.25241451, -516.38564863, -652.50322759, -804.60844201, -972.70364403, -1156.79057310, -1356.87055173, -1572.94460885, -1805.01356068, -2053.07806561, -2317.13866238, -2597.19579746, -2893.24984493, -3205.30112136, -3533.34989701, -3877.39640444, -4237.44084522, -4613.48339520, -5005.52420869, -5413.56342187, -5837.60115548, -6277.63751711, -6733.67260303] olderr = np.seterr(divide='ignore') try: assert_allclose(stats.norm().logcdf(x), expected, atol=1e-8) finally: np.seterr(**olderr) def test_hypergeom_interval_1802(): #these two had endless loops assert_equal(stats.hypergeom.interval(.95, 187601, 43192, 757), (152.0, 197.0)) assert_equal(stats.hypergeom.interval(.945, 187601, 43192, 757), (152.0, 197.0)) #this was working also before assert_equal(stats.hypergeom.interval(.94, 187601, 43192, 757), (153.0, 196.0)) #degenerate case .a == .b assert_equal(stats.hypergeom.ppf(0.02, 100, 100, 8), 8) assert_equal(stats.hypergeom.ppf(1, 100, 100, 8), 8) def test_distribution_too_many_args(): # Check that a TypeError is raised when too many args are given to a method # Regression test for ticket 1815. 
x = np.linspace(0.1, 0.7, num=5) assert_raises(TypeError, stats.gamma.pdf, x, 2, 3, loc=1.0) assert_raises(TypeError, stats.gamma.pdf, x, 2, 3, 4, loc=1.0) assert_raises(TypeError, stats.gamma.pdf, x, 2, 3, 4, 5) assert_raises(TypeError, stats.gamma.pdf, x, 2, 3, loc=1.0, scale=0.5) assert_raises(TypeError, stats.gamma.rvs, 2., 3, loc=1.0, scale=0.5) assert_raises(TypeError, stats.gamma.cdf, x, 2., 3, loc=1.0, scale=0.5) assert_raises(TypeError, stats.gamma.ppf, x, 2., 3, loc=1.0, scale=0.5) assert_raises(TypeError, stats.gamma.stats, 2., 3, loc=1.0, scale=0.5) assert_raises(TypeError, stats.gamma.entropy, 2., 3, loc=1.0, scale=0.5) assert_raises(TypeError, stats.gamma.fit, x, 2., 3, loc=1.0, scale=0.5) # These should not give errors stats.gamma.pdf(x, 2, 3) # loc=3 stats.gamma.pdf(x, 2, 3, 4) # loc=3, scale=4 stats.gamma.stats(2., 3) stats.gamma.stats(2., 3, 4) stats.gamma.stats(2., 3, 4, 'mv') stats.gamma.rvs(2., 3, 4, 5) stats.gamma.fit(stats.gamma.rvs(2., size=7), 2.) # Also for a discrete distribution stats.geom.pmf(x, 2, loc=3) # no error, loc=3 assert_raises(TypeError, stats.geom.pmf, x, 2, 3, 4) assert_raises(TypeError, stats.geom.pmf, x, 2, 3, loc=4) # And for distributions with 0, 2 and 3 args respectively assert_raises(TypeError, stats.expon.pdf, x, 3, loc=1.0) assert_raises(TypeError, stats.exponweib.pdf, x, 3, 4, 5, loc=1.0) assert_raises(TypeError, stats.exponweib.pdf, x, 3, 4, 5, 0.1, 0.1) assert_raises(TypeError, stats.ncf.pdf, x, 3, 4, 5, 6, loc=1.0) assert_raises(TypeError, stats.ncf.pdf, x, 3, 4, 5, 6, 1.0, scale=0.5) stats.ncf.pdf(x, 3, 4, 5, 6, 1.0) # 3 args, plus loc/scale def test_ncx2_tails_ticket_955(): # Trac #955 -- check that the cdf computed by special functions # matches the integrated pdf a = stats.ncx2.cdf(np.arange(20, 25, 0.2), 2, 1.07458615e+02) b = stats.ncx2.veccdf(np.arange(20, 25, 0.2), 2, 1.07458615e+02) assert_allclose(a, b, rtol=1e-3, atol=0) if __name__ == "__main__": run_module_suite()
sargas/scipy
scipy/stats/tests/test_distributions.py
Python
bsd-3-clause
42,192
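# A minimal standalone sketch (not part of the scipy test file above; function
# names here are illustrative, not scipy's) checking the powerlaw skewness
# identity that test_powerlaw_stats' docstring derives,
# gamma_1 = -2*(a - 1)/(a + 3) * sqrt((a + 2)/a), using only the raw moments
# E[X**k] = a/(a + k).
from math import sqrt

def powerlaw_skewness_from_moments(a):
    # mu, sigma**2 and the raw moments E[X**2], E[X**3] of powerlaw on [0, 1].
    mu = a / (a + 1.0)
    var = a / ((a + 2.0) * (a + 1.0) ** 2)
    m2 = a / (a + 2.0)
    m3 = a / (a + 3.0)
    return (m3 - 3.0 * mu * m2 + 2.0 * mu ** 3) / sqrt(var) ** 3

def powerlaw_skewness_closed_form(a):
    return -2.0 * ((a - 1.0) / (a + 3.0)) * sqrt((a + 2.0) / a)

if __name__ == "__main__":
    for a in (0.5, 1.0, 2.0, 10.0):
        assert abs(powerlaw_skewness_from_moments(a) -
                   powerlaw_skewness_closed_form(a)) < 1e-10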
#!/usr/bin/env python # Copyright (c) 2012 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """Generate keyboard layout and hotkey data for the keyboard overlay. This script fetches data from the keyboard layout and hotkey data spreadsheet, and output the data depending on the option. --cc: Rewrites a part of C++ code in chrome/browser/chromeos/webui/keyboard_overlay_ui.cc --grd: Rewrites a part of grd messages in chrome/app/generated_resources.grd --js: Rewrites the entire JavaScript code in chrome/browser/resources/keyboard_overlay/keyboard_overlay_data.js These options can be specified at the same time. e.g. python gen_keyboard_overlay_data.py --cc --grd --js The output directory of the generated files can be changed with --outdir. e.g. (This will generate tmp/keyboard_overlay.js) python gen_keyboard_overlay_data.py --outdir=tmp --js """ import cStringIO import datetime import gdata.spreadsheet.service import getpass import json import optparse import os import re import sys MODIFIER_SHIFT = 1 << 0 MODIFIER_CTRL = 1 << 1 MODIFIER_ALT = 1 << 2 KEYBOARD_GLYPH_SPREADSHEET_KEY = '0Ao3KldW9piwEdExLbGR6TmZ2RU9aUjFCMmVxWkVqVmc' HOTKEY_SPREADSHEET_KEY = '0AqzoqbAMLyEPdE1RQXdodk1qVkFyTWtQbUxROVM1cXc' CC_OUTDIR = 'chrome/browser/ui/webui/chromeos' CC_FILENAME = 'keyboard_overlay_ui.cc' GRD_OUTDIR = 'chrome/app' GRD_FILENAME = 'chromeos_strings.grdp' JS_OUTDIR = 'chrome/browser/resources/chromeos' JS_FILENAME = 'keyboard_overlay_data.js' CC_START = r'IDS_KEYBOARD_OVERLAY_INSTRUCTIONS_HIDE },' CC_END = r'};' GRD_START = r' <!-- BEGIN GENERATED KEYBOARD OVERLAY STRINGS -->' GRD_END = r' <!-- END GENERATED KEYBOARD OVERLAY STRINGS -->' LABEL_MAP = { 'glyph_arrow_down': 'down', 'glyph_arrow_left': 'left', 'glyph_arrow_right': 'right', 'glyph_arrow_up': 'up', 'glyph_back': 'back', 'glyph_backspace': 'backspace', 'glyph_brightness_down': 'bright down', 'glyph_brightness_up': 'bright up', 'glyph_enter': 'enter', 'glyph_forward': 'forward', 'glyph_fullscreen': 'full screen', # Kana/Eisu key on Japanese keyboard 'glyph_ime': u'\u304b\u306a\u0020\u002f\u0020\u82f1\u6570', 'glyph_lock': 'lock', 'glyph_overview': 'switch window', 'glyph_power': 'power', 'glyph_right': 'right', 'glyph_reload': 'reload', 'glyph_search': 'search', 'glyph_shift': 'shift', 'glyph_tab': 'tab', 'glyph_tools': 'tools', 'glyph_volume_down': 'vol. down', 'glyph_volume_mute': 'mute', 'glyph_volume_up': 'vol. 
up', }; INPUT_METHOD_ID_TO_OVERLAY_ID = { 'xkb:be::fra': 'fr', 'xkb:be::ger': 'de', 'xkb:be::nld': 'nl', 'xkb:bg::bul': 'bg', 'xkb:bg:phonetic:bul': 'bg', 'xkb:br::por': 'pt_BR', 'xkb:ca::fra': 'fr_CA', 'xkb:ca:eng:eng': 'ca', 'xkb:ch::ger': 'de', 'xkb:ch:fr:fra': 'fr', 'xkb:cz::cze': 'cs', 'xkb:de::ger': 'de', 'xkb:de:neo:ger': 'de_neo', 'xkb:dk::dan': 'da', 'xkb:ee::est': 'et', 'xkb:es::spa': 'es', 'xkb:es:cat:cat': 'ca', 'xkb:fi::fin': 'fi', 'xkb:fr::fra': 'fr', 'xkb:gb:dvorak:eng': 'en_GB_dvorak', 'xkb:gb:extd:eng': 'en_GB', 'xkb:gr::gre': 'el', 'xkb:hr::scr': 'hr', 'xkb:hu::hun': 'hu', 'xkb:il::heb': 'iw', 'xkb:it::ita': 'it', 'xkb:jp::jpn': 'ja', 'xkb:latam::spa': 'es_419', 'xkb:lt::lit': 'lt', 'xkb:lv:apostrophe:lav': 'lv', 'xkb:no::nob': 'no', 'xkb:pl::pol': 'pl', 'xkb:pt::por': 'pt_PT', 'xkb:ro::rum': 'ro', 'xkb:rs::srp': 'sr', 'xkb:ru::rus': 'ru', 'xkb:ru:phonetic:rus': 'ru', 'xkb:se::swe': 'sv', 'xkb:si::slv': 'sl', 'xkb:sk::slo': 'sk', 'xkb:tr::tur': 'tr', 'xkb:ua::ukr': 'uk', 'xkb:us::eng': 'en_US', 'xkb:us::fil': 'en_US', 'xkb:us::ind': 'en_US', 'xkb:us::msa': 'en_US', 'xkb:us:altgr-intl:eng': 'en_US_altgr_intl', 'xkb:us:colemak:eng': 'en_US_colemak', 'xkb:us:dvorak:eng': 'en_US_dvorak', 'xkb:us:intl:eng': 'en_US_intl', 'xkb:us:intl:nld': 'en_US_intl', 'xkb:us:intl:por': 'en_US_intl', 'xkb:us:workman:eng': 'en_US_workman', 'xkb:us:workman-intl:eng': 'en_US_workman_intl', } # The file was first generated in 2012 and we have a policy of not updating # copyright dates. COPYRIGHT_HEADER=\ """// Copyright (c) 2012 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. // This is a generated file but may contain local modifications. See // src/tools/gen_keyboard_overlay_data/gen_keyboard_overlay_data.py --help """ # A snippet for grd file GRD_SNIPPET_TEMPLATE=""" <message name="%s" desc="%s"> %s </message> """ # A snippet for C++ file CC_SNIPPET_TEMPLATE=""" { "%s", %s }, """ def SplitBehavior(behavior): """Splits the behavior to compose a message or i18n-content value. Examples: 'Activate last tab' => ['Activate', 'last', 'tab'] 'Close tab' => ['Close', 'tab'] """ return [x for x in re.split('[ ()"-.,]', behavior) if len(x) > 0] def ToMessageName(behavior): """Composes a message name for grd file. Examples: 'Activate last tab' => IDS_KEYBOARD_OVERLAY_ACTIVATE_LAST_TAB 'Close tab' => IDS_KEYBOARD_OVERLAY_CLOSE_TAB """ segments = [segment.upper() for segment in SplitBehavior(behavior)] return 'IDS_KEYBOARD_OVERLAY_' + ('_'.join(segments)) def ToMessageDesc(description): """Composes a message description for grd file.""" message_desc = 'The text in the keyboard overlay to explain the shortcut' if description: message_desc = '%s (%s).' % (message_desc, description) else: message_desc += '.' return message_desc def Toi18nContent(behavior): """Composes a i18n-content value for HTML/JavaScript files. Examples: 'Activate last tab' => keyboardOverlayActivateLastTab 'Close tab' => keyboardOverlayCloseTab """ segments = [segment.lower() for segment in SplitBehavior(behavior)] result = 'keyboardOverlay' for segment in segments: result += segment[0].upper() + segment[1:] return result def ToKeys(hotkey): """Converts the action value to shortcut keys used from JavaScript. 
Examples:
    'Ctrl - 9' => '9<>CTRL'
    'Ctrl - Shift - Tab' => 'tab<>CTRL<>SHIFT'
  """
  values = hotkey.split(' - ')
  modifiers = sorted(value.upper() for value in values
                     if value in ['Shift', 'Ctrl', 'Alt', 'Search'])
  keycode = [value.lower() for value in values
             if value not in ['Shift', 'Ctrl', 'Alt', 'Search']]
  # The keys which are highlighted even without modifier keys.
  base_keys = ['backspace', 'power']
  if not modifiers and (keycode and keycode[0] not in base_keys):
    return None
  return '<>'.join(keycode + modifiers)


def ParseOptions():
  """Parses the input arguments and returns options."""
  # default_username = os.getusername() + '@google.com'
  default_username = '%s@google.com' % os.environ.get('USER')
  parser = optparse.OptionParser()
  parser.add_option('--key', dest='key',
                    help='The key of the spreadsheet (required).')
  parser.add_option('--username', dest='username',
                    default=default_username,
                    help='Your user name (default: %s).' % default_username)
  parser.add_option('--password', dest='password',
                    help='Your password.')
  parser.add_option('--account_type', default='GOOGLE', dest='account_type',
                    help='Account type used for gdata login (default: GOOGLE)')
  parser.add_option('--js', dest='js', default=False, action='store_true',
                    help='Output js file.')
  parser.add_option('--grd', dest='grd', default=False, action='store_true',
                    help='Output resource file.')
  parser.add_option('--cc', dest='cc', default=False, action='store_true',
                    help='Output cc file.')
  parser.add_option('--outdir', dest='outdir', default=None,
                    help='Specify the directory where files are generated.')
  (options, unused_args) = parser.parse_args()

  if not options.username.endswith('google.com'):
    print 'google.com account is necessary to use this script.'
    sys.exit(-1)
  if not (options.js or options.grd or options.cc):
    print 'Either --js, --grd, or --cc needs to be specified.'
    sys.exit(-1)

  # Get the password from the terminal, if needed.
  if not options.password:
    options.password = getpass.getpass(
        'Application specific password for %s: ' % options.username)
  return options


def InitClient(options):
  """Initializes the spreadsheet client."""
  client = gdata.spreadsheet.service.SpreadsheetsService()
  client.email = options.username
  client.password = options.password
  client.source = 'Spread Sheet'
  client.account_type = options.account_type
  print 'Logging in as %s (%s)' % (client.email, client.account_type)
  client.ProgrammaticLogin()
  return client


def PrintDiffs(message, lhs, rhs):
  """Prints the differences between |lhs| and |rhs|."""
  dif = set(lhs).difference(rhs)
  if dif:
    print message, ', '.join(dif)


def FetchSpreadsheetFeeds(client, key, sheets, cols):
  """Fetch feeds from the spreadsheet.

  Args:
    client: A spreadsheet client to be used for fetching data.
    key: A key string of the spreadsheet to be fetched.
    sheets: A list of the sheet names to read data from.
    cols: A list of columns to read data from.
""" worksheets_feed = client.GetWorksheetsFeed(key) print 'Fetching data from the worksheet: %s' % worksheets_feed.title.text worksheets_data = {} titles = [] for entry in worksheets_feed.entry: worksheet_id = entry.id.text.split('/')[-1] list_feed = client.GetListFeed(key, worksheet_id) list_data = [] # Hack to deal with sheet names like 'sv (Copy of fl)' title = list_feed.title.text.split('(')[0].strip() titles.append(title) if title not in sheets: continue print 'Reading data from the sheet: %s' % list_feed.title.text for i, entry in enumerate(list_feed.entry): line_data = {} for k in entry.custom: if (k not in cols) or (not entry.custom[k].text): continue line_data[k] = entry.custom[k].text list_data.append(line_data) worksheets_data[title] = list_data PrintDiffs('Exist only on the spreadsheet: ', titles, sheets) PrintDiffs('Specified but do not exist on the spreadsheet: ', sheets, titles) return worksheets_data def FetchKeyboardGlyphData(client): """Fetches the keyboard glyph data from the spreadsheet.""" glyph_cols = ['scancode', 'p0', 'p1', 'p2', 'p3', 'p4', 'p5', 'p6', 'p7', 'p8', 'p9', 'label', 'format', 'notes'] keyboard_glyph_data = FetchSpreadsheetFeeds( client, KEYBOARD_GLYPH_SPREADSHEET_KEY, INPUT_METHOD_ID_TO_OVERLAY_ID.values(), glyph_cols) ret = {} for lang in keyboard_glyph_data: ret[lang] = {} keys = {} for line in keyboard_glyph_data[lang]: scancode = line.get('scancode') if (not scancode) and line.get('notes'): ret[lang]['layoutName'] = line['notes'] continue del line['scancode'] if 'notes' in line: del line['notes'] if 'label' in line: line['label'] = LABEL_MAP.get(line['label'], line['label']) keys[scancode] = line # Add a label to space key if '39' not in keys: keys['39'] = {'label': 'space'} ret[lang]['keys'] = keys return ret def FetchLayoutsData(client): """Fetches the keyboard glyph data from the spreadsheet.""" layout_names = ['U_layout', 'J_layout', 'E_layout', 'B_layout'] cols = ['scancode', 'x', 'y', 'w', 'h'] layouts = FetchSpreadsheetFeeds(client, KEYBOARD_GLYPH_SPREADSHEET_KEY, layout_names, cols) ret = {} for layout_name, layout in layouts.items(): ret[layout_name[0]] = [] for row in layout: line = [] for col in cols: value = row.get(col) if not value: line.append('') else: if col != 'scancode': value = float(value) line.append(value) ret[layout_name[0]].append(line) return ret def FetchHotkeyData(client): """Fetches the hotkey data from the spreadsheet.""" hotkey_sheet = ['Cross Platform Behaviors'] hotkey_cols = ['behavior', 'context', 'kind', 'actionctrlctrlcmdonmac', 'chromeos', 'descriptionfortranslation'] hotkey_data = FetchSpreadsheetFeeds(client, HOTKEY_SPREADSHEET_KEY, hotkey_sheet, hotkey_cols) action_to_id = {} id_to_behavior = {} # (behavior, action) result = [] for line in hotkey_data['Cross Platform Behaviors']: if (not line.get('chromeos')) or (line.get('kind') != 'Key'): continue action = ToKeys(line['actionctrlctrlcmdonmac']) if not action: continue behavior = line['behavior'].strip() description = line.get('descriptionfortranslation') result.append((behavior, action, description)) return result def UniqueBehaviors(hotkey_data): """Retrieves a sorted list of unique behaviors from |hotkey_data|.""" return sorted(set((behavior, description) for (behavior, _, description) in hotkey_data), cmp=lambda x, y: cmp(ToMessageName(x[0]), ToMessageName(y[0]))) def GetPath(path_from_src): """Returns the absolute path of the specified path.""" path = os.path.join(os.path.dirname(__file__), '../..', path_from_src) if not os.path.isfile(path): print 
'WARNING: %s does not exist. Maybe moved or renamed?' % path return path def OutputFile(outpath, snippet): """Output the snippet into the specified path.""" out = file(outpath, 'w') out.write(COPYRIGHT_HEADER + '\n') out.write(snippet) print 'Output ' + os.path.normpath(outpath) def RewriteFile(start, end, original_dir, original_filename, snippet, outdir=None): """Replaces a part of the specified file with snippet and outputs it.""" original_path = GetPath(os.path.join(original_dir, original_filename)) original = file(original_path, 'r') original_content = original.read() original.close() if outdir: outpath = os.path.join(outdir, original_filename) else: outpath = original_path out = file(outpath, 'w') rx = re.compile(r'%s\n.*?%s\n' % (re.escape(start), re.escape(end)), re.DOTALL) new_content = re.sub(rx, '%s\n%s%s\n' % (start, snippet, end), original_content) out.write(new_content) out.close() print 'Output ' + os.path.normpath(outpath) def OutputJson(keyboard_glyph_data, hotkey_data, layouts, var_name, outdir): """Outputs the keyboard overlay data as a JSON file.""" action_to_id = {} for (behavior, action, _) in hotkey_data: i18nContent = Toi18nContent(behavior) action_to_id[action] = i18nContent data = {'keyboardGlyph': keyboard_glyph_data, 'shortcut': action_to_id, 'layouts': layouts, 'inputMethodIdToOverlayId': INPUT_METHOD_ID_TO_OVERLAY_ID} if not outdir: outdir = JS_OUTDIR outpath = GetPath(os.path.join(outdir, JS_FILENAME)) json_data = json.dumps(data, sort_keys=True, indent=2) # Remove redundant spaces after ',' json_data = json_data.replace(', \n', ',\n') # Replace double quotes with single quotes to avoid lint warnings. json_data = json_data.replace('\"', '\'') snippet = 'var %s = %s;\n' % (var_name, json_data) OutputFile(outpath, snippet) def OutputGrd(hotkey_data, outdir): """Outputs a part of messages in the grd file.""" snippet = cStringIO.StringIO() for (behavior, description) in UniqueBehaviors(hotkey_data): # Do not generate message for 'Show wrench menu'. It is handled manually # based on branding. if behavior == 'Show wrench menu': continue snippet.write(GRD_SNIPPET_TEMPLATE % (ToMessageName(behavior), ToMessageDesc(description), behavior)) RewriteFile(GRD_START, GRD_END, GRD_OUTDIR, GRD_FILENAME, snippet.getvalue(), outdir) def OutputCC(hotkey_data, outdir): """Outputs a part of code in the C++ file.""" snippet = cStringIO.StringIO() for (behavior, _) in UniqueBehaviors(hotkey_data): message_name = ToMessageName(behavior) output = CC_SNIPPET_TEMPLATE % (Toi18nContent(behavior), message_name) # Break the line if the line is longer than 80 characters if len(output) > 80: output = output.replace(' ' + message_name, '\n %s' % message_name) snippet.write(output) RewriteFile(CC_START, CC_END, CC_OUTDIR, CC_FILENAME, snippet.getvalue(), outdir) def main(): options = ParseOptions() client = InitClient(options) hotkey_data = FetchHotkeyData(client) if options.js: keyboard_glyph_data = FetchKeyboardGlyphData(client) if options.js: layouts = FetchLayoutsData(client) OutputJson(keyboard_glyph_data, hotkey_data, layouts, 'keyboardOverlayData', options.outdir) if options.grd: OutputGrd(hotkey_data, options.outdir) if options.cc: OutputCC(hotkey_data, options.outdir) if __name__ == '__main__': main()
ds-hwang/chromium-crosswalk
tools/gen_keyboard_overlay_data/gen_keyboard_overlay_data.py
Python
bsd-3-clause
17,124
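# A small self-contained sketch (Python 3; names are illustrative and not part
# of the Chromium script above) reproducing the naming transforms that
# SplitBehavior, ToMessageName and Toi18nContent document:
#   'Activate last tab' -> IDS_KEYBOARD_OVERLAY_ACTIVATE_LAST_TAB
#   'Activate last tab' -> keyboardOverlayActivateLastTab
import re

def split_behavior(behavior):
    # Same split rule as SplitBehavior: spaces plus a few punctuation characters.
    return [x for x in re.split('[ ()"-.,]', behavior) if x]

def to_message_name(behavior):
    return 'IDS_KEYBOARD_OVERLAY_' + '_'.join(s.upper() for s in split_behavior(behavior))

def to_i18n_content(behavior):
    segments = [s.lower() for s in split_behavior(behavior)]
    return 'keyboardOverlay' + ''.join(s[:1].upper() + s[1:] for s in segments)

assert to_message_name('Close tab') == 'IDS_KEYBOARD_OVERLAY_CLOSE_TAB'
assert to_i18n_content('Activate last tab') == 'keyboardOverlayActivateLastTab'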
# uncompyle6 version 2.9.10
# Python bytecode 2.7 (62211)
# Decompiled from: Python 3.6.0b2 (default, Oct 11 2016, 05:27:10)
# [GCC 6.2.0 20161005]
# Embedded file name: Dialog.py
from Tkinter import *
from Tkinter import _cnfmerge

if TkVersion <= 3.6:
    DIALOG_ICON = 'warning'
else:
    DIALOG_ICON = 'questhead'

class Dialog(Widget):

    def __init__(self, master=None, cnf={}, **kw):
        cnf = _cnfmerge((cnf, kw))
        self.widgetName = '__dialog__'
        Widget._setup(self, master, cnf)
        self.num = self.tk.getint(self.tk.call('tk_dialog', self._w, cnf['title'], cnf['text'], cnf['bitmap'], cnf['default'], *cnf['strings']))
        try:
            Widget.destroy(self)
        except TclError:
            pass

    def destroy(self):
        pass


def _test():
    d = Dialog(None, {'title': 'File Modified',
       'text': 'File "Python.h" has been modified since the last time it was saved. Do you want to save it before exiting the application?',
       'bitmap': DIALOG_ICON,
       'default': 0,
       'strings': ('Save File', 'Discard Changes', 'Return to Editor')})
    print d.num
    return


if __name__ == '__main__':
    t = Button(None, {'text': 'Test', 'command': _test, Pack: {}})
    q = Button(None, {'text': 'Quit', 'command': t.quit, Pack: {}})
    t.mainloop()
DarthMaulware/EquationGroupLeaks
Leak #5 - Lost In Translation/windows/Resources/Python/Core/Lib/lib-tk/Dialog.py
Python
unlicense
1,325
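# For comparison only: a hedged Python 3 sketch of the same three-button prompt
# via the stdlib descendant of this legacy widget, tkinter.dialog.Dialog, which
# still accepts the same cnf keys (title/text/bitmap/default/strings) and
# exposes .num. The prompt text here is invented.
from tkinter import Tk
from tkinter.dialog import Dialog

def ask_save(root):
    d = Dialog(root, {'title': 'File Modified',
                      'text': 'Save changes before exiting?',
                      'bitmap': 'questhead',
                      'default': 0,
                      'strings': ('Save File', 'Discard Changes', 'Return to Editor')})
    return d.num  # index of the button that was pressed

if __name__ == '__main__':
    root = Tk()
    root.withdraw()
    print(ask_save(root))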
# -*- coding: utf-8 -*- ############################################################################## # # Copyright (C) 2015 Compassion CH (http://www.compassion.ch) # Releasing children from poverty in Jesus' name # @author: Emanuel Cino <ecino@compassion.ch> # # The licence is in the file __manifest__.py # ############################################################################## from . import b2s_image
eicher31/compassion-modules
sbc_compassion/controllers/__init__.py
Python
agpl-3.0
426
from distutils.core import setup setup(name='thonmux', packages=['thonmux'], version='0.0.6', description='Interact with tmux in a pythonic way', author='Gabriel Lima', author_email='ewilazarus@gmail.com', url='https://github.com/ewilazarus/thonmux', download_url='https://github.com/ewilazarus/thonmux/tarball/0.0.6', keywords=['tmux', 'api'], classifiers=[])
ewilazarus/thonmux
setup.py
Python
mit
416
class BundleConfiguration(object): def PIPELINE_CSS(self): return { 'client': { 'source_filenames': [ 'font-awesome/css/font-awesome.css', 'css/client.less', ], 'output_filename': 'css/client.css', }, } def PIPELINE_JS(self): return { 'client': { 'source_filenames': [ 'js/client.browserify.js', ], 'output_filename': 'js/client.js', }, }
mythmon/edwin
edwin/bundles.py
Python
mpl-2.0
584
# -*- coding: utf-8 -*-

"""
Framework-independent wrappers.

Those functions are proxies for the framework's functions.
"""

from __future__ import absolute_import, unicode_literals, print_function

import chardet


def _is_flask():
    try:
        from flask import current_app
        current_app.name
    except (ImportError, RuntimeError):
        return False
    else:
        return True


def _is_pyramid():
    return False  # TODO: implement this


FRAMEWORK = None

def framework_name():
    # `global` is required here: framework_name assigns to FRAMEWORK, and without
    # the declaration the assignment would shadow the module-level cache and raise
    # UnboundLocalError on the `is None` check.
    global FRAMEWORK
    if FRAMEWORK is None:
        if _is_flask():
            FRAMEWORK = "flask"
        elif _is_pyramid():
            FRAMEWORK = "pyramid"
        else:
            # Only raise when no framework was detected; raising unconditionally
            # would discard a successful detection.
            raise RuntimeError("Unknown Framework")
    return FRAMEWORK


class Result:

    def __init__(self, context=None, code=200):
        self.context = context or {}
        self.flash = []
        # `redirect` must be None or a tuple: (view_name, view_kwargs)
        self.redirect = None
        self.code = code


def redirect_to_url(url):
    if framework_name() == "flask":
        import flask
        return flask.redirect(url)
    if framework_name() == "pyramid":
        from pyramid.httpexceptions import HTTPFound
        return HTTPFound(location=url)


def decode(data, encoding=None):
    if isinstance(data, unicode):
        return data
    if encoding:
        return data.decode(encoding)
    try:
        return data.decode('utf-8')
    except UnicodeDecodeError:
        encoding = chardet.detect(data)['encoding']
        if not encoding:
            return "(Binary data)"
        return data.decode(encoding)
vivekanand1101/fresque
fresque/lib/utils.py
Python
agpl-3.0
1,578
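# A short usage sketch for decode() above (assumes Python 2 and an installed
# chardet, per the module's own imports; the import path follows the file path
# fresque/lib/utils.py). It walks the fallback chain: explicit encoding ->
# utf-8 -> chardet guess -> "(Binary data)".
from fresque.lib.utils import decode

assert decode(u'caf\xe9') == u'caf\xe9'                     # already unicode
assert decode('caf\xc3\xa9') == u'caf\xe9'                  # valid utf-8 bytes
assert decode('caf\xe9', encoding='latin-1') == u'caf\xe9'  # explicit encoding
print(decode('\x00\xff\xfe\xfd'))  # falls through to chardet, may be "(Binary data)"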
#!/usr/bin/env python import codecs import imp import os from setuptools import find_packages, setup ROOT = os.path.realpath(os.path.join(os.path.dirname(__file__))) app = imp.load_source('s_analyzer', os.path.join(ROOT, 'src', 's_analyzer', '__init__.py')) def read(*files): content = [] for f in files: content.extend(codecs.open(os.path.join(ROOT, 'src', 'requirements', f), 'r').read().split()) return content django_requires = read('django.pip') tests_requires = read('testing.pip') install_requires = read('install.any.pip') dev_requires = install_requires + django_requires + tests_requires + read('develop.pip') setup(name=app.NAME, version=app.get_version(), url='http://pypi.python.org/pypi/%s/' % app.NAME, author='Alessio Iacarelli', author_email='iaga84@gmail.com', license="MIT License", description='Securities Analyzer', package_dir={'': 'src'}, packages=find_packages('src'), include_package_data=True, install_requires=install_requires, tests_require=tests_requires, extras_require={ 'dev': dev_requires, 'test': tests_requires, 'django': django_requires, }, classifiers=[ 'Environment :: Web Environment', 'Framework :: Django', 'Operating System :: OS Independent', 'Programming Language :: Python :: 2.7', 'Intended Audience :: Developers' ] )
iaga84/securities-analyzer
setup.py
Python
mit
1,471
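# An illustration of the read() helper above, re-implemented over in-memory
# strings so it runs without the src/requirements/ tree (the file contents
# shown are invented): it simply concatenates whitespace-separated tokens
# from each requirements file it is given.
def read_tokens(*contents):
    tokens = []
    for text in contents:
        tokens.extend(text.split())
    return tokens

assert read_tokens("Django>=1.6\ndjango-filter", "pytest pytest-django") == \
    ['Django>=1.6', 'django-filter', 'pytest', 'pytest-django']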
import sys from modelBuild import * from constants import * from PyQt4.QtGui import QPixmap, QImage, QPen, QGraphicsPixmapItem, QGraphicsLineItem from PyQt4.QtCore import pyqtSignal from kkitUtil import * from setsolver import * from PyQt4 import QtSvg from moose import utils class GraphicalView(QtGui.QGraphicsView): def __init__(self, modelRoot,parent,border,layoutPt,createdItem): QtGui.QGraphicsView.__init__(self,parent) self.state = None self.move = False self.resetState() self.connectionSign = None self.connectionSource = None self.expectedConnection = None self.selections = [] self.connector = None self.connectionSignImagePath = "../gui/icons/connection.png" self.connectionSignImage = QImage(self.connectionSignImagePath) # self.expectedConnectionPen = QPen() self.setScene(parent) self.modelRoot = modelRoot self.sceneContainerPt = parent self.setDragMode(QtGui.QGraphicsView.RubberBandDrag) self.itemSelected = False self.customrubberBand = None self.rubberbandWidth = 0 self.rubberbandHeight = 0 self.moved = False self.showpopupmenu = False self.popupmenu4rlines = True self.border = 6 self.setRenderHints(QtGui.QPainter.Antialiasing) self.layoutPt = layoutPt # All the object which are stacked on the scene are listed self.stackOrder = self.sceneContainerPt.items(Qt.Qt.DescendingOrder) #From stackOrder selecting only compartment self.cmptStackorder = [i for i in self.stackOrder if isinstance(i,ComptItem)] self.viewBaseType = " " self.iconScale = 1 self.arrowsize = 2 self.defaultComptsize = 5 self.connectorlist = {"plot": None ,"clone": None,"move": None,"delete": None} self.setHorizontalScrollBarPolicy(PyQt4.QtCore.Qt.ScrollBarAlwaysOn) self.setVerticalScrollBarPolicy(PyQt4.QtCore.Qt.ScrollBarAlwaysOn) def setRefWidget(self,path): self.viewBaseType = path def resizeEvent(self, event): # print event.size().width(),event.size().height() self.fitInView(self.sceneContainerPt.itemsBoundingRect().x()-10,self.sceneContainerPt.itemsBoundingRect().y()-10,self.sceneContainerPt.itemsBoundingRect().width()+20,self.sceneContainerPt.itemsBoundingRect().height()+20,Qt.Qt.IgnoreAspectRatio) #print("Called =>", event) return def resolveCompartmentInteriorAndBoundary(self, item, position): bound = item.rect().adjusted(3,3,-3,-3) return COMPARTMENT_INTERIOR if bound.contains(item.mapFromScene(position)) else COMPARTMENT_BOUNDARY def resetState(self): self.state = { "press" : { "mode" : INVALID , "item" : None , "sign" : None , "pos" : None } , "move" : { "happened": False } , "release" : { "mode" : INVALID , "item" : None , "sign" : None } } def resolveItem(self, items, position): solution = None for item in items: if hasattr(item, "name"): #print(item.name) if item.name == ITEM: return (item, ITEM) if item.name == COMPARTMENT: solution = (item, self.resolveCompartmentInteriorAndBoundary(item, position)) for item in items: # if isinstance(item, QtGui.QGraphicsPixmapItem): # return (item, CONNECTOR) if isinstance(item, QtSvg.QGraphicsSvgItem): return (item, CONNECTOR) if isinstance(item, QtGui.QGraphicsPolygonItem): return (item, CONNECTION) if solution is None: return (None, EMPTY) return solution def editorMousePressEvent(self, event): # self.deselectSelections() # if self.state["press"]["item"] is not None: # self.state["press"]["item"].setSelected(False) # self.resetState() if event.buttons() == QtCore.Qt.LeftButton: self.clickPosition = self.mapToScene(event.pos()) (item, itemType) = self.resolveItem(self.items(event.pos()), self.clickPosition) self.state["press"]["mode"] = VALID 
self.state["press"]["item"] = item self.state["press"]["type"] = itemType self.state["press"]["pos"] = event.pos() #If connector exist and if mousePress on Compartment interior, # then removing any connect if exist if itemType == COMPARTMENT_INTERIOR: self.removeConnector() elif itemType == ITEM: self.showConnector(self.state["press"]["item"]) # self.layoutPt.plugin.mainWindow.objectEditSlot(self.state["press"]["item"].mobj, False) else: self.resetState() comptList = [] for k, v in self.layoutPt.qGraCompt.items(): comptList.append(v) if len(comptList) > 1: popupmenu = QtGui.QMenu('PopupMenu', self) popupmenu.addAction("LinearLayout", lambda : handleCollisions(comptList, moveX, self.layoutPt)) popupmenu.addAction("VerticalLayout" ,lambda : handleCollisions(comptList, moveMin, self.layoutPt )) popupmenu.exec_(self.mapToGlobal(event.pos())) def editorMouseMoveEvent(self, event): if self.state["press"]["mode"] == INVALID: self.state["move"]["happened"] = False return # if self.move: # initial = self.mapToScene(self.state["press"]["pos"]) # final = self.mapToScene(event.pos()) # displacement = final - initial # #print("Displacement", displacement) # for item in self.selectedItems: # if isinstance(item, KineticsDisplayItem) and not isinstance(item,ComptItem) and not isinstance(item,CplxItem): # item.moveBy(displacement.x(), displacement.y()) # self.layoutPt.positionChange(item.mobj.path) # self.state["press"]["pos"] = event.pos() # return self.state["move"]["happened"] = True itemType = self.state["press"]["type"] item = self.state["press"]["item"] if itemType == CONNECTOR: ''' connecting 2 object is removed and movement is impled''' actionType = str(item.data(0).toString()) if actionType == "move": QtGui.QApplication.setOverrideCursor(QtGui.QCursor(QtCore.Qt.CrossCursor)) initial = item.parent().pos() final = self.mapToScene(event.pos()) displacement = final-initial if not isinstance(item.parent(),FuncItem) and not isinstance(item.parent(),CplxItem): self.removeConnector() item.parent().moveBy(displacement.x(), displacement.y()) if isinstance(item.parent(),PoolItem): for funcItem in item.parent().childItems(): if isinstance(funcItem,FuncItem): self.layoutPt.updateArrow(funcItem) self.state["press"]["pos"] = event.pos() self.layoutPt.positionChange(item.parent().mobj) if actionType == "clone": pixmap = QtGui.QPixmap(24, 24) pixmap.fill(QtCore.Qt.transparent) painter = QtGui.QPainter() painter.begin(pixmap) painter.setRenderHints(painter.Antialiasing) pen = QtGui.QPen(QtGui.QBrush(QtGui.QColor("black")), 1) pen.setWidthF(1.5) painter.setPen(pen) painter.drawLine(12,7,12,17) painter.drawLine(7,12,17,12) painter.end() #self.setCursor(QtGui.QCursor(pixmap)) QtGui.QApplication.setOverrideCursor(QtGui.QCursor(pixmap)) if itemType == ITEM: self.drawExpectedConnection(event) if itemType == COMPARTMENT_BOUNDARY: initial = self.mapToScene(self.state["press"]["pos"]) final = self.mapToScene(event.pos()) displacement = final - initial item.moveBy(displacement.x(), displacement.y()) self.layoutPt.positionChange(item.mobj.path) self.state["press"]["pos"] = event.pos() if itemType == COMPARTMENT_INTERIOR: if self.customrubberBand == None: self.customrubberBand = QtGui.QRubberBand(QtGui.QRubberBand.Rectangle,self) self.customrubberBand.show() startingPosition = self.state["press"]["pos"] endingPosition = event.pos() displacement = endingPosition - startingPosition x0 = startingPosition.x() x1 = endingPosition.x() y0 = startingPosition.y() y1 = endingPosition.y() if displacement.x() < 0 : x0,x1= x1,x0 if 
displacement.y() < 0 : y0,y1= y1,y0 self.customrubberBand.setGeometry(QtCore.QRect(QtCore.QPoint(x0, y0), QtCore.QSize(abs(displacement.x()), abs(displacement.y())))) # if itemType == COMPARTMENT: # rubberband selection # if itemType == COMPARTMENT_BOUNDARY: # if itemType == ITEM: # dragging the item def editorMouseReleaseEvent(self, event): if self.move: self.move = False self.setCursor(Qt.Qt.ArrowCursor) if self.state["press"]["mode"] == INVALID: self.state["release"]["mode"] = INVALID self.resetState() return self.clickPosition = self.mapToScene(event.pos()) (item, itemType) = self.resolveItem(self.items(event.pos()), self.clickPosition) self.state["release"]["mode"] = VALID self.state["release"]["item"] = item self.state["release"]["type"] = itemType clickedItemType = self.state["press"]["type"] if clickedItemType == ITEM: if not self.state["move"]["happened"]: self.showConnector(self.state["press"]["item"]) self.layoutPt.plugin.mainWindow.objectEditSlot(self.state["press"]["item"].mobj, True) # compartment's rectangle size is calculated depending on children #self.layoutPt.comptChilrenBoundingRect() l = self.modelRoot if self.modelRoot.find('/',1) > 0: l = self.modelRoot[0:self.modelRoot.find('/',1)] linfo = moose.Annotator(l+'/info') for k, v in self.layoutPt.qGraCompt.items(): rectcompt = v.childrenBoundingRect() if linfo.modeltype == "new_kkit": #if newly built model then compartment is size is fixed for some size. comptBoundingRect = v.boundingRect() if not comptBoundingRect.contains(rectcompt): self.layoutPt.updateCompartmentSize(v) else: #if already built model then compartment size depends on max and min objects v.setRect(rectcompt.x()-10,rectcompt.y()-10,(rectcompt.width()+20),(rectcompt.height()+20)) else: if isinstance(self.state["release"]["item"], KineticsDisplayItem): if not moose.element(self.state["press"]["item"].mobj) == moose.element(self.state["release"]["item"].mobj): self.populate_srcdes( self.state["press"]["item"].mobj , self.state["release"]["item"].mobj ) else: pass self.removeExpectedConnection() self.removeConnector() if clickedItemType == CONNECTOR: actionType = str(self.state["press"]["item"].data(0).toString()) if actionType == "move": QtGui.QApplication.setOverrideCursor(QtGui.QCursor(Qt.Qt.ArrowCursor)) if actionType == "delete": self.removeConnector() pixmap = QtGui.QPixmap(24, 24) pixmap.fill(QtCore.Qt.transparent) painter = QtGui.QPainter() painter.begin(pixmap) painter.setRenderHints(painter.Antialiasing) pen = QtGui.QPen(QtGui.QBrush(QtGui.QColor("black")), 1) pen.setWidthF(1.5) painter.setPen(pen) painter.drawLine(8,8,16,16) painter.drawLine(8,16,16,8) painter.end() QtGui.QApplication.setOverrideCursor(QtGui.QCursor(pixmap)) reply = QtGui.QMessageBox.question(self, "Deleting Object","Do want to delete object and its connections", QtGui.QMessageBox.Yes | QtGui.QMessageBox.No) if reply == QtGui.QMessageBox.Yes: #delete solver first as topology is changing deleteSolver(self.modelRoot) self.deleteObj([item.parent()]) QtGui.QApplication.restoreOverrideCursor() else: QtGui.QApplication.restoreOverrideCursor() elif actionType == "plot": element = moose.element(item.parent().mobj.path) if isinstance (element,moose.PoolBase): self.graph = moose.element(self.modelRoot+'/data/graph_0') tablePath = utils.create_table_path(moose.element(self.modelRoot), self.graph, element, "Conc") table = utils.create_table(tablePath, element, "Conc","Table2") elif actionType == "clone": if self.state["move"]["happened"]: 
QtGui.QApplication.setOverrideCursor(QtGui.QCursor(Qt.Qt.ArrowCursor)) self.state["press"]["item"].parent().mobj cloneObj = self.state["press"]["item"] posWrtComp = self.mapToScene(event.pos()) itemAtView = self.sceneContainerPt.itemAt(self.mapToScene(event.pos())) self.removeConnector() if isinstance(itemAtView,ComptItem): #Solver should be deleted ## if there is change in 'Topology' of the model ## or if copy has to made then oject should be in unZombify mode deleteSolver(self.modelRoot) lKey = [key for key, value in self.layoutPt.qGraCompt.iteritems() if value == itemAtView][0] iR = 0 iP = 0 t = moose.element(cloneObj.parent().mobj) name = t.name if isinstance(cloneObj.parent().mobj,PoolBase): retValue = self.objExist(lKey.path,name,iP) if retValue != None: name += retValue pmooseCp = moose.copy(t,lKey.path,name,1) #if moose.copy failed then check for path != '/' if pmooseCp.path != '/': ct = moose.element(pmooseCp) concInit = pmooseCp.concInit[0] #this is b'cos if pool copied across the comptartment, #then it doesn't calculate nInit according but if one set #concInit then it would, just a hack ct.concInit = concInit #itemAtView = self.state["release"]["item"] poolObj = moose.element(ct) poolinfo = moose.element(poolObj.path+'/info') qGItem =PoolItem(poolObj,itemAtView) self.layoutPt.mooseId_GObj[poolObj] = qGItem bgcolor = getRandColor() color,bgcolor = getColor(poolinfo) qGItem.setDisplayProperties(posWrtComp.x(),posWrtComp.y(),color,bgcolor) self.emit(QtCore.SIGNAL("dropped"),poolObj) if isinstance(cloneObj.parent().mobj,ReacBase): retValue = self.objExist(lKey.path,name,iR) if retValue != None : name += retValue rmooseCp = moose.copy(t,lKey.path,name,1) if rmooseCp.path != '/': ct = moose.element(rmooseCp) #itemAtView = self.state["release"]["item"] reacObj = moose.element(ct) reacinfo = moose.Annotator(reacObj.path+'/info') qGItem = ReacItem(reacObj,itemAtView) self.layoutPt.mooseId_GObj[reacObj] = qGItem posWrtComp = self.mapToScene(event.pos()) qGItem.setDisplayProperties(posWrtComp.x(),posWrtComp.y(),"white", "white") self.emit(QtCore.SIGNAL("dropped"),reacObj) else: if itemAtView == None: QtGui.QMessageBox.information(None,'Dropping Not possible ','Dropping not allowed outside the compartment',QtGui.QMessageBox.Ok) else: srcdesString = ((self.state["release"]["item"]).mobj).className QtGui.QMessageBox.information(None,'Dropping Not possible','Dropping on \'{srcdesString}\' not allowed'.format(srcdesString = srcdesString),QtGui.QMessageBox.Ok) if clickedItemType == CONNECTION: popupmenu = QtGui.QMenu('PopupMenu', self) popupmenu.addAction("Delete", lambda : self.deleteConnection(item)) popupmenu.exec_(self.mapToGlobal(event.pos())) if clickedItemType == COMPARTMENT_BOUNDARY: if not self.state["move"]["happened"]: self.layoutPt.plugin.mainWindow.objectEditSlot(self.state["press"]["item"].mobj, True) self.resetState() if clickedItemType == COMPARTMENT_INTERIOR: if self.state["move"]["happened"]: startingPosition = self.state["press"]["pos"] endingPosition = event.pos() displacement = endingPosition - startingPosition x0 = startingPosition.x() x1 = endingPosition.x() y0 = startingPosition.y() y1 = endingPosition.y() if displacement.x() < 0 : x0,x1= x1,x0 if displacement.y() < 0 : y0,y1= y1,y0 #print "kkitview COMPARTMENT_INTERIOR",x0,y0 self.selectedItems = selectedItems = self.items(x0,y0,abs(displacement.x()), abs(displacement.y())) # print("Rect => ", self.customrubberBand.rect()) # selectedItems = self.items(self.mapToScene(self.customrubberBand.rect()).boundingRect()) 
self.selectSelections(selectedItems) for item in selectedItems: if isinstance(item, KineticsDisplayItem) and not isinstance(item,ComptItem): item.setSelected(True) #print("Rubberband Selections => ", self.selections) self.customrubberBand.hide() self.customrubberBand = None popupmenu = QtGui.QMenu('PopupMenu', self) popupmenu.addAction("Delete", lambda : self.deleteSelections(x0,y0,x1,y1)) popupmenu.addAction("Zoom", lambda : self.zoomSelections(x0,y0,x1,y1)) popupmenu.addAction("Move", lambda : self.moveSelections()) popupmenu.exec_(self.mapToGlobal(event.pos())) # self.delete = QtGui.QAction(self.tr('delete'), self) # self.connect(self.delete, QtCore.SIGNAL('triggered()'), self.deleteItems) # self.zoom = QtGui.QAction(self.tr('zoom'), self) # self.connect(self.zoom, QtCore.SIGNAL('triggered()'), self.zoomItem) # self.move = QtGui.QAction(self.tr('move'), self) # self.connect(self.move, QtCore.SIGNAL('triggered()'), self.moveItem) # else: # self.layoutPt.plugin.mainWindow.objectEditSlot(self.state["press"]["item"].mobj, True) self.resetState() def drawExpectedConnection(self, event): self.connectionSource = self.state["press"]["item"] sourcePoint = self.connectionSource.mapToScene( self.connectionSource.boundingRect().center() ) destinationPoint = self.mapToScene(event.pos()) if self.expectedConnection is None: self.expectedConnection = QGraphicsLineItem( sourcePoint.x() , sourcePoint.y() , destinationPoint.x() , destinationPoint.y() ) self.expectedConnection.setPen(QPen(Qt.Qt.DashLine)) self.sceneContainerPt.addItem(self.expectedConnection) else: self.expectedConnection.setLine( sourcePoint.x() , sourcePoint.y() , destinationPoint.x() , destinationPoint.y() ) ''' print " drawExpectedConnection ()() ",self.state["item"]["press"].mobj sourcePoint = self.connectionSource.mapToScene( self.connectionSource.boundingRect().center() ) destinationPoint = self.mapToScene(event.pos()) if self.expectedConnection is None: self.expectedConnection = QGraphicsLineItem( sourcePoint.x() , sourcePoint.y() , destinationPoint.x() , destinationPoint.y() ) self.expectedConnection.setPen(QPen(Qt.Qt.DashLine)) self.sceneContainerPt.addItem(self.expectedConnection) else: self.expectedConnection.setLine( sourcePoint.x() , sourcePoint.y() , destinationPoint.x() , destinationPoint.y() ) ''' def removeExpectedConnection(self): #print("removeExpectedConnection") self.sceneContainerPt.removeItem(self.expectedConnection) self.expectedConnection = None self.connectionSource = None def removeConnector(self): try: for l,k in self.connectorlist.items(): if k is not None: self.sceneContainerPt.removeItem(k) self.connectorlist[l] = None ''' if self.connectionSign is not None: # self.sceneContainerPt.removeItem(self.connectionSign) # self.connectionSign = None ''' except: #print("Exception received!") pass # if self.connectionSign is not None: # print "self.connectionSign ",self.connectionSign # self.sceneContainerPt.removeItem(self.connectionSign) # self.connectionSign = None def showConnector(self, item): self.removeConnector() self.connectionSource = item rectangle = item.boundingRect() for l in self.connectorlist.keys(): self.xDisp = 0 self.yDisp = 0 self.connectionSign = None if isinstance(item.mobj,PoolBase) or isinstance(item.mobj,ReacBase): if l == "clone": self.connectionSign = QtSvg.QGraphicsSvgItem('icons/clone.svg') self.connectionSign.setData(0, QtCore.QVariant("clone")) self.connectionSign.setParent(self.connectionSource) self.connectionSign.setScale( (1.0 * rectangle.height()) / 
self.connectionSign.boundingRect().height() ) position = item.mapToParent(rectangle.bottomLeft()) self.xDisp = 15 self.yDisp = 2 self.connectionSign.setToolTip("Click and drag to clone the object") self.connectorlist["clone"] = self.connectionSign if isinstance(item.mobj,PoolBase): if l == "plot": self.connectionSign = QtSvg.QGraphicsSvgItem('icons/plot.svg') self.connectionSign.setData(0, QtCore.QVariant("plot")) self.connectionSign.setParent(self.connectionSource) self.connectionSign.setScale( (1.0 * rectangle.height()) / self.connectionSign.boundingRect().height() ) position = item.mapToParent(rectangle.topLeft()) self.xDisp = 15 self.yDisp = 0 self.connectionSign.setToolTip("plot the object") self.connectorlist["plot"] = self.connectionSign if l == "move": self.connectionSign = QtSvg.QGraphicsSvgItem('icons/move.svg') self.connectionSign.setData(0, QtCore.QVariant("move")) self.connectionSign.setParent(self.connectionSource) self.connectionSign.setToolTip("Drag to connect.") self.connectionSign.setScale( (1.0 * rectangle.height()) / self.connectionSign.boundingRect().height() ) position = item.mapToParent(rectangle.topRight()) self.connectorlist["move"] = self.connectionSign elif l == "delete": self.connectionSign = QtSvg.QGraphicsSvgItem('icons/delete.svg') self.connectionSign.setParent(self.connectionSource) self.connectionSign.setData(0, QtCore.QVariant("delete")) self.connectionSign.setScale( (1.0 * rectangle.height()) / self.connectionSign.boundingRect().height() ) position = item.mapToParent(rectangle.bottomRight()) self.connectionSign.setToolTip("Delete the object") self.connectorlist["delete"] = self.connectionSign if self.connectionSign != None: self.connectionSign.setFlag(QtGui.QGraphicsItem.ItemIsSelectable,True) self.connectionSign.setParentItem(item.parentItem()) self.connectionSign.setPos(0.0,0.0) self.connectionSign.moveBy( position.x()-self.xDisp , position.y() +self.yDisp - rectangle.height() / 2.0 ) def objExist(self,path,name,index): if index == 0: fPath = path+'/'+name else: fPath = path+'/'+name+'_'+str(index) if moose.exists(fPath): index += 1 return self.objExist(path,name,index) else: if index == 0: return else: return ('_'+str(index)) def selectSelections(self, selections): for selection in selections : if isinstance(selection, KineticsDisplayItem): self.selections.append(selection) def deselectSelections(self): for selection in self.selections: selection.setSelected(False) self.selections = [] def mousePressEvent(self, event): selectedItem = None if self.viewBaseType == "editorView": return self.editorMousePressEvent(event) elif self.viewBaseType == "runView": pos = event.pos() item = self.itemAt(pos) if item: itemClass = type(item).__name__ if ( itemClass!='ComptItem' and itemClass != 'QGraphicsPolygonItem' and itemClass != 'QGraphicsEllipseItem' and itemClass != 'QGraphicsRectItem'): self.setCursor(Qt.Qt.CrossCursor) mimeData = QtCore.QMimeData() mimeData.setText(item.mobj.name) mimeData.setData("text/plain", "") mimeData.data =(self.modelRoot,item.mobj) drag = QtGui.QDrag(self) drag.setMimeData(mimeData) dropAction = drag.start(QtCore.Qt.MoveAction) self.setCursor(Qt.Qt.ArrowCursor) def mouseMoveEvent(self,event): if self.viewBaseType == "editorView": return self.editorMouseMoveEvent(event) def mouseReleaseEvent(self, event): if self.viewBaseType == "editorView": for preSelectedItem in self.sceneContainerPt.selectedItems(): preSelectedItem.setSelected(False) return self.editorMouseReleaseEvent(event) return if self.state["press"]["mode"] == CONNECTION: 
desPos =self.mapToScene(event.pos()) destination = self.items(event.pos()) src = self.state["press"]["item"] des = [j for j in destination if isinstance(j,KineticsDisplayItem)] if len(des): self.populate_srcdes(src.mobj,des[0].mobj) #print " pop", self.layoutPt.srcdesConnection() self.setCursor(Qt.Qt.ArrowCursor) QtGui.QGraphicsView.mouseReleaseEvent(self, event) '''if(self.customrubberBand): self.customrubberBand.hide() self.customrubberBand = 0 if event.button() == QtCore.Qt.LeftButton and self.itemSelected == False : self.endingPos = event.pos() self.endScenepos = self.mapToScene(self.endingPos) self.rubberbandWidth = (self.endScenepos.x()-self.startScenepos.x()) self.rubberbandHeight = (self.endScenepos.y()-self.startScenepos.y()) selecteditems = self.sceneContainerPt.selectedItems() #print "selecteditems ",selecteditems if self.rubberbandWidth != 0 and self.rubberbandHeight != 0 and len(selecteditems) != 0 : self.showpopupmenu = True ''' #self.itemSelected = False ''' if self.showpopupmenu: popupmenu = QtGui.QMenu('PopupMenu', self) self.delete = QtGui.QAction(self.tr('delete'), self) self.connect(self.delete, QtCore.SIGNAL('triggered()'), self.deleteItems) self.zoom = QtGui.QAction(self.tr('zoom'), self) self.connect(self.zoom, QtCore.SIGNAL('triggered()'), self.zoomItem) self.move = QtGui.QAction(self.tr('move'), self) self.connect(self.move, QtCore.SIGNAL('triggered()'), self.moveItem) popupmenu.addAction(self.delete) popupmenu.addAction(self.zoom) popupmenu.addAction(self.move) popupmenu.exec_(event.globalPos()) self.showpopupmenu = False ''' def updateItemTransformationMode(self, on): for v in self.sceneContainerPt.items(): #v.setFlag(QtGui.QGraphicsItem.ItemIgnoresTransformations,on) if( not isinstance(v,ComptItem)): #if ( isinstance(v, PoolItem) or isinstance(v, ReacItem) or isinstance(v, EnzItem) or isinstance(v, CplxItem) ): if isinstance(v,KineticsDisplayItem): v.setFlag(QtGui.QGraphicsItem.ItemIgnoresTransformations, on) def keyPressEvent(self,event): key = event.key() self.removeConnector() if (key == Qt.Qt.Key_A and (event.modifiers() & Qt.Qt.ShiftModifier)): # 'A' fits the view to iconScale factor itemignoreZooming = False self.updateItemTransformationMode(itemignoreZooming) self.fitInView(self.sceneContainerPt.itemsBoundingRect().x()-10,self.sceneContainerPt.itemsBoundingRect().y()-10,self.sceneContainerPt.itemsBoundingRect().width()+20,self.sceneContainerPt.itemsBoundingRect().height()+20,Qt.Qt.IgnoreAspectRatio) self.layoutPt.drawLine_arrow(itemignoreZooming=False) elif (key == Qt.Qt.Key_Less or key == Qt.Qt.Key_Minus):# and (event.modifiers() & Qt.Qt.ShiftModifier)): # '<' key. zooms-in to iconScale factor self.iconScale *= 0.8 self.updateScale( self.iconScale ) elif (key == Qt.Qt.Key_Greater or key == Qt.Qt.Key_Plus):# and (event.modifiers() & Qt.Qt.ShiftModifier)): # '>' key. zooms-out to iconScale factor self.iconScale *= 1.25 self.updateScale( self.iconScale ) elif (key == Qt.Qt.Key_Period): # '.' 
key, lower case for '>' zooms in self.scale(1.1,1.1) elif (key == Qt.Qt.Key_Comma): # ',' key, lower case for '<' zooms in self.scale(1/1.1,1/1.1) elif (key == Qt.Qt.Key_A): # 'a' fits the view to initial value where iconscale=1 self.iconScale = 1 self.updateScale( self.iconScale ) self.fitInView(self.sceneContainerPt.itemsBoundingRect().x()-10,self.sceneContainerPt.itemsBoundingRect().y()-10,self.sceneContainerPt.itemsBoundingRect().width()+20,self.sceneContainerPt.itemsBoundingRect().height()+20,Qt.Qt.IgnoreAspectRatio) def updateScale( self, scale ): for item in self.sceneContainerPt.items(): if isinstance(item,KineticsDisplayItem): item.refresh(scale) #iteminfo = item.mobj.path+'/info' #xpos,ypos = self.positioninfo(iteminfo) xpos = item.scenePos().x() ypos = item.scenePos().y() if isinstance(item,ReacItem) or isinstance(item,EnzItem) or isinstance(item,MMEnzItem): item.setGeometry(xpos,ypos, item.gobj.boundingRect().width(), item.gobj.boundingRect().height()) elif isinstance(item,CplxItem): item.setGeometry(item.gobj.boundingRect().width()/2,item.gobj.boundingRect().height(), item.gobj.boundingRect().width(), item.gobj.boundingRect().height()) elif isinstance(item,PoolItem) or isinstance(item, PoolItemCircle): item.setGeometry(xpos, ypos,item.gobj.boundingRect().width() +PoolItem.fontMetrics.width(' '), item.gobj.boundingRect().height()) item.bg.setRect(0, 0, item.gobj.boundingRect().width()+PoolItem.fontMetrics.width(' '), item.gobj.boundingRect().height()) self.layoutPt.drawLine_arrow(itemignoreZooming=False) self.layoutPt.comptChilrenBoundingRect() #compartment width is resize according apart from calculating boundingRect # for k, v in self.layoutPt.qGraCompt.items(): # rectcompt = v.childrenBoundingRect() # comptPen = v.pen() # comptWidth = self.defaultComptsize*self.iconScale # comptPen.setWidth(comptWidth) # v.setPen(comptPen) # v.setRect(rectcompt.x()-comptWidth,rectcompt.y()-comptWidth,(rectcompt.width()+2*comptWidth),(rectcompt.height()+2*comptWidth)) def moveSelections(self): self.setCursor(Qt.Qt.CrossCursor) self.move = True return def GrVfitinView(self): #print " here in GrVfitinView" itemignoreZooming = False self.layoutPt.updateItemTransformationMode(itemignoreZooming) self.fitInView(self.sceneContainerPt.itemsBoundingRect().x()-10,self.sceneContainerPt.itemsBoundingRect().y()-10,self.sceneContainerPt.itemsBoundingRect().width()+20,self.sceneContainerPt.itemsBoundingRect().height()+20,Qt.Qt.IgnoreAspectRatio) self.layoutPt.drawLine_arrow(itemignoreZooming=False) def deleteSelections(self,x0,y0,x1,y1): if( x1-x0 > 0 and y1-y0 >0): self.rubberbandlist = self.sceneContainerPt.items(self.mapToScene(QtCore.QRect(x0, y0, x1 - x0, y1 - y0)).boundingRect(), Qt.Qt.IntersectsItemShape) for unselectitem in self.rubberbandlist: if unselectitem.isSelected() == True: unselectitem.setSelected(0) self.deleteObj(self.rubberbandlist) # deleteSolver(self.layoutPt.modelRoot) # for item in (qgraphicsitem for qgraphicsitem in self.rubberbandlist): # #First Loop to remove all the enz b'cos if parent (which is a Pool) is removed, # #then it will created problem at qgraphicalitem not having parent. 
# #So first delete enz and then delete pool # if isinstance(item,MMEnzItem) or isinstance(item,EnzItem) or isinstance(item,CplxItem): # self.deleteItem(item) # for item in (qgraphicsitem for qgraphicsitem in self.rubberbandlist): # if not (isinstance(item,MMEnzItem) or isinstance(item,EnzItem) or isinstance(item,CplxItem)): # if isinstance(item,PoolItem): # plot = moose.wildcardFind(self.layoutPt.modelRoot+'/data/graph#/#') # for p in plot: # if len(p.neighbors['requestOut']): # if item.mobj.path == moose.element(p.neighbors['requestOut'][0]).path: # p.tick = -1 # moose.delete(p) # self.layoutPt.plugin.view.getCentralWidget().plotWidgetContainer.plotAllData() # self.deleteItem(item) self.selections = [] def deleteObj(self,item): self.rubberbandlist = item deleteSolver(self.layoutPt.modelRoot) for item in (qgraphicsitem for qgraphicsitem in self.rubberbandlist): #First Loop to remove all the enz b'cos if parent (which is a Pool) is removed, #then it will created problem at qgraphicalitem not having parent. #So first delete enz and then delete pool if isinstance(item,MMEnzItem) or isinstance(item,EnzItem) or isinstance(item,CplxItem): self.deleteItem(item) for item in (qgraphicsitem for qgraphicsitem in self.rubberbandlist): if not (isinstance(item,MMEnzItem) or isinstance(item,EnzItem) or isinstance(item,CplxItem)): if isinstance(item,PoolItem): plot = moose.wildcardFind(self.layoutPt.modelRoot+'/data/graph#/#') for p in plot: if len(p.neighbors['requestOut']): if item.mobj.path == moose.element(p.neighbors['requestOut'][0]).path: p.tick = -1 moose.delete(p) self.layoutPt.plugin.view.getCentralWidget().plotWidgetContainer.plotAllData() self.deleteItem(item) def deleteObject2line(self,qpolygonline,src,des,endt): object2lineInfo = self.layoutPt.object2line[des] if len(object2lineInfo) == 1: for polygon,objdes,endtype,numL in object2lineInfo: if polygon == qpolygonline and objdes == src and endtype == endt: del(self.layoutPt.object2line[des]) else: print " check this condition when is len is single and else condition",qpolygonline, objdes,endtype else: n = 0 for polygon,objdes,endtype,numL in object2lineInfo: if polygon == qpolygonline and objdes == src and endtype == endt: tup = object2lineInfo[:n]+object2lineInfo[n+1:] self.layoutPt.object2line[des] = tup #d[keyNo].append((a,b,c)) else: n = n+1 def deleteConnection(self,item): #Delete moose connection, i.e one can click on connection arrow and delete the connection deleteSolver(self.layoutPt.modelRoot) msgIdforDeleting = " " if isinstance(item,QtGui.QGraphicsPolygonItem): src = self.layoutPt.lineItem_dict[item] lineItem_value = self.layoutPt.lineItem_dict[item] i = iter(lineItem_value) source = i.next() destination = i.next() endt = i.next() numl = i.next() self.deleteObject2line(item,source,destination,endt) self.deleteObject2line(item,destination,source,endt) try: del self.layoutPt.lineItem_dict[item] except KeyError: pass srcZero = [k for k, v in self.layoutPt.mooseId_GObj.iteritems() if v == src[0]] srcOne = [k for k, v in self.layoutPt.mooseId_GObj.iteritems() if v == src[1]] if isinstance (moose.element(srcZero[0]),moose.MMenz): gItem =self.layoutPt.mooseId_GObj[moose.element(srcZero[0])] # This block is done b'cos for MMenz while loaded from ReadKKit, the msg # from parent pool to Enz is different as compared to direct model building. # if ReadKKit get the msg from parent Pool, else from MMenz itself. 
# Rules: If some one tries to remove connection parent Pool to Enz # then delete entire enz itself, this is True for enz and mmenz for msg in srcZero[0].msgIn: if moose.element(msg.e1.path) == moose.element(srcOne[0].path): if src[2] == "t": if msg.destFieldsOnE2[0] == "enzDest": # delete indivial msg if later adding parent is possible # msgIdforDeleting = msg # moose.delete(msgIdforDeleting) # self.sceneContainerPt.removeItem(item) self.deleteItem(gItem) return else: self.getMsgId(src,srcZero,srcOne,item) moose.delete(msgIdforDeleting) self.sceneContainerPt.removeItem(item) setupItem(self.modelRoot,self.layoutPt.srcdesConnection) for msg in moose.element(srcZero[0].parent).msgIn: if moose.element(msg.e2.path) == moose.element(srcZero[0].parent.path): if src[2] == 't': if len(msg.destFieldsOnE1) > 0: if msg.destFieldsOnE1[0] == "enzDest": # delete indivial msg if later adding parent is possible # msgIdforDeleting = msg # moose.delete(msgIdforDeleting) # self.sceneContainerPt.removeItem(item) self.deleteItem(gItem) return else: self.getMsgId(src,srcZero,srcOne,item) elif isinstance (moose.element(srcZero[0]),moose.Enz): self.getMsgId(src,srcZero,srcOne,item) elif isinstance(moose.element(srcZero[0]),moose.Function): v = moose.Variable(srcZero[0].path+'/x') found = False for msg in v.msgIn: if moose.element(msg.e1.path) == moose.element(srcOne[0].path): if src[2] == "sts": if msg.destFieldsOnE2[0] == "input": msgIdforDeleting = msg self.deleteSceneObj(msgIdforDeleting,item) found = True if not found: for msg in srcZero[0].msgOut: if moose.element(msg.e2.path) == moose.element(srcOne[0].path): if src[2] == "stp": if msg.destFieldsOnE2[0] == "setN": gItem =self.layoutPt.mooseId_GObj[moose.element(srcZero[0])] self.deleteItem(gItem) self.deleteSceneObj(msg,item) return elif msg.destFieldsOnE2[0] == "setNumKf" or msg.destFieldsOnE2[0] == "setConcInit" or msg.destFieldsOnE2[0]=="increment": msgIdforDeleting = msg self.deleteSceneObj(msgIdforDeleting,item) gItem =self.layoutPt.mooseId_GObj[moose.element(srcZero[0])] self.deleteItem(gItem) else: self.getMsgId(src,srcZero,srcOne,item) def deleteSceneObj(self,msgIdforDeleting,item): moose.delete(msgIdforDeleting) self.sceneContainerPt.removeItem(item) setupItem(self.modelRoot,self.layoutPt.srcdesConnection) def getMsgId(self,src,srcZero,srcOne,item): for msg in srcZero[0].msgOut: msgIdforDeleting = " " if moose.element(msg.e2.path) == moose.element(srcOne[0].path): if src[2] == 's': # substrate connection for R,E if msg.srcFieldsOnE1[0] == "subOut": msgIdforDeleting = msg self.deleteSceneObj(msgIdforDeleting,item) return elif src[2] == 'p': # product connection for R,E if msg.srcFieldsOnE1[0] == "prdOut": msgIdforDeleting = msg self.deleteSceneObj(msgIdforDeleting,item) return elif src[2] == 't': if msg.srcFieldsOnE1[0] == "enzOut": gItem =self.layoutPt.mooseId_GObj[moose.element(srcZero[0])] self.deleteItem(gItem) return elif src[2] == 'tab': #stimulation Table connection if msg.srcFieldsOnE1[0] == "output": msgIdforDeleting = msg self.deleteSceneObj(msgIdforDeleting,item) return def deleteItem(self,item): #delete Items self.layoutPt.plugin.mainWindow.objectEditSlot('/', False) if isinstance(item,KineticsDisplayItem): if moose.exists(item.mobj.path): # if isinstance(item.mobj,Function): # print " inside the function" # for items in moose.element(item.mobj.path).children: # print items if isinstance(item,PoolItem) or isinstance(item,BufPool): # pool is item is removed, then check is made if its a parent to any # enz if 'yes', then enz and its 
                    # connections are removed before the Pool itself is removed.
                    for items in moose.element(item.mobj.path).children:
                        if isinstance(moose.element(items), EnzBase):
                            gItem = self.layoutPt.mooseId_GObj[moose.element(items)]
                            for l in self.layoutPt.object2line[gItem]:
                                # Check whether the connection still exists on the
                                # scene or has already been removed by some other
                                # means. E.g. when an Enz and a pool are connected
                                # in both directions, removing the Enz removes the
                                # connection, and trying to remove it again while
                                # deleting the pool makes QGraphicsScene complain
                                # "item scene is different from this scene".
                                sceneItems = self.sceneContainerPt.items()
                                if l[0] in sceneItems:
                                    # Delete the connection attached to the Enz.
                                    self.sceneContainerPt.removeItem(l[0])
                            moose.delete(items)
                            self.sceneContainerPt.removeItem(gItem)
                    # If a pool/bufpool is an input to a function and the
                    # pool/bufpool is removed, the function is removed as well.
                    for msg in moose.element(item.mobj.path).msgOut:
                        if (moose.element(msg.e2.path).className == "Variable" and msg.destFieldsOnE2[0] == "input"):
                            funcp = moose.element(msg.e2.path).parent
                            pool = moose.element(funcp).parent
                            self.deleteItem(self.layoutPt.mooseId_GObj[funcp])
                for l in self.layoutPt.object2line[item]:
                    sceneItems = self.sceneContainerPt.items()
                    if l[0] in sceneItems:
                        self.sceneContainerPt.removeItem(l[0])
                self.sceneContainerPt.removeItem(item)
                moose.delete(item.mobj)
                for key, value in self.layoutPt.object2line.items():
                    self.layoutPt.object2line[key] = filter(lambda tup: tup[1] != item, value)
                self.layoutPt.getMooseObj()
                setupItem(self.modelRoot, self.layoutPt.srcdesConnection)

    def zoomSelections(self, x0, y0, x1, y1):
        p0 = self.mapToScene(x0, y0)
        p1 = self.mapToScene(x1, y1)
        self.fitInView(QtCore.QRectF(p0, p1), Qt.Qt.KeepAspectRatio)
        self.deselectSelections()
        return

    def wheelEvent(self, event):
        factor = 1.41 ** (event.delta() / 240.0)
        self.scale(factor, factor)

    def dragEnterEvent(self, event):
        if self.viewBaseType == "editorView":
            if event.mimeData().hasFormat('text/plain'):
                event.acceptProposedAction()
        else:
            pass

    def dragMoveEvent(self, event):
        if self.viewBaseType == "editorView":
            if event.mimeData().hasFormat('text/plain'):
                event.acceptProposedAction()
        else:
            pass

    def eventFilter(self, source, event):
        if self.viewBase == "editorView":
            if (event.type() == QtCore.QEvent.Drop):
                pass
            else:
                pass

    def dropEvent(self, event):
        """Insert an element of the specified class at the drop location.

        Pool and reaction must have a compartment as parent; dropping outside
        a compartment is not allowed. Enz must be dropped on a PoolItem, which
        in turn sits under a compartment.
        """
        if self.viewBaseType == "editorView":
            if not event.mimeData().hasFormat('text/plain'):
                return
            event_pos = event.pos()
            string = str(event.mimeData().text())
            createObj(self.viewBaseType, self, self.modelRoot, string, event_pos, self.layoutPt)

    def populate_srcdes(self, src, des):
        self.modelRoot = self.layoutPt.modelRoot
        callsetupItem = True
        srcClass = moose.element(src).className
        if 'Zombie' in srcClass:
            srcClass = srcClass.split('Zombie')[1]
        desClass = moose.element(des).className
        if 'Zombie' in desClass:
            desClass = desClass.split('Zombie')[1]
        if (
                isinstance(moose.element(src), PoolBase) and
                (isinstance(moose.element(des), ReacBase) or isinstance(moose.element(des), EnzBase))):
            # If one tries to connect a pool to a Reac/Enz as substrate, check
            # whether a product connection to that Reac/Enz already exists. If it
            # does, the new connection is not allowed; the existing msg has to be
            # deleted before reconnecting.
            found = False
            for msg in des.msgOut:
                if moose.element(msg.e2.path) == src:
                    if msg.srcFieldsOnE1[0] == "prdOut":
                        found = True
            if not found:
                moose.connect(des, 'sub', src, 'reac', 'OneToOne')
            else:
                srcdesString = srcClass + ' is already connected as ' + '\'Product\'' + ' to ' + desClass + ' \n \nIf you wish to connect this object then first delete the existing connection'
                QtGui.QMessageBox.information(None, 'Connection Not possible', '{srcdesString}'.format(srcdesString=srcdesString), QtGui.QMessageBox.Ok)
        elif (isinstance(moose.element(src), PoolBase) and (isinstance(moose.element(des), Function))):
            numVariables = des.numVars
            expr = ""
            expr = (des.expr + '+' + 'x' + str(numVariables))
            expr = expr.lstrip("0 +")
            expr = expr.replace(" ", "")
            des.expr = expr
            moose.connect(src, 'nOut', des.x[numVariables], 'input')
        elif (isinstance(moose.element(src), Function) and (moose.element(des).className == "Pool")):
            if ((element(des).parent).className != 'Enz'):
                moose.connect(src, 'valueOut', des, 'increment', 'OneToOne')
            else:
                srcdesString = element(src).className + '-- EnzCplx'
                QtGui.QMessageBox.information(None, 'Connection Not possible', '\'{srcdesString}\' not allowed to connect'.format(srcdesString=srcdesString), QtGui.QMessageBox.Ok)
                callsetupItem = False
        elif (isinstance(moose.element(src), Function) and (moose.element(des).className == "BufPool")):
            moose.connect(src, 'valueOut', des, 'setN', 'OneToOne')
        elif (isinstance(moose.element(src), Function) and (isinstance(moose.element(des), ReacBase))):
            moose.connect(src, 'valueOut', des, 'setNumKf', 'OneToOne')
        elif ((isinstance(moose.element(src), ReacBase) or isinstance(moose.element(src), EnzBase)) and (isinstance(moose.element(des), PoolBase))):
            # Mirror of the substrate case above: the product connection is not
            # allowed if the pool is already connected as substrate.
            found = False
            for msg in src.msgOut:
                if moose.element(msg.e2.path) == des:
                    if msg.srcFieldsOnE1[0] == "subOut":
                        found = True
            if not found:
                moose.connect(src, 'prd', des, 'reac', 'OneToOne')
            else:
                srcdesString = desClass + ' is already connected as ' + '\'Substrate\'' + ' to ' + srcClass + ' \n \nIf you wish to connect this object then first delete the existing connection'
                QtGui.QMessageBox.information(None, 'Connection Not possible', '{srcdesString}'.format(srcdesString=srcdesString), QtGui.QMessageBox.Ok)
        elif (isinstance(moose.element(src), StimulusTable) and (isinstance(moose.element(des), PoolBase))):
            moose.connect(src, 'output', des, 'setConcInit', 'OneToOne')
        else:
            srcString = moose.element(src).className
            desString = moose.element(des).className
            srcdesString = srcString + '--' + desString
            QtGui.QMessageBox.information(None, 'Connection Not possible', '\'{srcdesString}\' not allowed to connect'.format(srcdesString=srcdesString), QtGui.QMessageBox.Ok)
            callsetupItem = False
        if callsetupItem:
            self.layoutPt.getMooseObj()
            setupItem(self.modelRoot, self.layoutPt.srcdesConnection)
self.layoutPt.drawLine_arrow(False)
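
        # A minimal sketch of the moose message pattern this class manipulates,
        # outside the GUI (the paths '/model/A' and '/model/reac' are
        # hypothetical and assume '/model' already exists):
        #
        #   import moose
        #   pool = moose.Pool('/model/A')
        #   reac = moose.Reac('/model/reac')
        #   moose.connect(reac, 'sub', pool, 'reac', 'OneToOne')  # pool as substrate
        #   for msg in reac.msgOut:             # inspect msgs before deleting one
        #       print msg.srcFieldsOnE1[0], moose.element(msg.e2.path).path
        #   # moose.delete(msg) would then remove the selected connection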
dilawar/moose-full
moose-gui/plugins/kkitViewcontrol.py
Python
gpl-2.0
58,605
# -*- coding: utf-8 -*- # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # # Generated code. DO NOT EDIT! # # Snippet for BatchCreateFeatures # NOTE: This snippet has been automatically generated for illustrative purposes only. # It may require modifications to work in your environment. # To install the latest published package dependency, execute the following: # python3 -m pip install google-cloud-aiplatform # [START aiplatform_generated_aiplatform_v1beta1_FeaturestoreService_BatchCreateFeatures_sync] from google.cloud import aiplatform_v1beta1 def sample_batch_create_features(): # Create a client client = aiplatform_v1beta1.FeaturestoreServiceClient() # Initialize request argument(s) requests = aiplatform_v1beta1.CreateFeatureRequest() requests.parent = "parent_value" requests.feature.value_type = "BYTES" requests.feature_id = "feature_id_value" request = aiplatform_v1beta1.BatchCreateFeaturesRequest( parent="parent_value", requests=requests, ) # Make the request operation = client.batch_create_features(request=request) print("Waiting for operation to complete...") response = operation.result() # Handle the response print(response) # [END aiplatform_generated_aiplatform_v1beta1_FeaturestoreService_BatchCreateFeatures_sync]
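
# Since `requests` is a repeated field on BatchCreateFeaturesRequest, several
# CreateFeatureRequest messages can be passed as a list. A sketch with two
# hypothetical feature ids (placeholder values, not tested):
#
#   req_a = aiplatform_v1beta1.CreateFeatureRequest(
#       parent="parent_value",
#       feature=aiplatform_v1beta1.Feature(value_type="BYTES"),
#       feature_id="feature_a",
#   )
#   req_b = aiplatform_v1beta1.CreateFeatureRequest(
#       parent="parent_value",
#       feature=aiplatform_v1beta1.Feature(value_type="BYTES"),
#       feature_id="feature_b",
#   )
#   request = aiplatform_v1beta1.BatchCreateFeaturesRequest(
#       parent="parent_value", requests=[req_a, req_b],
#   )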
googleapis/python-aiplatform
samples/generated_samples/aiplatform_generated_aiplatform_v1beta1_featurestore_service_batch_create_features_sync.py
Python
apache-2.0
1,861
#!/usr/bin/env python
# coding=utf-8
import os
import tempfile
import shutil
import imp

from contextlib import closing
from zipfile import ZipFile, ZIP_DEFLATED


class Provider(object):
    def __init__(self, user, password, database, host, port):
        super(Provider, self).__init__()
        self.user = user
        self.password = password
        self.host = host
        self.port = port
        self.database = database

    @classmethod
    def get_provider(cls, database_name):
        provider_name = '{0}_provider.py'.format(database_name)
        providers = imp.load_source('providers',
                                    os.path.join(os.path.dirname(__file__),
                                                 provider_name))
        return getattr(providers, '{0}Provider'.format(database_name.title()))

    def gen_backup_cmd(self, tmp_file_path):
        # Implemented by concrete providers: returns (tmp_file_path, cmd).
        pass

    def backup(self):
        tmp_file, tmp_file_path = tempfile.mkstemp('.dbacker')
        # mkstemp returns an open file descriptor; close it so it is not leaked.
        os.close(tmp_file)
        try:
            tmp_file_path, cmd = self.gen_backup_cmd(tmp_file_path)
            if os.system(cmd) != 0:
                raise RuntimeError('backup command failed')
            if os.path.isdir(tmp_file_path):
                # The backup produced a directory: pack it into a zip archive.
                try:
                    tmp_file, ziped_tmp_file_path =\
                        tempfile.mkstemp('.dbacker')
                    os.close(tmp_file)
                    z = ZipFile(ziped_tmp_file_path, "w", ZIP_DEFLATED)
                    with closing(z) as tmp_zip_file:
                        for root, dirs, file_paths in os.walk(tmp_file_path):
                            for file_path in file_paths:
                                abs_file_path = os.path.join(root, file_path)
                                zip_file_path =\
                                    abs_file_path[len(tmp_file_path) + len(os.sep):]
                                tmp_zip_file.write(abs_file_path, zip_file_path)
                finally:
                    shutil.rmtree(tmp_file_path)
                tmp_file_path = ziped_tmp_file_path
        except Exception as e:
            # Clean up the temporary file or directory before re-raising.
            if os.path.isdir(tmp_file_path):
                shutil.rmtree(tmp_file_path)
            else:
                os.remove(tmp_file_path)
            raise e
        return tmp_file_path
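
# A hypothetical concrete provider, shown only as a sketch of the expected
# gen_backup_cmd contract (such a class would live in its own file, e.g.
# postgresql_provider.py, so that get_provider('postgresql') can find it;
# the pg_dump flags are not verified against any particular version):
#
#   class PostgresqlProvider(Provider):
#       def gen_backup_cmd(self, tmp_file_path):
#           cmd = 'pg_dump -h {0} -p {1} -U {2} -f {3} {4}'.format(
#               self.host, self.port, self.user, tmp_file_path, self.database)
#           return tmp_file_path, cmd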
exherb/dbacker
dbacker/providers/provider.py
Python
mit
2,369
import matplotlib.pyplot as plt
import numpy as np
import os

from inference import error_fn, infer_interactions, choose_J_from_general_form, solve_true_covariance_from_true_J
from pitchfork_langevin import jacobian_pitchfork, gen_multitraj, steadystate_pitchfork
from settings import DEFAULT_PARAMS, FOLDER_OUTPUT, TAU
from statistical_formulae import collect_multitraj_info, build_diffusion_from_langevin, build_covariance_at_step
from visualize_matrix import plot_matrix

"""
Assess error in JC + (JC)^T + D = 0 as num_traj varies, since C is computed from num_traj
"""

# TODO plot heatmaps fn for each step in get_errors_from_one_traj


def get_errors_for_replicates(num_traj=500, num_steps=500, replicates=10, params=DEFAULT_PARAMS, noise=1.0):
    true_errors = np.zeros(replicates)
    infer_errors = np.zeros(replicates)
    # get true J
    fp_mid = steadystate_pitchfork(params)[:, 0]
    J_true = jacobian_pitchfork(params, fp_mid, print_eig=False)
    for k in xrange(replicates):
        trials_states, _ = gen_multitraj(num_traj, init_cond=fp_mid, num_steps=num_steps, params=params,
                                         noise=noise)
        D, C_est, J_infer = collect_multitraj_info(trials_states, params, noise, alpha=0.01, tol=1e-6)
        true_errors[k] = error_fn(C_est, D, J_true)
        infer_errors[k] = error_fn(C_est, D, J_infer)
    return true_errors, infer_errors


def get_errors_from_one_traj(covperiod=5, num_traj=500, num_steps=5000, params=DEFAULT_PARAMS, noise=0.1,
                             infer=True, alpha=0.01):
    # get points to measure at
    num_pts = int(num_steps / covperiod)
    covsteps = [a * covperiod for a in xrange(num_pts)]
    plotperiod = covperiod * 100
    # prep error vectors
    true_errors = np.zeros(num_pts)
    infer_errors = None
    J_infer_errors = None
    if infer:
        infer_errors = np.zeros(num_pts)
        J_infer_errors = np.zeros(num_pts)
    J_U0choice_errors = np.zeros(num_pts)
    cov_lyap_errors = np.zeros(num_pts)
    # get true J and D
    fp_mid = steadystate_pitchfork(params)[:, 0]
    J_true = jacobian_pitchfork(params, fp_mid, print_eig=False)
    D = build_diffusion_from_langevin(params, noise)
    C_lyap = solve_true_covariance_from_true_J(J_true, D)
    print 'norm of C_lyap', np.linalg.norm(C_lyap)
    plot_matrix(C_lyap, method='C_lyap', title_mod='static', plotdir=FOLDER_OUTPUT)
    # compute long traj
    multitraj, _ = gen_multitraj(num_traj, init_cond=fp_mid, num_steps=num_steps, params=params, noise=noise)
    # get error for all covsteps
    for idx, step in enumerate(covsteps):
        C_est = build_covariance_at_step(multitraj, params, covstep=step)
        J_U0choice = choose_J_from_general_form(C_est, D, scale=0.0)
        true_errors[idx] = error_fn(C_est, D, J_true)
        J_U0choice_errors[idx] = np.linalg.norm(J_true - J_U0choice)
        print step, plotperiod, step % plotperiod
        if step % plotperiod == 0:
            plot_matrix(C_est, method='C_data', title_mod='step%d' % step, plotdir=FOLDER_OUTPUT)
        if infer:
            print "inferring..."
            J_infer = infer_interactions(C_est, D, alpha=alpha, tol=1e-6)
            print "done"
            infer_errors[idx] = error_fn(C_est, D, J_infer)
            J_infer_errors[idx] = np.linalg.norm(J_true - J_infer)
        cov_lyap_errors[idx] = np.linalg.norm(C_lyap - C_est)
        print idx, step, np.linalg.norm(C_est), cov_lyap_errors[idx]
    return covsteps, true_errors, infer_errors, J_infer_errors, J_U0choice_errors, cov_lyap_errors


if __name__ == '__main__':
    # run settings
    many_reps_endpt = False
    one_rep_long = True

    if many_reps_endpt:
        reps = 10
        mod = 'num_steps'
        assert mod in ['num_traj', 'num_steps']
        num_traj_set = [int(a) for a in np.linspace(10, 600, 6)]
        num_steps_set = [int(a) for a in np.linspace(10, 2000, 5)]
        param_vary_set = {'num_traj': num_traj_set, 'num_steps': num_steps_set}[mod]
        true_errors_mid = np.zeros(len(param_vary_set))
        true_errors_sd = np.zeros(len(param_vary_set))
        infer_errors_mid = np.zeros(len(param_vary_set))
        infer_errors_sd = np.zeros(len(param_vary_set))
        # compute errors and do inference
        for i, elem in enumerate(param_vary_set):
            print "point %d (%s %d)" % (i, mod, elem)
            if mod == 'num_traj':
                true_errors, infer_errors = get_errors_for_replicates(num_traj=elem, replicates=reps, noise=0.1)
            else:
                true_errors, infer_errors = get_errors_for_replicates(num_steps=elem, replicates=reps, noise=0.1)
            true_errors_mid[i] = np.mean(true_errors)
            true_errors_sd[i] = np.std(true_errors)
            infer_errors_mid[i] = np.mean(infer_errors)
            infer_errors_sd[i] = np.std(infer_errors)
        # plot
        plt.errorbar(param_vary_set, true_errors_mid, yerr=true_errors_sd, label='true J errors', fmt='o')
        plt.errorbar(param_vary_set, infer_errors_mid, yerr=infer_errors_sd, label='infer J errors', fmt='o')
        plt.title('Reconstruction error (true J vs inferred) for varying %s' % mod)
        plt.xlabel('%s' % mod)
        plt.ylabel('F-norm of JC + (JC)^T + D')
        plt.legend()
        plt.show()

    # alternate: errors for one long multi-traj at increasing timepoints
    infer = False
    if one_rep_long:
        alpha = 1e-8
        num_steps = 5000
        num_traj = 500  # 5000
        covsteps, true_errors, infer_errors, J_infer_errors, J_U0choice_errors, cov_errors = \
            get_errors_from_one_traj(alpha=alpha, num_steps=num_steps, num_traj=num_traj, infer=infer)

        # plotting
        f = plt.figure(figsize=(16, 8))
        plt.plot(covsteps, true_errors, '--k', label='true error')
        if infer:
            plt.plot(covsteps, infer_errors, '--b', label='inference error')
        plt.title('Reconstruction error (true J vs inference alpha=%.1e) for 1 multitraj (num_steps %s, num_traj %d)'
                  % (alpha, num_steps, num_traj))
        plt.xlabel('step')
        plt.ylabel('F-norm of JC + (JC)^T + D')
        plt.legend()
        plt.savefig(FOLDER_OUTPUT + os.sep + 'fnorm_reconstruct_flucdiss_a%.1e_traj%d_steps%d_tau%.2f.png'
                    % (alpha, num_traj, num_steps, TAU))
        # J error
        f2 = plt.figure(figsize=(16, 8))
        if infer:
            plt.plot(covsteps, J_infer_errors, '--b', label='inference error')
        plt.plot(covsteps, J_U0choice_errors, '--r', label='general form + choose U=0 error')
        plt.title('Reconstruction error of J (U=0 choice vs inference alpha=%.1e) for 1 multitraj (num_steps %s, num_traj %d)'
                  % (alpha, num_steps, num_traj))
        plt.xlabel('step')
        plt.ylabel('F-norm of J_true - J_method')
        plt.legend()
        plt.savefig(FOLDER_OUTPUT + os.sep + 'fnorm_reconstruct_J_a%.1e_traj%d_steps%d_tau%.2f.png'
                    % (alpha, num_traj, num_steps, TAU))
        plt.close()
        # C_lyap vs C_data error
        f3 = plt.figure(figsize=(16, 8))
        plt.plot(covsteps, cov_errors, '--b', label='cov error')
        plt.title('Reconstruction error of C_lyap from asymptotic C_data for 1 multitraj (num_steps %s, num_traj %d)'
                  % (num_steps, num_traj))
        plt.xlabel('step')
        plt.ylabel('F-norm of C_lyap - C_data')
        plt.legend()
        plt.savefig(FOLDER_OUTPUT + os.sep + 'fnorm_reconstruct_C_lyap_traj%d_steps%d_tau%.2f.png'
                    % (num_traj, num_steps, TAU))
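
# For reference: solve_true_covariance_from_true_J presumably solves the steady
# state Lyapunov relation J C + C J^T + D = 0; an equivalent one-liner with
# scipy (assuming J_true is stable) would be:
#
#   from scipy.linalg import solve_lyapunov
#   C_lyap = solve_lyapunov(J_true, -D)   # solves J C + C J^T = -D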
mattsmart/biomodels
transcriptome_clustering/baseline_reconstruction_error.py
Python
mit
7,422
""" Definition of TreeNode: class TreeNode: def __init__(self, val): this.val = val this.left, this.right = None, None """ class Solution: """ @param root: The root of binary tree. @return: An integer """ def maxDepth(self, root): # write your code here if root == None: return 0 left = self.maxDepth(root.left) right = self.maxDepth(root.right) return max(left, right) + 1
shootsoft/practice
lintcode/NineChapters/03/maximum-depth-of-binary-tree.py
Python
apache-2.0
469
import os from nose import SkipTest import copy import numpy as np import pandas as pd from pandas import DataFrame from pandas.util.testing import TestCase import pandas.util.testing as tm # this is a mess. Getting failures on a python 2.7 build with # whenever we try to import jinja, whether it's installed or not. # so we're explicitly skipping that one *before* we try to import # jinja. We still need to export the imports as globals, # since importing Styler tries to import jinja2. job_name = os.environ.get('JOB_NAME', None) if job_name == '27_slow_nnet_LOCALE': raise SkipTest("No jinja") try: # Do try except on just jinja, so the only reason # We skip is if jinja can't import, not something else import jinja2 # noqa except ImportError: raise SkipTest("No Jinja2") from pandas.core.style import Styler # noqa class TestStyler(TestCase): def setUp(self): np.random.seed(24) self.s = DataFrame({'A': np.random.permutation(range(6))}) self.df = DataFrame({'A': [0, 1], 'B': np.random.randn(2)}) self.f = lambda x: x self.g = lambda x: x def h(x, foo='bar'): return pd.Series(['color: %s' % foo], index=x.index, name=x.name) self.h = h self.styler = Styler(self.df) self.attrs = pd.DataFrame({'A': ['color: red', 'color: blue']}) self.dataframes = [ self.df, pd.DataFrame({'f': [1., 2.], 'o': ['a', 'b'], 'c': pd.Categorical(['a', 'b'])}) ] def test_update_ctx(self): self.styler._update_ctx(self.attrs) expected = {(0, 0): ['color: red'], (1, 0): ['color: blue']} self.assertEqual(self.styler.ctx, expected) def test_update_ctx_flatten_multi(self): attrs = DataFrame({"A": ['color: red; foo: bar', 'color: blue; foo: baz']}) self.styler._update_ctx(attrs) expected = {(0, 0): ['color: red', ' foo: bar'], (1, 0): ['color: blue', ' foo: baz']} self.assertEqual(self.styler.ctx, expected) def test_update_ctx_flatten_multi_traliing_semi(self): attrs = DataFrame({"A": ['color: red; foo: bar;', 'color: blue; foo: baz;']}) self.styler._update_ctx(attrs) expected = {(0, 0): ['color: red', ' foo: bar'], (1, 0): ['color: blue', ' foo: baz']} self.assertEqual(self.styler.ctx, expected) def test_copy(self): s2 = copy.copy(self.styler) self.assertTrue(self.styler is not s2) self.assertTrue(self.styler.ctx is s2.ctx) # shallow self.assertTrue(self.styler._todo is s2._todo) self.styler._update_ctx(self.attrs) self.styler.highlight_max() self.assertEqual(self.styler.ctx, s2.ctx) self.assertEqual(self.styler._todo, s2._todo) def test_deepcopy(self): s2 = copy.deepcopy(self.styler) self.assertTrue(self.styler is not s2) self.assertTrue(self.styler.ctx is not s2.ctx) self.assertTrue(self.styler._todo is not s2._todo) self.styler._update_ctx(self.attrs) self.styler.highlight_max() self.assertNotEqual(self.styler.ctx, s2.ctx) self.assertEqual(s2._todo, []) self.assertNotEqual(self.styler._todo, s2._todo) def test_clear(self): s = self.df.style.highlight_max()._compute() self.assertTrue(len(s.ctx) > 0) self.assertTrue(len(s._todo) > 0) s.clear() self.assertTrue(len(s.ctx) == 0) self.assertTrue(len(s._todo) == 0) def test_render(self): df = pd.DataFrame({"A": [0, 1]}) style = lambda x: pd.Series(["color: red", "color: blue"], name=x.name) s = Styler(df, uuid='AB').apply(style).apply(style, axis=1) s.render() # it worked? def test_render_double(self): df = pd.DataFrame({"A": [0, 1]}) style = lambda x: pd.Series(["color: red; border: 1px", "color: blue; border: 2px"], name=x.name) s = Styler(df, uuid='AB').apply(style) s.render() # it worked? 
def test_set_properties(self): df = pd.DataFrame({"A": [0, 1]}) result = df.style.set_properties(color='white', size='10px')._compute().ctx # order is deterministic v = ["color: white", "size: 10px"] expected = {(0, 0): v, (1, 0): v} self.assertEqual(result.keys(), expected.keys()) for v1, v2 in zip(result.values(), expected.values()): self.assertEqual(sorted(v1), sorted(v2)) def test_set_properties_subset(self): df = pd.DataFrame({'A': [0, 1]}) result = df.style.set_properties(subset=pd.IndexSlice[0, 'A'], color='white')._compute().ctx expected = {(0, 0): ['color: white']} self.assertEqual(result, expected) def test_empty_index_name_doesnt_display(self): # https://github.com/pydata/pandas/pull/12090#issuecomment-180695902 df = pd.DataFrame({'A': [1, 2], 'B': [3, 4], 'C': [5, 6]}) result = df.style._translate() expected = [[{'class': 'blank', 'type': 'th', 'value': ''}, {'class': 'col_heading level0 col0', 'display_value': 'A', 'type': 'th', 'value': 'A'}, {'class': 'col_heading level0 col1', 'display_value': 'B', 'type': 'th', 'value': 'B'}, {'class': 'col_heading level0 col2', 'display_value': 'C', 'type': 'th', 'value': 'C'}]] self.assertEqual(result['head'], expected) def test_index_name(self): # https://github.com/pydata/pandas/issues/11655 df = pd.DataFrame({'A': [1, 2], 'B': [3, 4], 'C': [5, 6]}) result = df.set_index('A').style._translate() expected = [[{'class': 'blank', 'type': 'th', 'value': ''}, {'class': 'col_heading level0 col0', 'type': 'th', 'value': 'B', 'display_value': 'B'}, {'class': 'col_heading level0 col1', 'type': 'th', 'value': 'C', 'display_value': 'C'}], [{'class': 'col_heading level2 col0', 'type': 'th', 'value': 'A'}, {'class': 'blank', 'type': 'th', 'value': ''}, {'class': 'blank', 'type': 'th', 'value': ''}]] self.assertEqual(result['head'], expected) def test_multiindex_name(self): # https://github.com/pydata/pandas/issues/11655 df = pd.DataFrame({'A': [1, 2], 'B': [3, 4], 'C': [5, 6]}) result = df.set_index(['A', 'B']).style._translate() expected = [[{'class': 'blank', 'type': 'th', 'value': ''}, {'class': 'blank', 'type': 'th', 'value': ''}, {'class': 'col_heading level0 col0', 'type': 'th', 'value': 'C', 'display_value': 'C'}], [{'class': 'col_heading level2 col0', 'type': 'th', 'value': 'A'}, {'class': 'col_heading level2 col1', 'type': 'th', 'value': 'B'}, {'class': 'blank', 'type': 'th', 'value': ''}]] self.assertEqual(result['head'], expected) def test_numeric_columns(self): # https://github.com/pydata/pandas/issues/12125 # smoke test for _translate df = pd.DataFrame({0: [1, 2, 3]}) df.style._translate() def test_apply_axis(self): df = pd.DataFrame({'A': [0, 0], 'B': [1, 1]}) f = lambda x: ['val: %s' % x.max() for v in x] result = df.style.apply(f, axis=1) self.assertEqual(len(result._todo), 1) self.assertEqual(len(result.ctx), 0) result._compute() expected = {(0, 0): ['val: 1'], (0, 1): ['val: 1'], (1, 0): ['val: 1'], (1, 1): ['val: 1']} self.assertEqual(result.ctx, expected) result = df.style.apply(f, axis=0) expected = {(0, 0): ['val: 0'], (0, 1): ['val: 1'], (1, 0): ['val: 0'], (1, 1): ['val: 1']} result._compute() self.assertEqual(result.ctx, expected) result = df.style.apply(f) # default result._compute() self.assertEqual(result.ctx, expected) def test_apply_subset(self): axes = [0, 1] slices = [pd.IndexSlice[:], pd.IndexSlice[:, ['A']], pd.IndexSlice[[1], :], pd.IndexSlice[[1], ['A']], pd.IndexSlice[:2, ['A', 'B']]] for ax in axes: for slice_ in slices: result = self.df.style.apply(self.h, axis=ax, subset=slice_, foo='baz')._compute().ctx 
expected = dict(((r, c), ['color: baz']) for r, row in enumerate(self.df.index) for c, col in enumerate(self.df.columns) if row in self.df.loc[slice_].index and col in self.df.loc[slice_].columns) self.assertEqual(result, expected) def test_applymap_subset(self): def f(x): return 'foo: bar' slices = [pd.IndexSlice[:], pd.IndexSlice[:, ['A']], pd.IndexSlice[[1], :], pd.IndexSlice[[1], ['A']], pd.IndexSlice[:2, ['A', 'B']]] for slice_ in slices: result = self.df.style.applymap(f, subset=slice_)._compute().ctx expected = dict(((r, c), ['foo: bar']) for r, row in enumerate(self.df.index) for c, col in enumerate(self.df.columns) if row in self.df.loc[slice_].index and col in self.df.loc[slice_].columns) self.assertEqual(result, expected) def test_empty(self): df = pd.DataFrame({'A': [1, 0]}) s = df.style s.ctx = {(0, 0): ['color: red'], (1, 0): ['']} result = s._translate()['cellstyle'] expected = [{'props': [['color', ' red']], 'selector': 'row0_col0'}, {'props': [['', '']], 'selector': 'row1_col0'}] self.assertEqual(result, expected) def test_bar(self): df = pd.DataFrame({'A': [0, 1, 2]}) result = df.style.bar()._compute().ctx expected = { (0, 0): ['width: 10em', ' height: 80%'], (1, 0): ['width: 10em', ' height: 80%', 'background: linear-gradient(' '90deg,#d65f5f 50.0%, transparent 0%)'], (2, 0): ['width: 10em', ' height: 80%', 'background: linear-gradient(' '90deg,#d65f5f 100.0%, transparent 0%)'] } self.assertEqual(result, expected) result = df.style.bar(color='red', width=50)._compute().ctx expected = { (0, 0): ['width: 10em', ' height: 80%'], (1, 0): ['width: 10em', ' height: 80%', 'background: linear-gradient(' '90deg,red 25.0%, transparent 0%)'], (2, 0): ['width: 10em', ' height: 80%', 'background: linear-gradient(' '90deg,red 50.0%, transparent 0%)'] } self.assertEqual(result, expected) df['C'] = ['a'] * len(df) result = df.style.bar(color='red', width=50)._compute().ctx self.assertEqual(result, expected) df['C'] = df['C'].astype('category') result = df.style.bar(color='red', width=50)._compute().ctx self.assertEqual(result, expected) def test_bar_0points(self): df = pd.DataFrame([[1, 2, 3], [4, 5, 6], [7, 8, 9]]) result = df.style.bar()._compute().ctx expected = {(0, 0): ['width: 10em', ' height: 80%'], (0, 1): ['width: 10em', ' height: 80%'], (0, 2): ['width: 10em', ' height: 80%'], (1, 0): ['width: 10em', ' height: 80%', 'background: linear-gradient(90deg,#d65f5f 50.0%,' ' transparent 0%)'], (1, 1): ['width: 10em', ' height: 80%', 'background: linear-gradient(90deg,#d65f5f 50.0%,' ' transparent 0%)'], (1, 2): ['width: 10em', ' height: 80%', 'background: linear-gradient(90deg,#d65f5f 50.0%,' ' transparent 0%)'], (2, 0): ['width: 10em', ' height: 80%', 'background: linear-gradient(90deg,#d65f5f 100.0%' ', transparent 0%)'], (2, 1): ['width: 10em', ' height: 80%', 'background: linear-gradient(90deg,#d65f5f 100.0%' ', transparent 0%)'], (2, 2): ['width: 10em', ' height: 80%', 'background: linear-gradient(90deg,#d65f5f 100.0%' ', transparent 0%)']} self.assertEqual(result, expected) result = df.style.bar(axis=1)._compute().ctx expected = {(0, 0): ['width: 10em', ' height: 80%'], (0, 1): ['width: 10em', ' height: 80%', 'background: linear-gradient(90deg,#d65f5f 50.0%,' ' transparent 0%)'], (0, 2): ['width: 10em', ' height: 80%', 'background: linear-gradient(90deg,#d65f5f 100.0%' ', transparent 0%)'], (1, 0): ['width: 10em', ' height: 80%'], (1, 1): ['width: 10em', ' height: 80%', 'background: linear-gradient(90deg,#d65f5f 50.0%' ', transparent 0%)'], (1, 2): ['width: 10em', ' height: 
80%', 'background: linear-gradient(90deg,#d65f5f 100.0%' ', transparent 0%)'], (2, 0): ['width: 10em', ' height: 80%'], (2, 1): ['width: 10em', ' height: 80%', 'background: linear-gradient(90deg,#d65f5f 50.0%' ', transparent 0%)'], (2, 2): ['width: 10em', ' height: 80%', 'background: linear-gradient(90deg,#d65f5f 100.0%' ', transparent 0%)']} self.assertEqual(result, expected) def test_highlight_null(self, null_color='red'): df = pd.DataFrame({'A': [0, np.nan]}) result = df.style.highlight_null()._compute().ctx expected = {(0, 0): [''], (1, 0): ['background-color: red']} self.assertEqual(result, expected) def test_nonunique_raises(self): df = pd.DataFrame([[1, 2]], columns=['A', 'A']) with tm.assertRaises(ValueError): df.style with tm.assertRaises(ValueError): Styler(df) def test_caption(self): styler = Styler(self.df, caption='foo') result = styler.render() self.assertTrue(all(['caption' in result, 'foo' in result])) styler = self.df.style result = styler.set_caption('baz') self.assertTrue(styler is result) self.assertEqual(styler.caption, 'baz') def test_uuid(self): styler = Styler(self.df, uuid='abc123') result = styler.render() self.assertTrue('abc123' in result) styler = self.df.style result = styler.set_uuid('aaa') self.assertTrue(result is styler) self.assertEqual(result.uuid, 'aaa') def test_table_styles(self): style = [{'selector': 'th', 'props': [('foo', 'bar')]}] styler = Styler(self.df, table_styles=style) result = ' '.join(styler.render().split()) self.assertTrue('th { foo: bar; }' in result) styler = self.df.style result = styler.set_table_styles(style) self.assertTrue(styler is result) self.assertEqual(styler.table_styles, style) def test_table_attributes(self): attributes = 'class="foo" data-bar' styler = Styler(self.df, table_attributes=attributes) result = styler.render() self.assertTrue('class="foo" data-bar' in result) result = self.df.style.set_table_attributes(attributes).render() self.assertTrue('class="foo" data-bar' in result) def test_precision(self): with pd.option_context('display.precision', 10): s = Styler(self.df) self.assertEqual(s.precision, 10) s = Styler(self.df, precision=2) self.assertEqual(s.precision, 2) s2 = s.set_precision(4) self.assertTrue(s is s2) self.assertEqual(s.precision, 4) def test_apply_none(self): def f(x): return pd.DataFrame(np.where(x == x.max(), 'color: red', ''), index=x.index, columns=x.columns) result = (pd.DataFrame([[1, 2], [3, 4]]) .style.apply(f, axis=None)._compute().ctx) self.assertEqual(result[(1, 1)], ['color: red']) def test_trim(self): result = self.df.style.render() # trim=True self.assertEqual(result.count('#'), 0) result = self.df.style.highlight_max().render() self.assertEqual(result.count('#'), len(self.df.columns)) def test_highlight_max(self): df = pd.DataFrame([[1, 2], [3, 4]], columns=['A', 'B']) # max(df) = min(-df) for max_ in [True, False]: if max_: attr = 'highlight_max' else: df = -df attr = 'highlight_min' result = getattr(df.style, attr)()._compute().ctx self.assertEqual(result[(1, 1)], ['background-color: yellow']) result = getattr(df.style, attr)(color='green')._compute().ctx self.assertEqual(result[(1, 1)], ['background-color: green']) result = getattr(df.style, attr)(subset='A')._compute().ctx self.assertEqual(result[(1, 0)], ['background-color: yellow']) result = getattr(df.style, attr)(axis=0)._compute().ctx expected = {(1, 0): ['background-color: yellow'], (1, 1): ['background-color: yellow'], (0, 1): [''], (0, 0): ['']} self.assertEqual(result, expected) result = getattr(df.style, 
attr)(axis=1)._compute().ctx expected = {(0, 1): ['background-color: yellow'], (1, 1): ['background-color: yellow'], (0, 0): [''], (1, 0): ['']} self.assertEqual(result, expected) # separate since we cant negate the strs df['C'] = ['a', 'b'] result = df.style.highlight_max()._compute().ctx expected = {(1, 1): ['background-color: yellow']} result = df.style.highlight_min()._compute().ctx expected = {(0, 0): ['background-color: yellow']} def test_export(self): f = lambda x: 'color: red' if x > 0 else 'color: blue' g = lambda x, y, z: 'color: %s' if x > 0 else 'color: %s' % z style1 = self.styler style1.applymap(f)\ .applymap(g, y='a', z='b')\ .highlight_max() result = style1.export() style2 = self.df.style style2.use(result) self.assertEqual(style1._todo, style2._todo) style2.render() def test_display_format(self): df = pd.DataFrame(np.random.random(size=(2, 2))) ctx = df.style.format("{:0.1f}")._translate() self.assertTrue(all(['display_value' in c for c in row] for row in ctx['body'])) self.assertTrue(all([len(c['display_value']) <= 3 for c in row[1:]] for row in ctx['body'])) self.assertTrue( len(ctx['body'][0][1]['display_value'].lstrip('-')) <= 3) def test_display_format_raises(self): df = pd.DataFrame(np.random.randn(2, 2)) with tm.assertRaises(TypeError): df.style.format(5) with tm.assertRaises(TypeError): df.style.format(True) def test_display_subset(self): df = pd.DataFrame([[.1234, .1234], [1.1234, 1.1234]], columns=['a', 'b']) ctx = df.style.format({"a": "{:0.1f}", "b": "{0:.2%}"}, subset=pd.IndexSlice[0, :])._translate() expected = '0.1' self.assertEqual(ctx['body'][0][1]['display_value'], expected) self.assertEqual(ctx['body'][1][1]['display_value'], '1.1234') self.assertEqual(ctx['body'][0][2]['display_value'], '12.34%') raw_11 = '1.1234' ctx = df.style.format("{:0.1f}", subset=pd.IndexSlice[0, :])._translate() self.assertEqual(ctx['body'][0][1]['display_value'], expected) self.assertEqual(ctx['body'][1][1]['display_value'], raw_11) ctx = df.style.format("{:0.1f}", subset=pd.IndexSlice[0, :])._translate() self.assertEqual(ctx['body'][0][1]['display_value'], expected) self.assertEqual(ctx['body'][1][1]['display_value'], raw_11) ctx = df.style.format("{:0.1f}", subset=pd.IndexSlice['a'])._translate() self.assertEqual(ctx['body'][0][1]['display_value'], expected) self.assertEqual(ctx['body'][0][2]['display_value'], '0.1234') ctx = df.style.format("{:0.1f}", subset=pd.IndexSlice[0, 'a'])._translate() self.assertEqual(ctx['body'][0][1]['display_value'], expected) self.assertEqual(ctx['body'][1][1]['display_value'], raw_11) ctx = df.style.format("{:0.1f}", subset=pd.IndexSlice[[0, 1], ['a']])._translate() self.assertEqual(ctx['body'][0][1]['display_value'], expected) self.assertEqual(ctx['body'][1][1]['display_value'], '1.1') self.assertEqual(ctx['body'][0][2]['display_value'], '0.1234') self.assertEqual(ctx['body'][1][2]['display_value'], '1.1234') def test_display_dict(self): df = pd.DataFrame([[.1234, .1234], [1.1234, 1.1234]], columns=['a', 'b']) ctx = df.style.format({"a": "{:0.1f}", "b": "{0:.2%}"})._translate() self.assertEqual(ctx['body'][0][1]['display_value'], '0.1') self.assertEqual(ctx['body'][0][2]['display_value'], '12.34%') df['c'] = ['aaa', 'bbb'] ctx = df.style.format({"a": "{:0.1f}", "c": str.upper})._translate() self.assertEqual(ctx['body'][0][1]['display_value'], '0.1') self.assertEqual(ctx['body'][0][3]['display_value'], 'AAA') @tm.mplskip class TestStylerMatplotlibDep(TestCase): def test_background_gradient(self): df = pd.DataFrame([[1, 2], [2, 4]], 
columns=['A', 'B']) for axis in [0, 1, 'index', 'columns']: for cmap in [None, 'YlOrRd']: result = df.style.background_gradient(cmap=cmap)._compute().ctx self.assertTrue(all("#" in x[0] for x in result.values())) self.assertEqual(result[(0, 0)], result[(0, 1)]) self.assertEqual(result[(1, 0)], result[(1, 1)]) result = (df.style.background_gradient(subset=pd.IndexSlice[1, 'A']) ._compute().ctx) self.assertEqual(result[(1, 0)], ['background-color: #fff7fb'])
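
# A minimal usage sketch of the Styler API exercised by these tests; render()
# returns the generated HTML as a string (illustrative only):
#
#   df = pd.DataFrame({'A': [1, -1]})
#   html = (df.style
#             .applymap(lambda v: 'color: red' if v < 0 else '')
#             .render())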
pjryan126/solid-start-careers
store/api/zillow/venv/lib/python2.7/site-packages/pandas/tests/test_style.py
Python
gpl-2.0
23,908
import grpc import pandas as pd import skl_pb2 import predict_pb2 import model_pb2 from utils import pandas_to_proto def run(): df = pd.DataFrame(columns=list('abc'), data=pd.np.random.rand(10, 3)) channel = grpc.insecure_channel('localhost:50051') stub = skl_pb2.PredictionServiceStub(channel) print("-------------- Predict --------------") model_spec = model_pb2.ModelSpec(model_id='123abc') req = predict_pb2.PredictionRequest(model_spec=model_spec, input=pandas_to_proto(df)) pred = stub.Predict(req) print(pred) if __name__ == '__main__': run()
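
# Optional hardening sketch: block until the channel is actually connected
# before issuing the RPC (grpc.channel_ready_future is part of the public
# grpc API; the 10 second timeout is an arbitrary choice):
#
#   grpc.channel_ready_future(channel).result(timeout=10)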
pprett/grpc-kubernetes-skl-tutorial
skl-server/skl_client.py
Python
bsd-3-clause
592
# -*- coding: utf-8 -*- # Generated by Django 1.9.2 on 2016-03-29 19:49 from __future__ import unicode_literals import django.contrib.gis.db.models.fields from django.db import migrations, models import django.db.models.deletion import swapper from ..util import add_continents as util_add_continents def get_model(apps, name): model_tuple = swapper.split(swapper.get_model_name('cities', name)) return apps.get_model(*model_tuple) def add_continents(apps, schema_editor): util_add_continents(get_model(apps, 'Continent')) def rm_continents(apps, schema_editor): # The table is going to be nuked anyway, we just don't want RunPython() # to throw an exception on backwards migrations pass def add_continent_fks(apps, schema_editor): Country = get_model(apps, 'Country') Continent = get_model(apps, 'Continent') for continent in Continent.objects.all(): Country.objects.filter(continent_code=continent.code).update(continent=continent) def rm_continent_fks(apps, schema_editor): Country = get_model(apps, 'Country') Continent = get_model(apps, 'Continent') for continent in Continent.objects.all(): Country.objects.filter(continent=continent).update(continent_code=continent.code) class Migration(migrations.Migration): dependencies = [ ('cities', '0001_initial'), swapper.dependency('cities', 'Country'), ] operations = [ migrations.CreateModel( name='Continent', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('name', models.CharField(db_index=True, max_length=200, verbose_name='ascii name')), ('slug', models.CharField(max_length=200, unique=True)), ('code', models.CharField(db_index=True, max_length=2, unique=True)), ], options={ 'abstract': False, 'swappable': swapper.swappable_setting('cities', 'Continent'), }, ), migrations.AddField( model_name='continent', name='alt_names', field=models.ManyToManyField(related_name='cities_continents', to='cities.AlternativeName'), ), migrations.RenameField( model_name='country', old_name='continent', new_name='continent_code', ), migrations.AddField( model_name='country', name='continent', field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='countries', to=swapper.get_model_name('cities', 'Continent')), ), migrations.RunPython(add_continents, rm_continents), migrations.RunPython(add_continent_fks, rm_continent_fks), migrations.RemoveField( model_name='country', name='continent_code', ), ]
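
# Because each RunPython above supplies a reverse function, this migration can
# also be unapplied, e.g. (sketch):
#
#   python manage.py migrate cities 0001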
coderholic/django-cities
cities/migrations/0002_continent_models_and_foreign_keys.py
Python
mit
2,948
# text2numbers.py # A program to convert a textual message into a sequence of # numbers, utilizing the underlying Unicode encoding. def main(): print("This program converts a textual message into a sequence") print("of numbers representing the Unicode encoding of the message.\n") # Get the message to encode message = input("Please enter the message to encode: ") print("\nHere are the Unicode codes:") # Loop through the message and print out the Unicode values for ch in message: print(ord(ch), end=" ") print() # blank line before prompt main()
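
# Companion sketch (not part of the original program): decoding a space
# separated sequence of Unicode codes back into text with chr().
def numbers2text(codes):
    return "".join(chr(int(code)) for code in codes.split())

# Example: numbers2text("72 105") returns "Hi"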
cynthiacarter/Code-ch05
text2numbers.py
Python
mit
639
# Copyright (c) 2013 OpenStack Foundation # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import mock from neutron.plugins.common import constants as p_const from neutron.plugins.ml2.drivers import type_gre from neutron.tests.unit.ml2 import test_type_tunnel from neutron.tests.unit import testlib_api TUNNEL_IP_ONE = "10.10.10.10" TUNNEL_IP_TWO = "10.10.10.20" HOST_ONE = 'fake_host_one' HOST_TWO = 'fake_host_two' class GreTypeTest(test_type_tunnel.TunnelTypeTestMixin, testlib_api.SqlTestCase): DRIVER_CLASS = type_gre.GreTypeDriver TYPE = p_const.TYPE_GRE def test_add_endpoint(self): endpoint = self.driver.add_endpoint(TUNNEL_IP_ONE, HOST_ONE) self.assertEqual(TUNNEL_IP_ONE, endpoint.ip_address) self.assertEqual(HOST_ONE, endpoint.host) def test_add_endpoint_for_existing_tunnel_ip(self): self.driver.add_endpoint(TUNNEL_IP_ONE, HOST_ONE) with mock.patch.object(type_gre.LOG, 'warning') as log_warn: self.driver.add_endpoint(TUNNEL_IP_ONE, HOST_ONE) log_warn.assert_called_once_with(mock.ANY, TUNNEL_IP_ONE) def test_get_endpoint_by_host(self): self.driver.add_endpoint(TUNNEL_IP_ONE, HOST_ONE) host_endpoint = self.driver.get_endpoint_by_host(HOST_ONE) self.assertEqual(TUNNEL_IP_ONE, host_endpoint.ip_address) def test_get_endpoint_by_host_for_not_existing_host(self): ip_endpoint = self.driver.get_endpoint_by_host(HOST_TWO) self.assertIsNone(ip_endpoint) def test_get_endpoint_by_ip(self): self.driver.add_endpoint(TUNNEL_IP_ONE, HOST_ONE) ip_endpoint = self.driver.get_endpoint_by_ip(TUNNEL_IP_ONE) self.assertEqual(HOST_ONE, ip_endpoint.host) def test_get_endpoint_by_ip_for_not_existing_tunnel_ip(self): ip_endpoint = self.driver.get_endpoint_by_ip(TUNNEL_IP_TWO) self.assertIsNone(ip_endpoint) def test_get_endpoints(self): self.driver.add_endpoint(TUNNEL_IP_ONE, HOST_ONE) self.driver.add_endpoint(TUNNEL_IP_TWO, HOST_TWO) endpoints = self.driver.get_endpoints() for endpoint in endpoints: if endpoint['ip_address'] == TUNNEL_IP_ONE: self.assertEqual(HOST_ONE, endpoint['host']) elif endpoint['ip_address'] == TUNNEL_IP_TWO: self.assertEqual(HOST_TWO, endpoint['host']) def test_delete_endpoint(self): self.driver.add_endpoint(TUNNEL_IP_ONE, HOST_ONE) self.assertIsNone(self.driver.delete_endpoint(TUNNEL_IP_ONE)) # Get all the endpoints and verify its empty endpoints = self.driver.get_endpoints() self.assertNotIn(TUNNEL_IP_ONE, endpoints) class GreTypeMultiRangeTest(test_type_tunnel.TunnelTypeMultiRangeTestMixin, testlib_api.SqlTestCase): DRIVER_CLASS = type_gre.GreTypeDriver
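
# Pattern sketch: the mixin keeps the driver-specific tests declarative, so
# another tunnel driver plugs in the same way (hypothetical example mirroring
# the classes above):
#
#   class VxlanTypeTest(test_type_tunnel.TunnelTypeTestMixin,
#                       testlib_api.SqlTestCase):
#       DRIVER_CLASS = type_vxlan.VxlanTypeDriver
#       TYPE = p_const.TYPE_VXLAN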
projectcalico/calico-neutron
neutron/tests/unit/ml2/test_type_gre.py
Python
apache-2.0
3,430
# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import models, migrations class Migration(migrations.Migration): dependencies = [ ('users', '0001_initial'), ('community', '0006_auto_20150208_0818'), ] operations = [ migrations.CreateModel( name='JoinRequest', fields=[ ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), ('date_created', models.DateTimeField(auto_now_add=True)), ('is_approved', models.BooleanField(default=False)), ('approved_by', models.ForeignKey(related_name='approved_by', blank=True, to='users.SystersUser', null=True)), ('community', models.ForeignKey(to='community.Community')), ('user', models.ForeignKey(related_name='created_by', to='users.SystersUser')), ], options={ }, bases=(models.Model,), ), ]
willingc/portal
systers_portal/membership/migrations/0001_initial.py
Python
gpl-2.0
1,027
import numpy import shocktube import vtktools import matplotlib.pyplot as plt import matplotlib as mpl import subprocess import sys time_levels=numpy.arange(0,99) filename_part='shocktube_' for time_level in time_levels: filename_in = filename_part + str(time_level) + '.vtu' filename_out = filename_part + str(format(time_level,"03g")) + '.png' print 'Processing file' , filename_in , '...', vt=vtktools.vtu(filename_in) t=vt.GetScalarField('Time')[0] xyz=vt.GetLocations() x=xyz[:,0] p=vt.GetScalarField('Pressure') uvw=vt.GetVectorField('Velocity') u=uvw[:,0] rho=vt.GetScalarField('Density') ie=vt.GetScalarField('InternalEnergy') analytical_solution = numpy.array([shocktube.solution(xi,t) for xi in x]) analytical_p = analytical_solution[:,0] analytical_u=analytical_solution[:,1] analytical_rho=analytical_solution[:,2] analytical_ie=analytical_p/analytical_rho/(shocktube.gamma-1.0) fig = plt.figure() pressure_subplot = fig.add_subplot(4,1,1) pressure_subplot.plot( x, p,'.') pressure_subplot.plot( x, analytical_p,'-') plt.axis((x[0],x[-1],0.1,1.1)) plt.ylabel('p') velocity_subplot = fig.add_subplot(4,1,2) velocity_subplot.plot( x, u,'.') velocity_subplot.plot( x, analytical_u,'-') plt.axis((x[0],x[-1],-0.1,0.8)) plt.ylabel('u') density_subplot = fig.add_subplot(4,1,3) density_subplot.plot( x, rho,'.') density_subplot.plot( x, analytical_rho,'-') plt.axis((x[0],x[-1],0.1,1.1)) plt.ylabel('rho') internalEnergy_subplot = fig.add_subplot(4,1,4) internalEnergy_subplot.plot( x, ie,'.') internalEnergy_subplot.plot( x, analytical_ie,'-') plt.axis((x[0],x[-1],1.8,3.4)) plt.ylabel('e') plt.savefig(filename_out, dpi=100) print 'created file' , filename_out plt.close(fig) animation_command = ('mencoder', 'mf://shocktube*.png', '-mf', 'type=png:w=800:h=600:fps=12', '-ovc', 'lavc', '-lavcopts', 'vcodec=mpeg4', '-oac', 'copy', '-o', 'shocktube.avi') subprocess.check_call(animation_command)
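
# Note: if mencoder is unavailable, a roughly equivalent ffmpeg invocation
# (untested sketch) would be:
#   ffmpeg -framerate 12 -i shocktube_%03d.png -c:v mpeg4 shocktube.avi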
rjferrier/fluidity
tests/shocktube_1d/animate_plot.py
Python
lgpl-2.1
2,232
import os from django.utils.translation import ugettext_lazy as _ from openstack_dashboard import exceptions DEBUG = False TEMPLATE_DEBUG = DEBUG # WEBROOT is the location relative to Webserver root # should end with a slash. WEBROOT = '/' # LOGIN_URL = WEBROOT + 'auth/login/' # LOGOUT_URL = WEBROOT + 'auth/logout/' # # LOGIN_REDIRECT_URL can be used as an alternative for # HORIZON_CONFIG.user_home, if user_home is not set. # Do not set it to '/home/', as this will cause circular redirect loop # LOGIN_REDIRECT_URL = WEBROOT # Required for Django 1.5. # If horizon is running in production (DEBUG is False), set this # with the list of host/domain names that the application can serve. # For more information see: # https://docs.djangoproject.com/en/dev/ref/settings/#allowed-hosts #ALLOWED_HOSTS = ['horizon.example.com', ] # Set SSL proxy settings: # For Django 1.4+ pass this header from the proxy after terminating the SSL, # and don't forget to strip it from the client's request. # For more information see: # https://docs.djangoproject.com/en/1.4/ref/settings/#secure-proxy-ssl-header #SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTOCOL', 'https') # If Horizon is being served through SSL, then uncomment the following two # settings to better secure the cookies from security exploits #CSRF_COOKIE_SECURE = True #SESSION_COOKIE_SECURE = True # Overrides for OpenStack API versions. Use this setting to force the # OpenStack dashboard to use a specific API version for a given service API. # Versions specified here should be integers or floats, not strings. # NOTE: The version should be formatted as it appears in the URL for the # service API. For example, The identity service APIs have inconsistent # use of the decimal point, so valid options would be 2.0 or 3. #OPENSTACK_API_VERSIONS = { # "data-processing": 1.1, # "identity": 3, # "volume": 2, #} # Set this to True if running on multi-domain model. When this is enabled, it # will require user to enter the Domain name in addition to username for login. #OPENSTACK_KEYSTONE_MULTIDOMAIN_SUPPORT = False # Overrides the default domain used when running on single-domain model # with Keystone V3. All entities will be created in the default domain. #OPENSTACK_KEYSTONE_DEFAULT_DOMAIN = 'Default' # Set Console type: # valid options would be "AUTO"(default), "VNC", "SPICE", "RDP", "SERIAL" or None # Set to None explicitly if you want to deactivate the console. #CONSOLE_TYPE = "AUTO" # Default OpenStack Dashboard configuration. HORIZON_CONFIG = { 'user_home': 'openstack_dashboard.views.get_user_home', 'ajax_queue_limit': 10, 'auto_fade_alerts': { 'delay': 3000, 'fade_duration': 1500, 'types': ['alert-success', 'alert-info'] }, 'help_url': "http://docs.openstack.org", 'exceptions': {'recoverable': exceptions.RECOVERABLE, 'not_found': exceptions.NOT_FOUND, 'unauthorized': exceptions.UNAUTHORIZED}, 'modal_backdrop': 'static', 'angular_modules': [], 'js_files': [], 'js_spec_files': [], } # Specify a regular expression to validate user passwords. #HORIZON_CONFIG["password_validator"] = { # "regex": '.*', # "help_text": _("Your password does not meet the requirements."), #} # Disable simplified floating IP address management for deployments with # multiple floating IP pools or complex network requirements. #HORIZON_CONFIG["simple_ip_management"] = False # Turn off browser autocompletion for forms including the login form and # the database creation workflow if so desired. 
#HORIZON_CONFIG["password_autocomplete"] = "off" # Setting this to True will disable the reveal button for password fields, # including on the login form. #HORIZON_CONFIG["disable_password_reveal"] = False LOCAL_PATH = os.path.dirname(os.path.abspath(__file__)) # Set custom secret key: # You can either set it to a specific value or you can let horizon generate a # default secret key that is unique on this machine, e.i. regardless of the # amount of Python WSGI workers (if used behind Apache+mod_wsgi): However, there # may be situations where you would want to set this explicitly, e.g. when # multiple dashboard instances are distributed on different machines (usually # behind a load-balancer). Either you have to make sure that a session gets all # requests routed to the same dashboard instance or you set the same SECRET_KEY # for all of them. from horizon.utils import secret_key SECRET_KEY = secret_key.generate_or_read_from_file('/var/lib/openstack-dashboard/secret_key') # We recommend you use memcached for development; otherwise after every reload # of the django development server, you will have to login again. To use # memcached set CACHES to something like CACHES = { 'default': { 'BACKEND': 'django.core.cache.backends.memcached.MemcachedCache', 'LOCATION': '{{controller_addr}}:11211', } } #CACHES = { # 'default': { # 'BACKEND': 'django.core.cache.backends.locmem.LocMemCache', # } #} # Send email to the console by default EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend' # Or send them to /dev/null #EMAIL_BACKEND = 'django.core.mail.backends.dummy.EmailBackend' # Configure these for your outgoing email host #EMAIL_HOST = 'smtp.my-company.com' #EMAIL_PORT = 25 #EMAIL_HOST_USER = 'djangomail' #EMAIL_HOST_PASSWORD = 'top-secret!' # For multiple regions uncomment this configuration, and add (endpoint, title). #AVAILABLE_REGIONS = [ # ('http://cluster1.example.com:5000/v2.0', 'cluster1'), # ('http://cluster2.example.com:5000/v2.0', 'cluster2'), #] OPENSTACK_HOST = "{{public_addr}}" OPENSTACK_KEYSTONE_URL = "http://%s:5000/v2.0" % OPENSTACK_HOST OPENSTACK_KEYSTONE_DEFAULT_ROLE = "_member_" # Enables keystone web single-sign-on if set to True. #WEBSSO_ENABLED = False # Determines which authentication choice to show as default. #WEBSSO_INITIAL_CHOICE = "credentials" # The list of authentication mechanisms # which include keystone federation protocols. # Current supported protocol IDs are 'saml2' and 'oidc' # which represent SAML 2.0, OpenID Connect respectively. # Do not remove the mandatory credentials mechanism. #WEBSSO_CHOICES = ( # ("credentials", _("Keystone Credentials")), # ("oidc", _("OpenID Connect")), # ("saml2", _("Security Assertion Markup Language"))) # Disable SSL certificate checks (useful for self-signed certificates): #OPENSTACK_SSL_NO_VERIFY = True # The CA certificate to use to verify SSL connections #OPENSTACK_SSL_CACERT = '/path/to/cacert.pem' # The OPENSTACK_KEYSTONE_BACKEND settings can be used to identify the # capabilities of the auth backend for Keystone. # If Keystone has been configured to use LDAP as the auth backend then set # can_edit_user to False and name to 'ldap'. # # TODO(tres): Remove these once Keystone has an API to identify auth backend. OPENSTACK_KEYSTONE_BACKEND = { 'name': 'native', 'can_edit_user': True, 'can_edit_group': True, 'can_edit_project': True, 'can_edit_domain': True, 'can_edit_role': True, } # Setting this to True, will add a new "Retrieve Password" action on instance, # allowing Admin session password retrieval/decryption. 
#OPENSTACK_ENABLE_PASSWORD_RETRIEVE = False # The Launch Instance user experience has been significantly enhanced. # You can choose whether to enable the new launch instance experience, # the legacy experience, or both. The legacy experience will be removed # in a future release, but is available as a temporary backup setting to ensure # compatibility with existing deployments. Further development will not be # done on the legacy experience. Please report any problems with the new # experience via the Launchpad tracking system. # # Toggle LAUNCH_INSTANCE_LEGACY_ENABLED and LAUNCH_INSTANCE_NG_ENABLED to # determine the experience to enable. Set them both to true to enable # both. #LAUNCH_INSTANCE_LEGACY_ENABLED = True #LAUNCH_INSTANCE_NG_ENABLED = False # The Xen Hypervisor has the ability to set the mount point for volumes # attached to instances (other Hypervisors currently do not). Setting # can_set_mount_point to True will add the option to set the mount point # from the UI. OPENSTACK_HYPERVISOR_FEATURES = { 'can_set_mount_point': False, 'can_set_password': False, } # The OPENSTACK_CINDER_FEATURES settings can be used to enable optional # services provided by cinder that is not exposed by its extension API. OPENSTACK_CINDER_FEATURES = { 'enable_backup': False, } # The OPENSTACK_NEUTRON_NETWORK settings can be used to enable optional # services provided by neutron. Options currently available are load # balancer service, security groups, quotas, VPN service. OPENSTACK_NEUTRON_NETWORK = { 'enable_router': True, 'enable_quotas': True, 'enable_ipv6': True, 'enable_distributed_router': False, 'enable_ha_router': False, 'enable_lb': True, 'enable_firewall': True, 'enable_vpn': True, # The profile_support option is used to detect if an external router can be # configured via the dashboard. When using specific plugins the # profile_support can be turned on if needed. 'profile_support': None, #'profile_support': 'cisco', # Set which provider network types are supported. Only the network types # in this list will be available to choose from when creating a network. # Network types include local, flat, vlan, gre, and vxlan. 'supported_provider_types': ['*'], # Set which VNIC types are supported for port binding. Only the VNIC # types in this list will be available to choose from when creating a # port. # VNIC types include 'normal', 'macvtap' and 'direct'. 'supported_vnic_types': ['*'] } # The OPENSTACK_IMAGE_BACKEND settings can be used to customize features # in the OpenStack Dashboard related to the Image service, such as the list # of supported image formats. #OPENSTACK_IMAGE_BACKEND = { # 'image_formats': [ # ('', _('Select format')), # ('aki', _('AKI - Amazon Kernel Image')), # ('ami', _('AMI - Amazon Machine Image')), # ('ari', _('ARI - Amazon Ramdisk Image')), # ('iso', _('ISO - Optical Disk Image')), # ('ova', _('OVA - Open Virtual Appliance')), # ('qcow2', _('QCOW2 - QEMU Emulator')), # ('raw', _('Raw')), # ('vdi', _('VDI - Virtual Disk Image')), # ('vhd', ('VHD - Virtual Hard Disk')), # ('vmdk', _('VMDK - Virtual Machine Disk')), # ] #} # The IMAGE_CUSTOM_PROPERTY_TITLES settings is used to customize the titles for # image custom property attributes that appear on image detail pages. 
IMAGE_CUSTOM_PROPERTY_TITLES = { "architecture": _("Architecture"), "kernel_id": _("Kernel ID"), "ramdisk_id": _("Ramdisk ID"), "image_state": _("Euca2ools state"), "project_id": _("Project ID"), "image_type": _("Image Type"), } # The IMAGE_RESERVED_CUSTOM_PROPERTIES setting is used to specify which image # custom properties should not be displayed in the Image Custom Properties # table. IMAGE_RESERVED_CUSTOM_PROPERTIES = [] # OPENSTACK_ENDPOINT_TYPE specifies the endpoint type to use for the endpoints # in the Keystone service catalog. Use this setting when Horizon is running # external to the OpenStack environment. The default is 'publicURL'. OPENSTACK_ENDPOINT_TYPE = "publicURL" # SECONDARY_ENDPOINT_TYPE specifies the fallback endpoint type to use in the # case that OPENSTACK_ENDPOINT_TYPE is not present in the endpoints # in the Keystone service catalog. Use this setting when Horizon is running # external to the OpenStack environment. The default is None. This # value should differ from OPENSTACK_ENDPOINT_TYPE if used. #SECONDARY_ENDPOINT_TYPE = "publicURL" # The number of objects (Swift containers/objects or images) to display # on a single page before providing a paging element (a "more" link) # to paginate results. API_RESULT_LIMIT = 1000 API_RESULT_PAGE_SIZE = 20 # The size of chunk in bytes for downloading objects from Swift SWIFT_FILE_TRANSFER_CHUNK_SIZE = 512 * 1024 # Specify a maximum number of items to display in a dropdown. DROPDOWN_MAX_ITEMS = 30 # The timezone of the server. This should correspond with the timezone # of your entire OpenStack installation, and hopefully be in UTC. TIME_ZONE = "UTC" # When launching an instance, the menu of available flavors is # sorted by RAM usage, ascending. If you would like a different sort order, # you can provide another flavor attribute as sorting key. Alternatively, you # can provide a custom callback method to use for sorting. You can also provide # a flag for reverse sort. For more info, see # http://docs.python.org/2/library/functions.html#sorted #CREATE_INSTANCE_FLAVOR_SORT = { # 'key': 'name', # # or # 'key': my_awesome_callback_method, # 'reverse': False, #} # Set this to True to display an 'Admin Password' field on the Change Password # form to verify that it is indeed the admin logged-in who wants to change # the password. # ENFORCE_PASSWORD_CHECK = False # Modules that provide /auth routes that can be used to handle different types # of user authentication. Add auth plugins that require extra route handling to # this list. #AUTHENTICATION_URLS = [ # 'openstack_auth.urls', #] # The Horizon Policy Enforcement engine uses these values to load per service # policy rule files. The content of these files should match the files the # OpenStack services are using to determine role based access control in the # target installation. # Path to directory containing policy.json files #POLICY_FILES_PATH = os.path.join(ROOT_PATH, "conf") # Map of local copy of service policy files #POLICY_FILES = { # 'identity': 'keystone_policy.json', # 'compute': 'nova_policy.json', # 'volume': 'cinder_policy.json', # 'image': 'glance_policy.json', # 'orchestration': 'heat_policy.json', # 'network': 'neutron_policy.json', # 'telemetry': 'ceilometer_policy.json', #} # Trove user and database extension support. By default support for # creating users and databases on database instances is turned on. # To disable these extensions set the permission here to something # unusable such as ["!"]. 
# TROVE_ADD_USER_PERMS = []
# TROVE_ADD_DATABASE_PERMS = []

# Change this path to the appropriate static directory containing
# two files: _variables.scss and _styles.scss
#CUSTOM_THEME_PATH = 'static/themes/default'

LOGGING = {
    'version': 1,
    # When set to True this will disable all logging except
    # for loggers specified in this configuration dictionary. Note that
    # if nothing is specified here and disable_existing_loggers is True,
    # django.db.backends will still log unless it is disabled explicitly.
    'disable_existing_loggers': False,
    'handlers': {
        'null': {
            'level': 'DEBUG',
            'class': 'django.utils.log.NullHandler',
        },
        'console': {
            # Set the level to "DEBUG" for verbose output logging.
            'level': 'INFO',
            'class': 'logging.StreamHandler',
        },
    },
    'loggers': {
        # Logging from django.db.backends is VERY verbose, send to null
        # by default.
        'django.db.backends': {
            'handlers': ['null'],
            'propagate': False,
        },
        'requests': {
            'handlers': ['null'],
            'propagate': False,
        },
        'horizon': {
            'handlers': ['console'],
            'level': 'DEBUG',
            'propagate': False,
        },
        'openstack_dashboard': {
            'handlers': ['console'],
            'level': 'DEBUG',
            'propagate': False,
        },
        'novaclient': {
            'handlers': ['console'],
            'level': 'DEBUG',
            'propagate': False,
        },
        'cinderclient': {
            'handlers': ['console'],
            'level': 'DEBUG',
            'propagate': False,
        },
        'keystoneclient': {
            'handlers': ['console'],
            'level': 'DEBUG',
            'propagate': False,
        },
        'glanceclient': {
            'handlers': ['console'],
            'level': 'DEBUG',
            'propagate': False,
        },
        'neutronclient': {
            'handlers': ['console'],
            'level': 'DEBUG',
            'propagate': False,
        },
        'heatclient': {
            'handlers': ['console'],
            'level': 'DEBUG',
            'propagate': False,
        },
        'ceilometerclient': {
            'handlers': ['console'],
            'level': 'DEBUG',
            'propagate': False,
        },
        'troveclient': {
            'handlers': ['console'],
            'level': 'DEBUG',
            'propagate': False,
        },
        'swiftclient': {
            'handlers': ['console'],
            'level': 'DEBUG',
            'propagate': False,
        },
        'openstack_auth': {
            'handlers': ['console'],
            'level': 'DEBUG',
            'propagate': False,
        },
        'nose.plugins.manager': {
            'handlers': ['console'],
            'level': 'DEBUG',
            'propagate': False,
        },
        'django': {
            'handlers': ['console'],
            'level': 'DEBUG',
            'propagate': False,
        },
        'iso8601': {
            'handlers': ['null'],
            'propagate': False,
        },
        'scss': {
            'handlers': ['null'],
            'propagate': False,
        },
    }
}

# 'direction' should not be specified for all_tcp/udp/icmp.
# It is specified in the form.
SECURITY_GROUP_RULES = { 'all_tcp': { 'name': _('All TCP'), 'ip_protocol': 'tcp', 'from_port': '1', 'to_port': '65535', }, 'all_udp': { 'name': _('All UDP'), 'ip_protocol': 'udp', 'from_port': '1', 'to_port': '65535', }, 'all_icmp': { 'name': _('All ICMP'), 'ip_protocol': 'icmp', 'from_port': '-1', 'to_port': '-1', }, 'ssh': { 'name': 'SSH', 'ip_protocol': 'tcp', 'from_port': '22', 'to_port': '22', }, 'smtp': { 'name': 'SMTP', 'ip_protocol': 'tcp', 'from_port': '25', 'to_port': '25', }, 'dns': { 'name': 'DNS', 'ip_protocol': 'tcp', 'from_port': '53', 'to_port': '53', }, 'http': { 'name': 'HTTP', 'ip_protocol': 'tcp', 'from_port': '80', 'to_port': '80', }, 'pop3': { 'name': 'POP3', 'ip_protocol': 'tcp', 'from_port': '110', 'to_port': '110', }, 'imap': { 'name': 'IMAP', 'ip_protocol': 'tcp', 'from_port': '143', 'to_port': '143', }, 'ldap': { 'name': 'LDAP', 'ip_protocol': 'tcp', 'from_port': '389', 'to_port': '389', }, 'https': { 'name': 'HTTPS', 'ip_protocol': 'tcp', 'from_port': '443', 'to_port': '443', }, 'smtps': { 'name': 'SMTPS', 'ip_protocol': 'tcp', 'from_port': '465', 'to_port': '465', }, 'imaps': { 'name': 'IMAPS', 'ip_protocol': 'tcp', 'from_port': '993', 'to_port': '993', }, 'pop3s': { 'name': 'POP3S', 'ip_protocol': 'tcp', 'from_port': '995', 'to_port': '995', }, 'ms_sql': { 'name': 'MS SQL', 'ip_protocol': 'tcp', 'from_port': '1433', 'to_port': '1433', }, 'mysql': { 'name': 'MYSQL', 'ip_protocol': 'tcp', 'from_port': '3306', 'to_port': '3306', }, 'rdp': { 'name': 'RDP', 'ip_protocol': 'tcp', 'from_port': '3389', 'to_port': '3389', }, } # Deprecation Notice: # # The setting FLAVOR_EXTRA_KEYS has been deprecated. # Please load extra spec metadata into the Glance Metadata Definition Catalog. # # The sample quota definitions can be found in: # <glance_source>/etc/metadefs/compute-quota.json # # The metadata definition catalog supports CLI and API: # $glance --os-image-api-version 2 help md-namespace-import # $glance-manage db_load_metadefs <directory_with_definition_files> # # See Metadata Definitions on: http://docs.openstack.org/developer/glance/ # Indicate to the Sahara data processing service whether or not # automatic floating IP allocation is in effect. If it is not # in effect, the user will be prompted to choose a floating IP # pool for use in their cluster. False by default. You would want # to set this to True if you were running Nova Networking with # auto_assign_floating_ip = True. #SAHARA_AUTO_IP_ALLOCATION_ENABLED = False # The hash algorithm to use for authentication tokens. This must # match the hash algorithm that the identity server and the # auth_token middleware are using. Allowed values are the # algorithms supported by Python's hashlib library. #OPENSTACK_TOKEN_HASH_ALGORITHM = 'md5' # AngularJS requires some settings to be made available to # the client side. Some settings are required by in-tree / built-in horizon # features. These settings must be added to REST_API_REQUIRED_SETTINGS in the # form of ['SETTING_1','SETTING_2'], etc. # # You may remove settings from this list for security purposes, but do so at # the risk of breaking a built-in horizon feature. These settings are required # for horizon to function properly. Only remove them if you know what you # are doing. These settings may in the future be moved to be defined within # the enabled panel configuration. # You should not add settings to this list for out of tree extensions. 
# See: https://wiki.openstack.org/wiki/Horizon/RESTAPI REST_API_REQUIRED_SETTINGS = ['OPENSTACK_HYPERVISOR_FEATURES'] # Additional settings can be made available to the client side for # extensibility by specifying them in REST_API_ADDITIONAL_SETTINGS # !! Please use extreme caution as the settings are transferred via HTTP/S # and are not encrypted on the browser. This is an experimental API and # may be deprecated in the future without notice. #REST_API_ADDITIONAL_SETTINGS = [] ############################################################################### # Ubuntu Settings ############################################################################### # Enable the Ubuntu theme if it is present. try: from ubuntu_theme import * except ImportError: pass # Default Ubuntu apache configuration uses /horizon as the application root. WEBROOT='/horizon/' # By default, validation of the HTTP Host header is disabled. Production # installations should have this set accordingly. For more information # see https://docs.djangoproject.com/en/dev/ref/settings/. ALLOWED_HOSTS = '*' # Compress all assets offline as part of packaging installation COMPRESS_OFFLINE = True USE_X_FORWARDED_HOST = True
sandvine/os-ansible-deployment-lite
ansible/roles/horizon/templates/local_settings.py
Python
apache-2.0
22,975
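The LOGGING dictionary in the Horizon settings above follows the stdlib dictConfig schema that Django applies at startup. Below is a minimal, self-contained sketch of the same pattern; the handler and logger entries are a trimmed-down illustration, and logging.NullHandler stands in for django.utils.log.NullHandler so the snippet runs without Django installed.

import logging
import logging.config

LOGGING = {
    'version': 1,
    'disable_existing_loggers': False,
    'handlers': {
        'null': {'level': 'DEBUG', 'class': 'logging.NullHandler'},
        'console': {'level': 'INFO', 'class': 'logging.StreamHandler'},
    },
    'loggers': {
        # Noisy library loggers are routed to the null handler...
        'requests': {'handlers': ['null'], 'propagate': False},
        # ...while application loggers go to the console.
        'horizon': {'handlers': ['console'], 'level': 'DEBUG',
                    'propagate': False},
    },
}

logging.config.dictConfig(LOGGING)
logging.getLogger('horizon').info('shown on stderr')
logging.getLogger('requests').info('silently discarded')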
""" Django settings for mediapanel project. For more information on this file, see https://docs.djangoproject.com/en/1.7/topics/settings/ For the full list of settings and their values, see https://docs.djangoproject.com/en/1.7/ref/settings/ """ # Build paths inside the project like this: os.path.join(BASE_DIR, ...) import os BASE_DIR = os.path.dirname(os.path.dirname(__file__)) # Quick-start development settings - unsuitable for production # See https://docs.djangoproject.com/en/1.7/howto/deployment/checklist/ # SECURITY WARNING: keep the secret key used in production secret! SECRET_KEY = 'zlcz&x&%!4q%p0_7=r4=(#54dm4wtpzq#1=t4pjjoc2%31e%de' # SECURITY WARNING: don't run with debug turned on in production! DEBUG = True TEMPLATE_DEBUG = True ALLOWED_HOSTS = [] # Application definition INSTALLED_APPS = ( 'django.contrib.admin', 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sessions', 'django.contrib.messages', 'django.contrib.staticfiles', 'agenda', 'background', 'panel', 'weather', ) MIDDLEWARE_CLASSES = ( 'django.contrib.sessions.middleware.SessionMiddleware', 'django.middleware.common.CommonMiddleware', 'django.middleware.csrf.CsrfViewMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', 'django.contrib.auth.middleware.SessionAuthenticationMiddleware', 'django.contrib.messages.middleware.MessageMiddleware', 'django.middleware.clickjacking.XFrameOptionsMiddleware', ) ROOT_URLCONF = 'mediapanel.urls' WSGI_APPLICATION = 'mediapanel.wsgi.application' # Database # https://docs.djangoproject.com/en/1.7/ref/settings/#databases DATABASES = { 'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': os.path.join(BASE_DIR, 'db.sqlite3'), } } # Internationalization # https://docs.djangoproject.com/en/1.7/topics/i18n/ LANGUAGE_CODE = 'en-us' TIME_ZONE = 'Europe/Amsterdam' USE_I18N = True USE_L10N = True USE_TZ = True # Static files (CSS, JavaScript, Images) # https://docs.djangoproject.com/en/1.7/howto/static-files/ STATIC_URL = '/static/' STATICFILES_DIRS = ( 'Photo albums', )
Ultimatum22/MediaPanel
mediapanel/settings.py
Python
apache-2.0
2,171
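The BASE_DIR idiom in the settings above climbs two directory levels up from settings.py, so the sqlite database file lands at the project root next to manage.py. A quick illustration; the filesystem path used here is hypothetical, not taken from the project.

import os

# Hypothetical location of the settings module above.
settings_file = '/srv/mediapanel/mediapanel/settings.py'

BASE_DIR = os.path.dirname(os.path.dirname(settings_file))
print(BASE_DIR)                              # -> /srv/mediapanel
print(os.path.join(BASE_DIR, 'db.sqlite3'))  # -> /srv/mediapanel/db.sqlite3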
#!/usr/bin/env python # this program is used to test latency # don't test RTT bigger than 3 secs - it will break # we make sure that nothing breaks if there is a packet missing # this can rarely happen import select import socket import time import sys import struct def pong(): # easy, receive and send back s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) s.bind(('0.0.0.0', 1234)) while True: c, addr = s.recvfrom(1) s.sendto(c, (addr[0], 1235)) if c == 'x': break print 'Finished' return 0 def ping(addr, n): # send and wait for it back s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) s.bind(('0.0.0.0', 1235)) succ = 0 errs = 0 while succ != n and errs < 3: # at most 3 lost packets time.sleep(0.02) # wait a bit start = time.time() s.sendto('r', (addr, 1234)) h, _, _ = select.select([s], [], [], 3) # wait 3 seconds end = time.time() if h == []: # lost packet # print '# lost packet' errs += 1 continue s.recv(1) # eat the response succ += 1 print '%.8f' % (end - start) for x in xrange(10): # send many packets to be (almost) sure the other end is done s.sendto('x', (addr, 1234)) return errs >= 3 if __name__ == '__main__': if 'ping' in sys.argv: ret = ping(sys.argv[2], int(sys.argv[3])) elif 'pong' in sys.argv: ret = pong() else: print 'ping or pong?' ret = 1 sys.exit(ret)
olbat/distem
test/experimental_testing/exps/latency.py
Python
gpl-3.0
1,573
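The ping side of latency.py prints one RTT per line, so its output pipes cleanly into a post-processing step. The following is a small companion sketch, not part of the original script, written for Python 2 like the script itself; the file name rtt_stats.py in the usage comment is made up.

import sys

def summarize(lines):
    """Return (count, min, mean, p95, max) for one RTT float per line."""
    rtts = sorted(float(line) for line in lines if line.strip())
    n = len(rtts)
    mean = sum(rtts) / n
    p95 = rtts[int(0.95 * (n - 1))]  # nearest-rank percentile is enough here
    return n, rtts[0], mean, p95, rtts[-1]

if __name__ == '__main__':
    # Usage: python latency.py ping HOST COUNT | python rtt_stats.py
    n, lo, mean, p95, hi = summarize(sys.stdin)
    print('n=%d min=%.8f mean=%.8f p95=%.8f max=%.8f' % (n, lo, mean, p95, hi))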
# Copyright (C) 2011 Daniele Simonetti # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA. from PySide import QtCore, QtGui import dialogs class Sink3(QtCore.QObject): def __init__(self, parent = None): super(Sink3, self).__init__(parent) self.form = parent def show_add_weapon(self): form = self.form dlg = dialogs.ChooseItemDialog(form.pc, 'weapon', form.dstore, form) filter = self.sender().parent().property('filter') if filter is not None: dlg.set_filter(filter) if dlg.exec_() == QtGui.QDialog.DialogCode.Accepted: form.update_from_model() def show_add_cust_weapon(self): form = self.form dlg = dialogs.CustomWeaponDialog(form.pc, form.dstore, form) if dlg.exec_() == QtGui.QDialog.DialogCode.Accepted: form.update_from_model() def edit_selected_weapon(self): form = self.form view_ = None try: view_ = self.sender().parent().property('source') except Exception as e: print repr(e) if view_ is None: return sel_idx = view_.selectionModel().currentIndex() if not sel_idx.isValid(): return sel_itm = view_.model().data(sel_idx, QtCore.Qt.UserRole) dlg = dialogs.CustomWeaponDialog(form.pc, form.dstore, form) dlg.edit_mode = True print('loading weap {0}, tags: {1}'.format(sel_itm.name, sel_itm.tags)) dlg.load_item(sel_itm) if dlg.exec_() == QtGui.QDialog.DialogCode.Accepted: form.update_from_model() def remove_selected_weapon(self): form = self.form view_ = None try: view_ = self.sender().parent().property('source') except Exception as e: print repr(e) if view_ is None: return sel_idx = view_.selectionModel().currentIndex() if not sel_idx.isValid(): return sel_itm = view_.model().data(sel_idx, QtCore.Qt.UserRole) form.pc.weapons.remove(sel_itm) form.update_from_model() def on_increase_item_qty(self): form = self.form view_ = None try: view_ = self.sender().parent().property('source') except Exception as e: print repr(e) if view_ is None: return sel_idx = view_.selectionModel().currentIndex() if not sel_idx.isValid(): return sel_itm = view_.model().data(sel_idx, QtCore.Qt.UserRole) if sel_itm.qty < 9999: sel_itm.qty += 1 form.update_from_model() sel_idx = view_.model().index(sel_idx.row(), 0) view_.selectionModel().setCurrentIndex(sel_idx, QtGui.QItemSelectionModel.Select | QtGui.QItemSelectionModel.Rows) def on_decrease_item_qty(self): form = self.form view_ = None try: view_ = self.sender().parent().property('source') except Exception as e: print repr(e) if view_ is None: return sel_idx = view_.selectionModel().currentIndex() if not sel_idx.isValid(): return sel_itm = view_.model().data(sel_idx, QtCore.Qt.UserRole) if sel_itm.qty > 1: sel_itm.qty -= 1 form.update_from_model() sel_idx = view_.model().index(sel_idx.row(), 0) view_.selectionModel().setCurrentIndex(sel_idx, QtGui.QItemSelectionModel.Select | QtGui.QItemSelectionModel.Rows)
tectronics/l5rcm
sinks/sink_3.py
Python
gpl-3.0
4,505
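The sinks above locate their target view through Qt dynamic properties: the widget that triggers a slot carries a 'source' or 'filter' property, recovered via self.sender().parent().property(...). A minimal sketch of the property half of that convention, assuming PySide is installed (no GUI event loop is needed); the property values are invented for illustration.

from PySide import QtCore

action_parent = QtCore.QObject()
# Any Python object can be attached under a dynamic property name.
action_parent.setProperty('source', 'weapon_table_view')
action_parent.setProperty('filter', {'tag': 'melee'})

print(action_parent.property('source'))  # -> 'weapon_table_view'
print(action_parent.property('filter'))  # -> {'tag': 'melee'}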
from __future__ import unicode_literals import importlib import logging from celery.signals import worker_process_init from django.conf.urls import include, url from django.contrib.auth.models import User, AnonymousUser from django.conf import settings from temba.channels.views import register, sync from django.views.i18n import javascript_catalog from django.conf.urls.static import static # javascript translation packages js_info_dict = { 'packages': (), # this is empty due to the fact that all translation are in one folder } urlpatterns = [ url(r'^', include('temba.public.urls')), url(r'^', include('temba.msgs.urls')), url(r'^', include('temba.contacts.urls')), url(r'^', include('temba.orgs.urls')), url(r'^', include('temba.schedules.urls')), url(r'^', include('temba.flows.urls')), url(r'^', include('temba.reports.urls')), url(r'^', include('temba.triggers.urls')), url(r'^', include('temba.campaigns.urls')), url(r'^', include('temba.ivr.urls')), url(r'^', include('temba.locations.urls')), url(r'^', include('temba.api.urls')), url(r'^', include('temba.channels.urls')), url(r'^', include('temba.airtime.urls')), url(r'^relayers/relayer/sync/(\d+)/$', sync, {}, 'sync'), url(r'^relayers/relayer/register/$', register, {}, 'register'), url(r'^users/', include('smartmin.users.urls')), url(r'^imports/', include('smartmin.csv_imports.urls')), url(r'^assets/', include('temba.assets.urls')), url(r'^jsi18n/$', javascript_catalog, js_info_dict, name='django.views.i18n.javascript_catalog'), ] if settings.DEBUG: urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT) # import any additional urls for app in settings.APP_URLS: # pragma: needs cover importlib.import_module(app) # provide a utility method to initialize our analytics def init_analytics(): import analytics analytics_key = getattr(settings, 'SEGMENT_IO_KEY', None) if analytics_key: # pragma: needs cover analytics.init(analytics_key, send=settings.IS_PROD, log=not settings.IS_PROD, log_level=logging.DEBUG) from temba.utils.analytics import init_librato librato_user = getattr(settings, 'LIBRATO_USER', None) librato_token = getattr(settings, 'LIBRATO_TOKEN', None) if librato_user and librato_token: # pragma: needs cover init_librato(librato_user, librato_token) # initialize our analytics (the signal below will initialize each worker) init_analytics() @worker_process_init.connect def configure_workers(sender=None, **kwargs): init_analytics() # pragma: needs cover def track_user(self): # pragma: no cover """ Should the current user be tracked """ # don't track unless we are on production if not settings.IS_PROD: return False # always track them if they haven't logged in if not self.is_authenticated() or self.is_anonymous(): return True # never track nyaruka email accounts if 'nyaruka' in self.email: return False # never track nyaruka org org = self.get_org() if org and org.name and 'nyaruka' in org.name.lower(): return False return True User.track_user = track_user AnonymousUser.track_user = track_user def handler500(request): """ 500 error handler which includes ``request`` in the context. Templates: `500.html` Context: None """ from django.template import Context, loader from django.http import HttpResponseServerError t = loader.get_template('500.html') return HttpResponseServerError(t.render(Context({'request': request}))) # pragma: needs cover
tsotetsi/textily-web
temba/urls.py
Python
agpl-3.0
3,662
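The track_user assignment above is plain monkey-patching: a module-level function gains its self argument simply by being assigned onto the class as an attribute. A dependency-free illustration of the same mechanism with a stand-in User class; the email addresses are made up.

class User(object):
    def __init__(self, email):
        self.email = email

def track_user(self):
    # Same shape as the real method: opt known-internal accounts out.
    return 'nyaruka' not in self.email

User.track_user = track_user

print(User('someone@example.com').track_user())  # -> True
print(User('dev@nyaruka.com').track_user())      # -> False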
""" Module matching %GC compo distribution b/w fg and bg. """ import sys import random from Bio import SeqIO from utils import GC def fg_GC_bins(fg_file): """ Compute G+C content for all sequences in the foreground. It computes the %GC content and store the information in a list. To each G+C percentage bin, we associate the number of sequences falling in the corresponding bin. Return lists of GC contents, GC bins, and lengths distrib. """ stream = open(fg_file) gc_bins = [0] * 101 gc_list = [] lengths = [] for record in SeqIO.parse(stream, "fasta"): gc = GC(record.seq) gc_list.append(gc) gc_bins[gc] += 1 lengths.append(len(record.seq)) stream.close() return gc_list, gc_bins, lengths def fg_len_GC_bins(fg_file): """ Compute G+C content for all sequences in the foreground. Computes %GC contant and store the information in a list. To each G+C percentage bin, we associate the number of sequences falling in the corresponding bin. Return lists of GC contents, GC bins, and lengths distrib. """ stream = open(fg_file) gc_bins = [] for _ in range(0, 101): gc_bins.append({}) gc_list = [] lengths = [] for record in SeqIO.parse(stream, "fasta"): gc = GC(record.seq) gc_list.append(gc) length = len(record) lengths.append(length) if length in gc_bins[gc]: gc_bins[gc][length] += 1 else: gc_bins[gc][length] = 1 stream.close() return gc_list, gc_bins, lengths def print_rec(rec, stream): """ Print a record to a stream output. """ stream.write("{0}\n".format(rec.format("fasta"))) def print_in_bg_dir(gc_bins, bg_dir, with_len=False): """ Print the sequences in the bg directory in bin files. """ for percent in xrange(0, 101): with open("{0}/bg_bin_{1}.txt".format(bg_dir, percent), 'w') as stream: if with_len: for length in gc_bins[percent]: for rec in gc_bins[percent][length]: print_rec(rec, stream) else: for rec in gc_bins[percent]: print_rec(rec, stream) def bg_GC_bins(bg_file, bg_dir): """ Compute G+C content for all sequences in the background. Compute and store the GC information in a list. To each G+C percentage bin, we associate the corresponding sequence names. Files representing the binning are stored in the "bg_dir" directory. Return lists of GC contents, GC bins, and lengths distrib. """ stream = open(bg_file) gc_bins = [] gc_list = [] lengths = [] for _ in xrange(0, 101): gc_bins.append([]) for record in SeqIO.parse(stream, "fasta"): gc = GC(record.seq) gc_list.append(gc) gc_bins[gc].append(record) lengths.append(len(record.seq)) stream.close() print_in_bg_dir(gc_bins, bg_dir) return gc_list, gc_bins, lengths def bg_len_GC_bins(bg_file, bg_dir): """ Compute G+C content for all sequences in the background. Compute and store the %GC information in a list. To each G+C percentage bin, we associate the corresponding sequence names. Return lists of GC contents, GC bins, and lengths distrib. """ stream = open(bg_file) gc_bins = [] gc_list = [] lengths = [] for _ in range(0, 101): gc_bins.append({}) for record in SeqIO.parse(stream, "fasta"): gc = GC(record.seq) gc_list.append(gc) if len(record) in gc_bins[gc]: gc_bins[gc][len(record)].append(record) else: gc_bins[gc][len(record)] = [record] lengths.append(len(record.seq)) stream.close() print_in_bg_dir(gc_bins, bg_dir, True) return gc_list, gc_bins, lengths def get_bins_from_bg_dir(bg_dir, percent): """ Return the sequences from the corresponding bin file. 
""" with open("{0}/bg_bin_{1:d}.txt".format(bg_dir, percent)) as stream: bin_seq = [] for record in SeqIO.parse(stream, "fasta"): bin_seq.append(record) return bin_seq def generate_sequences(fg_bins, bg_bins, bg_dir, nfold): """ Choose randomly the background sequences in each bin of %GC. Follow the same distribution as the one of foreground sequences with a nfold ratio. Return the list of %GC and length distrib. """ lengths = [] gc_list = [] for percent in range(0, 101): if fg_bins[percent]: random.seed() try: nb = fg_bins[percent] * nfold if bg_bins: bin_seq = bg_bins[percent] else: bin_seq = get_bins_from_bg_dir(bg_dir, percent) sample = random.sample(bin_seq, nb) gc_list.extend([percent] * nb) except ValueError: sys.stderr.write("""*** WARNING *** Sample larger than population for {0:d}% G+C content: {1:d} needed and {2:d} obtained\n""".format( percent, fg_bins[percent] * nfold, len(bin_seq))) sample = bin_seq gc_list.extend([percent] * len(bin_seq)) for r in sample: print r.format("fasta"), lengths.append(len(r.seq)) return gc_list, lengths def extract_seq_rec(size, nb, bg_keys, bg, accu, index): """ Extract "nb" sequences with sizes equal to "size" nt. We try to get exact size or as close as possible to "size" nt. This is a tail recursive function with the accumulator "accu" looking for sizes "bg_keys" in the bg set "bg". Return the accumulator and the number of found sequences. """ if not (bg_keys and nb): # End of the recursion since we have no sequence # in the bg or enough retrieved (nb=0) return accu, nb if index > len(bg_keys) - 1: return extract_seq_rec(size, nb, bg_keys, bg, accu, index - 1) if not bg_keys: # No more size in the background to be checked so return # what was in the previous size bin if bg[bg_keys[index - 1]]: random.shuffle(bg[bg_keys[index - 1]]) accu.extend(bg[bg_keys[index - 1]][0:nb]) bg[bg_keys[index - 1]] = bg[index - 1][nb:] return accu, nb - len(bg[bg_keys[index - 1]][0:nb]) else: return accu, nb if bg_keys[index] >= size: # No need to go further in the different sizes # within the background if (index == 0 or not bg[bg_keys[index - 1]] or bg_keys[index] - size < size - bg_keys[index - 1]): # Which # size is the closest to the expected one? 
=> we go for the current one # if YES random.shuffle(bg[bg_keys[index]]) accu.extend(bg[bg_keys[index]][0:nb]) new_nb = nb - len(bg[bg_keys[index]][0:nb]) if bg[bg_keys[index]][nb:]: # Check that there is sequences in the # background for this size bin bg[bg_keys[index]] = bg[bg_keys[index]][nb:] return extract_seq_rec(size, new_nb, bg_keys, bg, accu, index) else: bg[bg_keys[index]] = bg[bg_keys[index]][nb:] del bg_keys[index] return extract_seq_rec(size, new_nb, bg_keys, bg, accu, index) else: # The previous size was the closest random.shuffle(bg[bg_keys[index - 1]]) accu.extend(bg[bg_keys[index - 1]][0:nb]) new_nb = nb - len(bg[bg_keys[index - 1]][0:nb]) if bg[bg_keys[index - 1]][nb:]: # Check that there is sequences in # the background for this size bin bg[bg_keys[index - 1]] = bg[bg_keys[index - 1]][nb:] return extract_seq_rec(size, new_nb, bg_keys, bg, accu, index) else: bg[bg_keys[index - 1]] = bg[bg_keys[index - 1]][nb:] del bg_keys[index - 1] return extract_seq_rec(size, new_nb, bg_keys, bg, accu, index - 1) elif index == len(bg_keys) - 1: random.shuffle(bg[bg_keys[index]]) accu.extend(bg[bg_keys[index]][0:nb]) new_nb = nb - len(bg[bg_keys[index]][0:nb]) if bg[bg_keys[index]][nb:]: bg[bg_keys[index]] = bg[bg_keys[index]][nb:] return extract_seq_rec(size, new_nb, bg_keys, bg, accu, index) else: bg[bg_keys[index]] = bg[bg_keys[index]][nb:] del bg_keys[index] return extract_seq_rec(size, new_nb, bg_keys, bg, accu, index) else: return extract_seq_rec(size, nb, bg_keys, bg, accu, index + 1) def get_bins_len_from_bg_dir(bg_dir, percent): """ Return the sequences from the corresponding bin file. """ with open("{0}/bg_bin_{1:d}.txt".format(bg_dir, percent)) as stream: bin_seq = {} for record in SeqIO.parse(stream, "fasta"): length = len(record) if length in bin_seq: bin_seq[length].append(record) else: bin_seq[length] = [record] return bin_seq def generate_len_sequences(fg, bg, bg_dir, nfold): """ Extract the sequences from the bg with similar sizes as in the fg. Return the %GC list and length distrib. """ sys.setrecursionlimit(10000) random.seed() lengths = [] gc_list = [] for percent in range(0, 101): if fg[percent]: nb = sum(fg[percent].values()) * nfold sequences = [] for size in fg[percent].keys(): nb_to_retrieve = fg[percent][size] * nfold if bg: bg_bins = bg[percent] else: bg_bins = get_bins_len_from_bg_dir(bg_dir, percent) bg_keys = sorted(bg_bins.keys()) seqs, _ = extract_seq_rec(size, nb_to_retrieve, bg_keys, bg_bins, [], 0) sequences.extend(seqs) nb_match = len(sequences) gc_list.extend([percent] * nb_match) if nb_match != nb: sys.stderr.write("""*** WARNING *** Sample larger than population for {0:d}% G+C content: {1:d} needed and {2:d} obtained\n""".format(percent, nb, nb_match)) for s in sequences: lengths.append(len(s)) print "{0:s}".format(s.format("fasta")), return gc_list, lengths
wassermanlab/BiasAway
GC_compo_matching.py
Python
gpl-3.0
10,757
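The binning scheme above hinges on GC() (imported from utils) returning an integer percentage so it can index a 101-slot list directly. Here is a standalone illustration of that scheme; the gc_percent helper is a stand-in written for this sketch, not the project's own implementation.

def gc_percent(seq):
    """Integer %GC of a sequence string, 0..100."""
    seq = seq.upper()
    return int(round(100.0 * sum(seq.count(b) for b in 'GC') / len(seq)))

gc_bins = [0] * 101  # one slot per integer %GC, exactly as in fg_GC_bins()
for seq in ('ATGCGC', 'ATATAT', 'GGGCCC'):
    gc_bins[gc_percent(seq)] += 1

print(gc_bins[67], gc_bins[0], gc_bins[100])  # -> 1 1 1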
# -*- coding: utf-8 -*- ''' Copyright 2013 Google Inc. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. Google Compute Engine Module ============================ The Google Compute Engine module. This module interfaces with Google Compute Engine (GCE). To authenticate to GCE, you will need to create a Service Account. To set up Service Account Authentication, follow the :ref:`gce_setup` instructions. Example Provider Configuration ------------------------------ .. code-block:: yaml my-gce-config: # The Google Cloud Platform Project ID project: "my-project-id" # The Service ACcount client ID service_account_email_address: 1234567890@developer.gserviceaccount.com # The location of the private key (PEM format) service_account_private_key: /home/erjohnso/PRIVKEY.pem driver: gce # Specify whether to use public or private IP for deploy script. # Valid options are: # private_ips - The salt-master is also hosted with GCE # public_ips - The salt-master is hosted outside of GCE ssh_interface: public_ips :maintainer: Eric Johnson <erjohnso@google.com> :depends: libcloud >= 0.14.1 ''' # pylint: disable=invalid-name,function-redefined # Import python libs from __future__ import absolute_import import os import re import stat import pprint import logging import msgpack from ast import literal_eval # Import 3rd-party libs # pylint: disable=import-error try: from libcloud.compute.types import Provider from libcloud.compute.providers import get_driver from libcloud.loadbalancer.types import Provider as Provider_lb from libcloud.loadbalancer.providers import get_driver as get_driver_lb from libcloud.common.google import ( ResourceInUseError, ResourceNotFoundError, ) HAS_LIBCLOUD = True except ImportError: HAS_LIBCLOUD = False # pylint: enable=import-error # Import salt libs from salt.utils import namespaced_function import salt.ext.six as six import salt.utils.cloud import salt.config as config from salt.utils import http from salt import syspaths from salt.cloud.libcloudfuncs import * # pylint: disable=redefined-builtin,wildcard-import,unused-wildcard-import from salt.exceptions import ( SaltCloudSystemExit, ) # Get logging started log = logging.getLogger(__name__) __virtualname__ = 'gce' # custom UA _UA_PRODUCT = 'salt-cloud' _UA_VERSION = '0.2.0' # Redirect GCE functions to this module namespace avail_locations = namespaced_function(avail_locations, globals()) script = namespaced_function(script, globals()) destroy = namespaced_function(destroy, globals()) list_nodes = namespaced_function(list_nodes, globals()) list_nodes_full = namespaced_function(list_nodes_full, globals()) list_nodes_select = namespaced_function(list_nodes_select, globals()) GCE_VM_NAME_REGEX = re.compile(r'^(?:[a-z](?:[-a-z0-9]{0,61}[a-z0-9])?)$') # Only load in this module if the GCE configurations are in place def __virtual__(): ''' Set up the libcloud functions and check for GCE configurations. 
''' if get_configured_provider() is False: return False if get_dependencies() is False: return False for provider, details in six.iteritems(__opts__['providers']): if 'gce' not in details: continue parameters = details['gce'] pathname = os.path.expanduser(parameters['service_account_private_key']) if not os.path.exists(pathname): log.error( 'The GCE service account private key {0!r} used in ' 'the {1!r} provider configuration does not exist\n'.format( parameters['service_account_private_key'], provider ) ) return False key_mode = str( oct(stat.S_IMODE(os.stat(pathname).st_mode)) ) if key_mode not in ('0400', '0600'): log.error( 'The GCE service account private key {0!r} used in ' 'the {1!r} provider configuration needs to be set to ' 'mode 0400 or 0600\n'.format( parameters['service_account_private_key'], provider ) ) return False return __virtualname__ def get_configured_provider(): ''' Return the first configured instance. ''' return config.is_provider_configured( __opts__, __active_provider_name__ or 'gce', ('project', 'service_account_email_address', 'service_account_private_key') ) def get_dependencies(): ''' Warn if dependencies aren't met. ''' return config.check_driver_dependencies( __virtualname__, {'libcloud': HAS_LIBCLOUD} ) def get_lb_conn(gce_driver=None): ''' Return a load-balancer conn object ''' if not gce_driver: raise SaltCloudSystemExit( 'Missing gce_driver for get_lb_conn method.' ) return get_driver_lb(Provider_lb.GCE)(gce_driver=gce_driver) def get_conn(): ''' Return a conn object for the passed VM data ''' driver = get_driver(Provider.GCE) provider = get_configured_provider() project = config.get_cloud_config_value('project', provider, __opts__) email = config.get_cloud_config_value('service_account_email_address', provider, __opts__) private_key = config.get_cloud_config_value('service_account_private_key', provider, __opts__) gce = driver(email, private_key, project=project) gce.connection.user_agent_append('{0}/{1}'.format(_UA_PRODUCT, _UA_VERSION)) return gce def _expand_item(item): ''' Convert the libcloud object into something more serializable. ''' ret = {} ret.update(item.__dict__) return ret def _expand_node(node): ''' Convert the libcloud Node object into something more serializable. ''' ret = {} ret.update(node.__dict__) try: del ret['extra']['boot_disk'] except Exception: # pylint: disable=W0703 pass zone = ret['extra']['zone'] ret['extra']['zone'] = {} ret['extra']['zone'].update(zone.__dict__) return ret def _expand_disk(disk): ''' Convert the libcloud Volume object into something more serializable. ''' ret = {} ret.update(disk.__dict__) zone = ret['extra']['zone'] ret['extra']['zone'] = {} ret['extra']['zone'].update(zone.__dict__) return ret def _expand_address(addy): ''' Convert the libcloud GCEAddress object into something more serializable. ''' ret = {} ret.update(addy.__dict__) ret['extra']['zone'] = addy.region.name return ret def _expand_balancer(lb): ''' Convert the libcloud load-balancer object into something more serializable. 
''' ret = {} ret.update(lb.__dict__) hc = ret['extra']['healthchecks'] ret['extra']['healthchecks'] = [] for item in hc: ret['extra']['healthchecks'].append(_expand_item(item)) fwr = ret['extra']['forwarding_rule'] tp = ret['extra']['forwarding_rule'].targetpool reg = ret['extra']['forwarding_rule'].region ret['extra']['forwarding_rule'] = {} ret['extra']['forwarding_rule'].update(fwr.__dict__) ret['extra']['forwarding_rule']['targetpool'] = tp.name ret['extra']['forwarding_rule']['region'] = reg.name tp = ret['extra']['targetpool'] hc = ret['extra']['targetpool'].healthchecks nodes = ret['extra']['targetpool'].nodes region = ret['extra']['targetpool'].region zones = ret['extra']['targetpool'].region.zones ret['extra']['targetpool'] = {} ret['extra']['targetpool'].update(tp.__dict__) ret['extra']['targetpool']['region'] = _expand_item(region) ret['extra']['targetpool']['nodes'] = [] for n in nodes: ret['extra']['targetpool']['nodes'].append(_expand_node(n)) ret['extra']['targetpool']['healthchecks'] = [] for hci in hc: ret['extra']['targetpool']['healthchecks'].append(hci.name) ret['extra']['targetpool']['region']['zones'] = [] for z in zones: ret['extra']['targetpool']['region']['zones'].append(z.name) return ret def show_instance(vm_name, call=None): ''' Show the details of the existing instance. ''' if call != 'action': raise SaltCloudSystemExit( 'The show_instance action must be called with -a or --action.' ) conn = get_conn() node = _expand_node(conn.ex_get_node(vm_name)) salt.utils.cloud.cache_node(node, __active_provider_name__, __opts__) return node def avail_sizes(conn=None): ''' Return a dict of available instances sizes (a.k.a machine types) and convert them to something more serializable. ''' if not conn: conn = get_conn() raw_sizes = conn.list_sizes('all') # get *all* the machine types! sizes = [] for size in raw_sizes: zone = size.extra['zone'] size.extra['zone'] = {} size.extra['zone'].update(zone.__dict__) mtype = {} mtype.update(size.__dict__) sizes.append(mtype) return sizes def avail_images(conn=None): ''' Return a dict of all available VM images on the cloud provider with relevant data Note that for GCE, there are custom images within the project, but the generic images are in other projects. This returns a dict of images in the project plus images in 'debian-cloud' and 'centos-cloud' (If there is overlap in names, the one in the current project is used.) ''' if not conn: conn = get_conn() project_images = conn.list_images() debian_images = conn.list_images('debian-cloud') centos_images = conn.list_images('centos-cloud') all_images = debian_images + centos_images + project_images ret = {} for img in all_images: ret[img.name] = {} for attr in dir(img): if attr.startswith('_'): continue ret[img.name][attr] = getattr(img, attr) return ret def __get_image(conn, vm_): ''' The get_image for GCE allows partial name matching and returns a libcloud object. ''' img = config.get_cloud_config_value( 'image', vm_, __opts__, default='debian-7', search_global=False) return conn.ex_get_image(img) def __get_location(conn, vm_): ''' Need to override libcloud to find the zone. ''' location = config.get_cloud_config_value( 'location', vm_, __opts__) return conn.ex_get_zone(location) def __get_size(conn, vm_): ''' Need to override libcloud to find the machine type in the proper zone. 
''' size = config.get_cloud_config_value( 'size', vm_, __opts__, default='n1-standard-1', search_global=False) return conn.ex_get_size(size, __get_location(conn, vm_)) def __get_tags(vm_): ''' Get configured tags. ''' t = config.get_cloud_config_value( 'tags', vm_, __opts__, default='[]', search_global=False) # Consider warning the user that the tags in the cloud profile # could not be interpreted, bad formatting? try: tags = literal_eval(t) except Exception: # pylint: disable=W0703 tags = None if not tags or not isinstance(tags, list): tags = None return tags def __get_metadata(vm_): ''' Get configured metadata and add 'salt-cloud-profile'. ''' md = config.get_cloud_config_value( 'metadata', vm_, __opts__, default='{}', search_global=False) # Consider warning the user that the metadata in the cloud profile # could not be interpreted, bad formatting? try: metadata = literal_eval(md) except Exception: # pylint: disable=W0703 metadata = None if not metadata or not isinstance(metadata, dict): metadata = {'items': [{ 'key': 'salt-cloud-profile', 'value': vm_['profile'] }]} else: metadata['salt-cloud-profile'] = vm_['profile'] items = [] for k, v in six.iteritems(metadata): items.append({'key': k, 'value': v}) metadata = {'items': items} return metadata def __get_host(node, vm_): ''' Return public IP, private IP, or hostname for the libcloud 'node' object ''' if __get_ssh_interface(vm_) == 'private_ips' or vm_['external_ip'] is None: ip_address = node.private_ips[0] log.info('Salt node data. Private_ip: {0}'.format(ip_address)) else: ip_address = node.public_ips[0] log.info('Salt node data. Public_ip: {0}'.format(ip_address)) if len(ip_address) > 0: return ip_address return node.name def __get_network(conn, vm_): ''' Return a GCE libcloud network object with matching name ''' network = config.get_cloud_config_value( 'network', vm_, __opts__, default='default', search_global=False) return conn.ex_get_network(network) def __get_ssh_interface(vm_): ''' Return the ssh_interface type to connect to. Either 'public_ips' (default) or 'private_ips'. ''' return config.get_cloud_config_value( 'ssh_interface', vm_, __opts__, default='public_ips', search_global=False ) def __create_orget_address(conn, name, region): ''' Reuse or create a static IP address. Returns a native GCEAddress construct to use with libcloud. ''' try: addy = conn.ex_get_address(name, region) except ResourceNotFoundError: # pylint: disable=W0703 addr_kwargs = { 'name': name, 'region': region } new_addy = create_address(addr_kwargs, "function") addy = conn.ex_get_address(new_addy['name'], new_addy['region']) return addy def _parse_allow(allow): ''' Convert firewall rule allowed user-string to specified REST API format. ''' # input=> tcp:53,tcp:80,tcp:443,icmp,tcp:4201,udp:53 # output<= [ # {"IPProtocol": "tcp", "ports": ["53","80","443","4201"]}, # {"IPProtocol": "icmp"}, # {"IPProtocol": "udp", "ports": ["53"]}, # ] seen_protos = {} allow_dict = [] protocols = allow.split(',') for p in protocols: pairs = p.split(':') if pairs[0].lower() not in ['tcp', 'udp', 'icmp']: raise SaltCloudSystemExit( 'Unsupported protocol {0}. 
Must be tcp, udp, or icmp.'.format( pairs[0] ) ) if len(pairs) == 1 or pairs[0].lower() == 'icmp': seen_protos[pairs[0]] = [] else: if pairs[0] not in seen_protos: seen_protos[pairs[0]] = [pairs[1]] else: seen_protos[pairs[0]].append(pairs[1]) for k in seen_protos: d = {'IPProtocol': k} if len(seen_protos[k]) > 0: d['ports'] = seen_protos[k] allow_dict.append(d) log.debug("firewall allowed protocols/ports: {0}".format(allow_dict)) return allow_dict def __get_ssh_credentials(vm_): ''' Get configured SSH credentials. ''' ssh_user = config.get_cloud_config_value( 'ssh_username', vm_, __opts__, default=os.getenv('USER')) ssh_key = config.get_cloud_config_value( 'ssh_keyfile', vm_, __opts__, default=os.path.expanduser('~/.ssh/google_compute_engine')) return ssh_user, ssh_key def create_network(kwargs=None, call=None): ''' Create a GCE network. CLI Example: .. code-block:: bash salt-cloud -f create_network gce name=mynet cidr=10.10.10.0/24 ''' if call != 'function': raise SaltCloudSystemExit( 'The create_network function must be called with -f or --function.' ) if not kwargs or 'name' not in kwargs: log.error( 'A name must be specified when creating a network.' ) return False if 'cidr' not in kwargs: log.error( 'A network CIDR range must be specified when creating a network.' ) return name = kwargs['name'] cidr = kwargs['cidr'] conn = get_conn() salt.utils.cloud.fire_event( 'event', 'create network', 'salt/cloud/net/creating', { 'name': name, 'cidr': cidr, }, transport=__opts__['transport'] ) network = conn.ex_create_network(name, cidr) salt.utils.cloud.fire_event( 'event', 'created network', 'salt/cloud/net/created', { 'name': name, 'cidr': cidr, }, transport=__opts__['transport'] ) return _expand_item(network) def delete_network(kwargs=None, call=None): ''' Permanently delete a network. CLI Example: .. code-block:: bash salt-cloud -f delete_network gce name=mynet ''' if call != 'function': raise SaltCloudSystemExit( 'The delete_network function must be called with -f or --function.' ) if not kwargs or 'name' not in kwargs: log.error( 'A name must be specified when deleting a network.' ) return False name = kwargs['name'] conn = get_conn() salt.utils.cloud.fire_event( 'event', 'delete network', 'salt/cloud/net/deleting', { 'name': name, }, transport=__opts__['transport'] ) try: result = conn.ex_destroy_network( conn.ex_get_network(name) ) except ResourceNotFoundError as exc: log.error( 'Nework {0} could not be found.\n' 'The following exception was thrown by libcloud:\n{1}'.format( name, exc), exc_info_on_loglevel=logging.DEBUG ) return False salt.utils.cloud.fire_event( 'event', 'deleted network', 'salt/cloud/net/deleted', { 'name': name, }, transport=__opts__['transport'] ) return result def show_network(kwargs=None, call=None): ''' Show the details of an existing network. CLI Example: .. code-block:: bash salt-cloud -f show_network gce name=mynet ''' if call != 'function': raise SaltCloudSystemExit( 'The show_network function must be called with -f or --function.' ) if not kwargs or 'name' not in kwargs: log.error( 'Must specify name of network.' ) return False conn = get_conn() return _expand_item(conn.ex_get_network(kwargs['name'])) def create_fwrule(kwargs=None, call=None): ''' Create a GCE firewall rule. The 'default' network is used if not specified. CLI Example: .. code-block:: bash salt-cloud -f create_fwrule gce name=allow-http allow=tcp:80 ''' if call != 'function': raise SaltCloudSystemExit( 'The create_fwrule function must be called with -f or --function.' 
) if not kwargs or 'name' not in kwargs: log.error( 'A name must be specified when creating a firewall rule.' ) return False if 'allow' not in kwargs: log.error( 'Must use "allow" to specify allowed protocols/ports.' ) return False name = kwargs['name'] network_name = kwargs.get('network', 'default') allow = _parse_allow(kwargs['allow']) src_range = kwargs.get('src_range', '0.0.0.0/0') src_tags = kwargs.get('src_tags', None) dst_tags = kwargs.get('dst_tags', None) if src_range: src_range = src_range.split(',') if src_tags: src_tags = src_tags.split(',') if dst_tags: dst_tags = dst_tags.split(',') conn = get_conn() salt.utils.cloud.fire_event( 'event', 'create firewall', 'salt/cloud/firewall/creating', { 'name': name, 'network': network_name, 'allow': kwargs['allow'], }, transport=__opts__['transport'] ) fwrule = conn.ex_create_firewall( name, allow, network=network_name, source_ranges=src_range, source_tags=src_tags, target_tags=dst_tags ) salt.utils.cloud.fire_event( 'event', 'created firewall', 'salt/cloud/firewall/created', { 'name': name, 'network': network_name, 'allow': kwargs['allow'], }, transport=__opts__['transport'] ) return _expand_item(fwrule) def delete_fwrule(kwargs=None, call=None): ''' Permanently delete a firewall rule. CLI Example: .. code-block:: bash salt-cloud -f delete_fwrule gce name=allow-http ''' if call != 'function': raise SaltCloudSystemExit( 'The delete_fwrule function must be called with -f or --function.' ) if not kwargs or 'name' not in kwargs: log.error( 'A name must be specified when deleting a firewall rule.' ) return False name = kwargs['name'] conn = get_conn() salt.utils.cloud.fire_event( 'event', 'delete firewall', 'salt/cloud/firewall/deleting', { 'name': name, }, transport=__opts__['transport'] ) try: result = conn.ex_destroy_firewall( conn.ex_get_firewall(name) ) except ResourceNotFoundError as exc: log.error( 'Rule {0} could not be found.\n' 'The following exception was thrown by libcloud:\n{1}'.format( name, exc), exc_info_on_loglevel=logging.DEBUG ) return False salt.utils.cloud.fire_event( 'event', 'deleted firewall', 'salt/cloud/firewall/deleted', { 'name': name, }, transport=__opts__['transport'] ) return result def show_fwrule(kwargs=None, call=None): ''' Show the details of an existing firewall rule. CLI Example: .. code-block:: bash salt-cloud -f show_fwrule gce name=allow-http ''' if call != 'function': raise SaltCloudSystemExit( 'The show_fwrule function must be called with -f or --function.' ) if not kwargs or 'name' not in kwargs: log.error( 'Must specify name of network.' ) return False conn = get_conn() return _expand_item(conn.ex_get_firewall(kwargs['name'])) def create_hc(kwargs=None, call=None): ''' Create an HTTP health check configuration. CLI Example: .. code-block:: bash salt-cloud -f create_hc gce name=hc path=/healthy port=80 ''' if call != 'function': raise SaltCloudSystemExit( 'The create_hc function must be called with -f or --function.' ) if not kwargs or 'name' not in kwargs: log.error( 'A name must be specified when creating a health check.' 
) return False name = kwargs['name'] host = kwargs.get('host', None) path = kwargs.get('path', None) port = kwargs.get('port', None) interval = kwargs.get('interval', None) timeout = kwargs.get('timeout', None) unhealthy_threshold = kwargs.get('unhealthy_threshold', None) healthy_threshold = kwargs.get('healthy_threshold', None) conn = get_conn() salt.utils.cloud.fire_event( 'event', 'create health_check', 'salt/cloud/healthcheck/creating', { 'name': name, 'host': host, 'path': path, 'port': port, 'interval': interval, 'timeout': timeout, 'unhealthy_threshold': unhealthy_threshold, 'healthy_threshold': healthy_threshold, }, transport=__opts__['transport'] ) hc = conn.ex_create_healthcheck( name, host=host, path=path, port=port, interval=interval, timeout=timeout, unhealthy_threshold=unhealthy_threshold, healthy_threshold=healthy_threshold ) salt.utils.cloud.fire_event( 'event', 'created health_check', 'salt/cloud/healthcheck/created', { 'name': name, 'host': host, 'path': path, 'port': port, 'interval': interval, 'timeout': timeout, 'unhealthy_threshold': unhealthy_threshold, 'healthy_threshold': healthy_threshold, }, transport=__opts__['transport'] ) return _expand_item(hc) def delete_hc(kwargs=None, call=None): ''' Permanently delete a health check. CLI Example: .. code-block:: bash salt-cloud -f delete_hc gce name=hc ''' if call != 'function': raise SaltCloudSystemExit( 'The delete_hc function must be called with -f or --function.' ) if not kwargs or 'name' not in kwargs: log.error( 'A name must be specified when deleting a health check.' ) return False name = kwargs['name'] conn = get_conn() salt.utils.cloud.fire_event( 'event', 'delete health_check', 'salt/cloud/healthcheck/deleting', { 'name': name, }, transport=__opts__['transport'] ) try: result = conn.ex_destroy_healthcheck( conn.ex_get_healthcheck(name) ) except ResourceNotFoundError as exc: log.error( 'Health check {0} could not be found.\n' 'The following exception was thrown by libcloud:\n{1}'.format( name, exc), exc_info_on_loglevel=logging.DEBUG ) return False salt.utils.cloud.fire_event( 'event', 'deleted health_check', 'salt/cloud/healthcheck/deleted', { 'name': name, }, transport=__opts__['transport'] ) return result def show_hc(kwargs=None, call=None): ''' Show the details of an existing health check. CLI Example: .. code-block:: bash salt-cloud -f show_hc gce name=hc ''' if call != 'function': raise SaltCloudSystemExit( 'The show_hc function must be called with -f or --function.' ) if not kwargs or 'name' not in kwargs: log.error( 'Must specify name of health check.' ) return False conn = get_conn() return _expand_item(conn.ex_get_healthcheck(kwargs['name'])) def create_address(kwargs=None, call=None): ''' Create a static address in a region. CLI Example: .. code-block:: bash salt-cloud -f create_address gce name=my-ip region=us-central1 address=IP ''' if call != 'function': raise SaltCloudSystemExit( 'The create_address function must be called with -f or --function.' ) if not kwargs or 'name' not in kwargs: log.error( 'A name must be specified when creating an address.' ) return False if 'region' not in kwargs: log.error( 'A region must be specified for the address.' 
) return False name = kwargs['name'] ex_region = kwargs['region'] ex_address = kwargs.get("address", None) conn = get_conn() salt.utils.cloud.fire_event( 'event', 'create address', 'salt/cloud/address/creating', kwargs, transport=__opts__['transport'] ) addy = conn.ex_create_address(name, ex_region, ex_address) salt.utils.cloud.fire_event( 'event', 'created address', 'salt/cloud/address/created', kwargs, transport=__opts__['transport'] ) log.info('Created GCE Address '+name) return _expand_address(addy) def delete_address(kwargs=None, call=None): ''' Permanently delete a static address. CLI Example: .. code-block:: bash salt-cloud -f delete_address gce name=my-ip ''' if call != 'function': raise SaltCloudSystemExit( 'The delete_address function must be called with -f or --function.' ) if not kwargs or 'name' not in kwargs: log.error( 'A name must be specified when deleting an address.' ) return False if not kwargs or 'region' not in kwargs: log.error( 'A region must be specified when deleting an address.' ) return False name = kwargs['name'] ex_region = kwargs['region'] conn = get_conn() salt.utils.cloud.fire_event( 'event', 'delete address', 'salt/cloud/address/deleting', { 'name': name, }, transport=__opts__['transport'] ) try: result = conn.ex_destroy_address( conn.ex_get_address(name, ex_region) ) except ResourceNotFoundError as exc: log.error( 'Address {0} could not be found (region {1})\n' 'The following exception was thrown by libcloud:\n{2}'.format( name, ex_region, exc), exc_info_on_loglevel=logging.DEBUG ) return False salt.utils.cloud.fire_event( 'event', 'deleted address', 'salt/cloud/address/deleted', { 'name': name, }, transport=__opts__['transport'] ) log.info('Deleted GCE Address ' + name) return result def show_address(kwargs=None, call=None): ''' Show the details of an existing static address. CLI Example: .. code-block:: bash salt-cloud -f show_address gce name=mysnapshot region=us-central1 ''' if call != 'function': raise SaltCloudSystemExit( 'The show_snapshot function must be called with -f or --function.' ) if not kwargs or 'name' not in kwargs: log.error( 'Must specify name.' ) return False if not kwargs or 'region' not in kwargs: log.error( 'Must specify region.' ) return False conn = get_conn() return _expand_address(conn.ex_get_address(kwargs['name'], kwargs['region'])) def create_lb(kwargs=None, call=None): ''' Create a load-balancer configuration. CLI Example: .. code-block:: bash salt-cloud -f create_lb gce name=lb region=us-central1 ports=80 ''' if call != 'function': raise SaltCloudSystemExit( 'The create_lb function must be called with -f or --function.' ) if not kwargs or 'name' not in kwargs: log.error( 'A name must be specified when creating a health check.' ) return False if 'ports' not in kwargs: log.error( 'A port or port-range must be specified for the load-balancer.' ) return False if 'region' not in kwargs: log.error( 'A region must be specified for the load-balancer.' ) return False if 'members' not in kwargs: log.error( 'A comma-separated list of members must be specified.' 
) return False name = kwargs['name'] ports = kwargs['ports'] ex_region = kwargs['region'] members = kwargs.get('members').split(',') protocol = kwargs.get('protocol', 'tcp') algorithm = kwargs.get('algorithm', None) ex_healthchecks = kwargs.get('healthchecks', None) # pylint: disable=W0511 conn = get_conn() lb_conn = get_lb_conn(conn) ex_address = kwargs.get('address', None) if ex_address is not None: ex_address = __create_orget_address(conn, ex_address, ex_region) if ex_healthchecks: ex_healthchecks = ex_healthchecks.split(',') salt.utils.cloud.fire_event( 'event', 'create load_balancer', 'salt/cloud/loadbalancer/creating', kwargs, transport=__opts__['transport'] ) lb = lb_conn.create_balancer( name, ports, protocol, algorithm, members, ex_region=ex_region, ex_healthchecks=ex_healthchecks, ex_address=ex_address ) salt.utils.cloud.fire_event( 'event', 'created load_balancer', 'salt/cloud/loadbalancer/created', kwargs, transport=__opts__['transport'] ) return _expand_balancer(lb) def delete_lb(kwargs=None, call=None): ''' Permanently delete a load-balancer. CLI Example: .. code-block:: bash salt-cloud -f delete_lb gce name=lb ''' if call != 'function': raise SaltCloudSystemExit( 'The delete_hc function must be called with -f or --function.' ) if not kwargs or 'name' not in kwargs: log.error( 'A name must be specified when deleting a health check.' ) return False name = kwargs['name'] lb_conn = get_lb_conn(get_conn()) salt.utils.cloud.fire_event( 'event', 'delete load_balancer', 'salt/cloud/loadbalancer/deleting', { 'name': name, }, transport=__opts__['transport'] ) try: result = lb_conn.destroy_balancer( lb_conn.get_balancer(name) ) except ResourceNotFoundError as exc: log.error( 'Load balancer {0} could not be found.\n' 'The following exception was thrown by libcloud:\n{1}'.format( name, exc), exc_info_on_loglevel=logging.DEBUG ) return False salt.utils.cloud.fire_event( 'event', 'deleted load_balancer', 'salt/cloud/loadbalancer/deleted', { 'name': name, }, transport=__opts__['transport'] ) return result def show_lb(kwargs=None, call=None): ''' Show the details of an existing load-balancer. CLI Example: .. code-block:: bash salt-cloud -f show_lb gce name=lb ''' if call != 'function': raise SaltCloudSystemExit( 'The show_lb function must be called with -f or --function.' ) if not kwargs or 'name' not in kwargs: log.error( 'Must specify name of load-balancer.' ) return False lb_conn = get_lb_conn(get_conn()) return _expand_balancer(lb_conn.get_balancer(kwargs['name'])) def attach_lb(kwargs=None, call=None): ''' Add an existing node/member to an existing load-balancer configuration. CLI Example: .. code-block:: bash salt-cloud -f attach_lb gce name=lb member=myinstance ''' if call != 'function': raise SaltCloudSystemExit( 'The attach_lb function must be called with -f or --function.' ) if not kwargs or 'name' not in kwargs: log.error( 'A load-balancer name must be specified.' ) return False if 'member' not in kwargs: log.error( 'A node name name must be specified.' 
) return False conn = get_conn() node = conn.ex_get_node(kwargs['member']) lb_conn = get_lb_conn(conn) lb = lb_conn.get_balancer(kwargs['name']) salt.utils.cloud.fire_event( 'event', 'attach load_balancer', 'salt/cloud/loadbalancer/attaching', kwargs, transport=__opts__['transport'] ) result = lb_conn.balancer_attach_compute_node(lb, node) salt.utils.cloud.fire_event( 'event', 'attached load_balancer', 'salt/cloud/loadbalancer/attached', kwargs, transport=__opts__['transport'] ) return _expand_item(result) def detach_lb(kwargs=None, call=None): ''' Remove an existing node/member from an existing load-balancer configuration. CLI Example: .. code-block:: bash salt-cloud -f detach_lb gce name=lb member=myinstance ''' if call != 'function': raise SaltCloudSystemExit( 'The detach_lb function must be called with -f or --function.' ) if not kwargs or 'name' not in kwargs: log.error( 'A load-balancer name must be specified.' ) return False if 'member' not in kwargs: log.error( 'A node name name must be specified.' ) return False conn = get_conn() lb_conn = get_lb_conn(conn) lb = lb_conn.get_balancer(kwargs['name']) member_list = lb_conn.balancer_list_members(lb) remove_member = None for member in member_list: if member.id == kwargs['member']: remove_member = member break if not remove_member: log.error( 'The specified member {0} was not a member of LB {1}.'.format( kwargs['member'], kwargs['name'] ) ) return False salt.utils.cloud.fire_event( 'event', 'detach load_balancer', 'salt/cloud/loadbalancer/detaching', kwargs, transport=__opts__['transport'] ) result = lb_conn.balancer_detach_member(lb, remove_member) salt.utils.cloud.fire_event( 'event', 'detached load_balancer', 'salt/cloud/loadbalancer/detached', kwargs, transport=__opts__['transport'] ) return result def delete_snapshot(kwargs=None, call=None): ''' Permanently delete a disk snapshot. CLI Example: .. code-block:: bash salt-cloud -f delete_snapshot gce name=disk-snap-1 ''' if call != 'function': raise SaltCloudSystemExit( 'The delete_snapshot function must be called with -f or --function.' ) if not kwargs or 'name' not in kwargs: log.error( 'A name must be specified when deleting a snapshot.' ) return False name = kwargs['name'] conn = get_conn() salt.utils.cloud.fire_event( 'event', 'delete snapshot', 'salt/cloud/snapshot/deleting', { 'name': name, }, transport=__opts__['transport'] ) try: result = conn.destroy_volume_snapshot( conn.ex_get_snapshot(name) ) except ResourceNotFoundError as exc: log.error( 'Snapshot {0} could not be found.\n' 'The following exception was thrown by libcloud:\n{1}'.format( name, exc), exc_info_on_loglevel=logging.DEBUG ) return False salt.utils.cloud.fire_event( 'event', 'deleted snapshot', 'salt/cloud/snapshot/deleted', { 'name': name, }, transport=__opts__['transport'] ) return result def delete_disk(kwargs=None, call=None): ''' Permanently delete a persistent disk. CLI Example: .. code-block:: bash salt-cloud -f delete_disk gce disk_name=pd ''' if call != 'function': raise SaltCloudSystemExit( 'The delete_disk function must be called with -f or --function.' ) if not kwargs or 'disk_name' not in kwargs: log.error( 'A disk_name must be specified when deleting a disk.' 
) return False conn = get_conn() disk = conn.ex_get_volume(kwargs.get('disk_name')) salt.utils.cloud.fire_event( 'event', 'delete disk', 'salt/cloud/disk/deleting', { 'name': disk.name, 'location': disk.extra['zone'].name, 'size': disk.size, }, transport=__opts__['transport'] ) try: result = conn.destroy_volume(disk) except ResourceInUseError as exc: log.error( 'Disk {0} is in use and must be detached before deleting.\n' 'The following exception was thrown by libcloud:\n{1}'.format( disk.name, exc), exc_info_on_loglevel=logging.DEBUG ) return False salt.utils.cloud.fire_event( 'event', 'deleted disk', 'salt/cloud/disk/deleted', { 'name': disk.name, 'location': disk.extra['zone'].name, 'size': disk.size, }, transport=__opts__['transport'] ) return result def create_disk(kwargs=None, call=None): ''' Create a new persistent disk. Must specify `disk_name` and `location`. Can also specify an `image` or `snapshot` but if neither of those are specified, a `size` (in GB) is required. CLI Example: .. code-block:: bash salt-cloud -f create_disk gce disk_name=pd size=300 location=us-central1-b ''' if call != 'function': raise SaltCloudSystemExit( 'The create_disk function must be called with -f or --function.' ) if kwargs is None: kwargs = {} name = kwargs.get('disk_name', None) image = kwargs.get('image', None) location = kwargs.get('location', None) size = kwargs.get('size', None) snapshot = kwargs.get('snapshot', None) if location is None: log.error( 'A location (zone) must be specified when creating a disk.' ) return False if name is None: log.error( 'A disk_name must be specified when creating a disk.' ) return False if size is None and image is None and snapshot is None: log.error( 'Must specify image, snapshot, or size.' ) return False conn = get_conn() location = conn.ex_get_zone(kwargs['location']) use_existing = True salt.utils.cloud.fire_event( 'event', 'create disk', 'salt/cloud/disk/creating', { 'name': name, 'location': location.name, 'image': image, 'snapshot': snapshot, }, transport=__opts__['transport'] ) disk = conn.create_volume( size, name, location, snapshot, image, use_existing ) salt.utils.cloud.fire_event( 'event', 'created disk', 'salt/cloud/disk/created', { 'name': name, 'location': location.name, 'image': image, 'snapshot': snapshot, }, transport=__opts__['transport'] ) return _expand_disk(disk) def create_snapshot(kwargs=None, call=None): ''' Create a new disk snapshot. Must specify `name` and `disk_name`. CLI Example: .. code-block:: bash salt-cloud -f create_snapshot gce name=snap1 disk_name=pd ''' if call != 'function': raise SaltCloudSystemExit( 'The create_snapshot function must be called with -f or --function.' ) if not kwargs or 'name' not in kwargs: log.error( 'A name must be specified when creating a snapshot.' ) return False if 'disk_name' not in kwargs: log.error( 'A disk_name must be specified when creating a snapshot.'
) return False conn = get_conn() name = kwargs.get('name') disk_name = kwargs.get('disk_name') try: disk = conn.ex_get_volume(disk_name) except ResourceNotFoundError as exc: log.error( 'Disk {0} could not be found.\n' 'The following exception was thrown by libcloud:\n{1}'.format( disk_name, exc), exc_info_on_loglevel=logging.DEBUG ) return False salt.utils.cloud.fire_event( 'event', 'create snapshot', 'salt/cloud/snapshot/creating', { 'name': name, 'disk_name': disk_name, }, transport=__opts__['transport'] ) snapshot = conn.create_volume_snapshot(disk, name) salt.utils.cloud.fire_event( 'event', 'created snapshot', 'salt/cloud/snapshot/created', { 'name': name, 'disk_name': disk_name, }, transport=__opts__['transport'] ) return _expand_item(snapshot) def show_disk(name=None, kwargs=None, call=None): # pylint: disable=W0613 ''' Show the details of an existing disk. CLI Example: .. code-block:: bash salt-cloud -a show_disk myinstance disk_name=mydisk salt-cloud -f show_disk gce disk_name=mydisk ''' if not kwargs or 'disk_name' not in kwargs: log.error( 'Must specify disk_name.' ) return False conn = get_conn() return _expand_disk(conn.ex_get_volume(kwargs['disk_name'])) def show_snapshot(kwargs=None, call=None): ''' Show the details of an existing snapshot. CLI Example: .. code-block:: bash salt-cloud -f show_snapshot gce name=mysnapshot ''' if call != 'function': raise SaltCloudSystemExit( 'The show_snapshot function must be called with -f or --function.' ) if not kwargs or 'name' not in kwargs: log.error( 'Must specify name.' ) return False conn = get_conn() return _expand_item(conn.ex_get_snapshot(kwargs['name'])) def detach_disk(name=None, kwargs=None, call=None): ''' Detach a disk from an instance. CLI Example: .. code-block:: bash salt-cloud -a detach_disk myinstance disk_name=mydisk ''' if call != 'action': raise SaltCloudSystemExit( 'The detach_disk action must be called with -a or --action.' ) if not name: log.error( 'Must specify an instance name.' ) return False if not kwargs or 'disk_name' not in kwargs: log.error( 'Must specify a disk_name to detach.' ) return False node_name = name disk_name = kwargs['disk_name'] conn = get_conn() node = conn.ex_get_node(node_name) disk = conn.ex_get_volume(disk_name) salt.utils.cloud.fire_event( 'event', 'detach disk', 'salt/cloud/disk/detaching', { 'name': node_name, 'disk_name': disk_name, }, transport=__opts__['transport'] ) result = conn.detach_volume(disk, node) salt.utils.cloud.fire_event( 'event', 'detached disk', 'salt/cloud/disk/detached', { 'name': node_name, 'disk_name': disk_name, }, transport=__opts__['transport'] ) return result def attach_disk(name=None, kwargs=None, call=None): ''' Attach an existing disk to an existing instance. CLI Example: .. code-block:: bash salt-cloud -a attach_disk myinstance disk_name=mydisk mode=READ_WRITE ''' if call != 'action': raise SaltCloudSystemExit( 'The attach_disk action must be called with -a or --action.' ) if not name: log.error( 'Must specify an instance name.' ) return False if not kwargs or 'disk_name' not in kwargs: log.error( 'Must specify a disk_name to attach.' ) return False node_name = name disk_name = kwargs['disk_name'] mode = kwargs.get('mode', 'READ_WRITE').upper() boot = kwargs.get('boot', False) if boot and boot.lower() in ['true', 'yes', 'enabled']: boot = True else: boot = False if mode not in ['READ_WRITE', 'READ_ONLY']: log.error( 'Mode must be either READ_ONLY or (default) READ_WRITE.'
) return False conn = get_conn() node = conn.ex_get_node(node_name) disk = conn.ex_get_volume(disk_name) salt.utils.cloud.fire_event( 'event', 'attach disk', 'salt/cloud/disk/attaching', { 'name': node_name, 'disk_name': disk_name, 'mode': mode, 'boot': boot, }, transport=__opts__['transport'] ) result = conn.attach_volume(node, disk, ex_mode=mode, ex_boot=boot) salt.utils.cloud.fire_event( 'event', 'attached disk', 'salt/cloud/disk/attached', { 'name': node_name, 'disk_name': disk_name, 'mode': mode, 'boot': boot, }, transport=__opts__['transport'] ) return result def reboot(vm_name, call=None): ''' Call GCE 'reset' on the instance. CLI Example: .. code-block:: bash salt-cloud -a reboot myinstance ''' if call != 'action': raise SaltCloudSystemExit( 'The reboot action must be called with -a or --action.' ) conn = get_conn() return conn.reboot_node( conn.ex_get_node(vm_name) ) def destroy(vm_name, call=None): ''' Call 'destroy' on the instance. Can be called with "-a destroy" or -d CLI Example: .. code-block:: bash salt-cloud -a destroy myinstance1 myinstance2 ... salt-cloud -d myinstance1 myinstance2 ... ''' if call and call != 'action': raise SaltCloudSystemExit( 'The destroy action must be called with -d or "-a destroy".' ) conn = get_conn() try: node = conn.ex_get_node(vm_name) except Exception as exc: # pylint: disable=W0703 log.error( 'Could not locate instance {0}\n\n' 'The following exception was thrown by libcloud when trying to ' 'run the initial deployment: \n{1}'.format( vm_name, exc ), exc_info_on_loglevel=logging.DEBUG ) raise SaltCloudSystemExit( 'Could not find instance {0}.'.format(vm_name) ) salt.utils.cloud.fire_event( 'event', 'delete instance', 'salt/cloud/{0}/deleting'.format(vm_name), {'name': vm_name}, transport=__opts__['transport'] ) # Use the instance metadata to see if its salt cloud profile was # preserved during instance create. If so, use the profile value # to see if the 'delete_boot_pd' value is set to delete the disk # along with the instance. profile = None if node.extra['metadata'] and 'items' in node.extra['metadata']: for md in node.extra['metadata']['items']: if md['key'] == 'salt-cloud-profile': profile = md['value'] vm_ = get_configured_provider() delete_boot_pd = False if profile and profile in vm_['profiles'] and 'delete_boot_pd' in vm_['profiles'][profile]: delete_boot_pd = vm_['profiles'][profile]['delete_boot_pd'] try: inst_deleted = conn.destroy_node(node) except Exception as exc: # pylint: disable=W0703 log.error( 'Could not destroy instance {0}\n\n' 'The following exception was thrown by libcloud when trying to ' 'run the initial deployment: \n{1}'.format( vm_name, exc ), exc_info_on_loglevel=logging.DEBUG ) raise SaltCloudSystemExit( 'Could not destroy instance {0}.'.format(vm_name) ) salt.utils.cloud.fire_event( 'event', 'delete instance', 'salt/cloud/{0}/deleted'.format(vm_name), {'name': vm_name}, transport=__opts__['transport'] ) if delete_boot_pd: log.info( 'delete_boot_pd is enabled for the instance profile, ' 'attempting to delete disk' ) salt.utils.cloud.fire_event( 'event', 'delete disk', 'salt/cloud/disk/deleting', {'name': vm_name}, transport=__opts__['transport'] ) try: conn.destroy_volume(conn.ex_get_volume(vm_name)) except Exception as exc: # pylint: disable=W0703 # Note that we don't raise a SaltCloudSystemExit here in order # to allow completion of instance deletion. Just log the error # and keep going. 
log.error( 'Could not destroy disk {0}\n\n' 'The following exception was thrown by libcloud when trying ' 'to run the initial deployment: \n{1}'.format( vm_name, exc ), exc_info_on_loglevel=logging.DEBUG ) salt.utils.cloud.fire_event( 'event', 'deleted disk', 'salt/cloud/disk/deleted', {'name': vm_name}, transport=__opts__['transport'] ) if __opts__.get('update_cachedir', False) is True: salt.utils.cloud.delete_minion_cachedir(vm_name, __active_provider_name__.split(':')[0], __opts__) return inst_deleted def create(vm_=None, call=None): ''' Create a single GCE instance from a data dict. ''' if call: raise SaltCloudSystemExit( 'You cannot create an instance with -a or -f.' ) if not GCE_VM_NAME_REGEX.match(vm_['name']): raise SaltCloudSystemExit( 'VM names must start with a letter, only contain letters, numbers, or dashes ' 'and cannot end in a dash.' ) try: # Check for required profile parameters before sending any API calls. if vm_['profile'] and config.is_profile_configured(__opts__, __active_provider_name__ or 'gce', vm_['profile'], vm_=vm_) is False: return False except AttributeError: pass # Since using "provider: <provider-engine>" is deprecated, alias provider # to use driver: "driver: <provider-engine>" if 'provider' in vm_: vm_['driver'] = vm_.pop('provider') conn = get_conn() kwargs = { 'name': vm_['name'], 'size': __get_size(conn, vm_), 'image': __get_image(conn, vm_), 'location': __get_location(conn, vm_), 'ex_network': __get_network(conn, vm_), 'ex_tags': __get_tags(vm_), 'ex_metadata': __get_metadata(vm_), } external_ip = config.get_cloud_config_value( 'external_ip', vm_, __opts__, default='ephemeral' ) if external_ip.lower() == 'ephemeral': external_ip = 'ephemeral' elif external_ip == 'None': external_ip = None else: region = '-'.join(kwargs['location'].name.split('-')[:2]) external_ip = __create_orget_address(conn, external_ip, region) kwargs['external_ip'] = external_ip vm_['external_ip'] = external_ip if LIBCLOUD_VERSION_INFO > (0, 15, 1): kwargs.update({ 'ex_disk_type': config.get_cloud_config_value( 'ex_disk_type', vm_, __opts__, default='pd-standard'), 'ex_disk_auto_delete': config.get_cloud_config_value( 'ex_disk_auto_delete', vm_, __opts__, default=True), 'ex_disks_gce_struct': config.get_cloud_config_value( 'ex_disks_gce_struct', vm_, __opts__, default=None), 'ex_service_accounts': config.get_cloud_config_value( 'ex_service_accounts', vm_, __opts__, default=None), 'ex_can_ip_forward': config.get_cloud_config_value( 'ip_forwarding', vm_, __opts__, default=False ) }) if kwargs.get('ex_disk_type') not in ('pd-standard', 'pd-ssd'): raise SaltCloudSystemExit( 'The value of \'ex_disk_type\' needs to be one of: ' '\'pd-standard\', \'pd-ssd\'' ) log.info('Creating GCE instance {0} in {1}'.format(vm_['name'], kwargs['location'].name) ) log.debug('Create instance kwargs {0}'.format(str(kwargs))) salt.utils.cloud.fire_event( 'event', 'create instance', 'salt/cloud/{0}/creating'.format(vm_['name']), { 'name': vm_['name'], 'profile': vm_['profile'], 'provider': vm_['driver'], }, transport=__opts__['transport'] ) try: node_data = conn.create_node(**kwargs) except Exception as exc: # pylint: disable=W0703 log.error( 'Error creating {0} on GCE\n\n' 'The following exception was thrown by libcloud when trying to ' 'run the initial deployment: \n{1}'.format( vm_['name'], exc ), exc_info_on_loglevel=logging.DEBUG ) return False try: node_dict = show_instance(node_data['name'], 'action') except TypeError: # node_data is a libcloud Node which is unsubscriptable node_dict = 
show_instance(node_data.name, 'action') ssh_user, ssh_key = __get_ssh_credentials(vm_) vm_['ssh_host'] = __get_host(node_data, vm_) vm_['key_filename'] = ssh_key salt.utils.cloud.bootstrap(vm_, __opts__) log.info('Created Cloud VM {0[name]!r}'.format(vm_)) log.trace( '{0[name]!r} VM creation details:\n{1}'.format( vm_, pprint.pformat(node_dict) ) ) salt.utils.cloud.fire_event( 'event', 'created instance', 'salt/cloud/{0}/created'.format(vm_['name']), { 'name': vm_['name'], 'profile': vm_['profile'], 'provider': vm_['driver'], }, transport=__opts__['transport'] ) return node_dict def update_pricing(kwargs=None, call=None): ''' Download most recent pricing information from GCE and save locally CLI Examples: .. code-block:: bash salt-cloud -f update_pricing my-gce-config .. versionadded:: 2015.8.0 ''' url = 'https://cloudpricingcalculator.appspot.com/static/data/pricelist.json' price_json = http.query(url, decode=True, decode_type='json') outfile = os.path.join( syspaths.CACHE_DIR, 'cloud', 'gce-pricing.p' ) with salt.utils.fopen(outfile, 'w') as fho: msgpack.dump(price_json['dict'], fho) return True def show_pricing(kwargs=None, call=None): ''' Show pricing for a particular profile. This is only an estimate, based on unofficial pricing sources. .. versionadded:: 2015.8.0 CLI Examples: .. code-block:: bash salt-cloud -f show_pricing my-gce-config profile=my-profile ''' profile = __opts__['profiles'].get(kwargs['profile'], {}) if not profile: return {'Error': 'The requested profile was not found'} # Make sure the profile belongs to GCE provider = profile.get('provider', '0:0') comps = provider.split(':') if len(comps) < 2 or comps[1] != 'gce': return {'Error': 'The requested profile does not belong to GCE'} comps = profile.get('location', 'us').split('-') region = comps[0] size = 'CP-COMPUTEENGINE-VMIMAGE-{0}'.format(profile['size'].upper()) pricefile = os.path.join( syspaths.CACHE_DIR, 'cloud', 'gce-pricing.p' ) if not os.path.exists(pricefile): update_pricing() with salt.utils.fopen(pricefile, 'r') as fho: sizes = msgpack.load(fho) per_hour = float(sizes['gcp_price_list'][size][region]) week1_discount = float(sizes['gcp_price_list']['sustained_use_tiers']['0.25']) week2_discount = float(sizes['gcp_price_list']['sustained_use_tiers']['0.50']) week3_discount = float(sizes['gcp_price_list']['sustained_use_tiers']['0.75']) week4_discount = float(sizes['gcp_price_list']['sustained_use_tiers']['1.0']) week1 = per_hour * (730/4) * week1_discount week2 = per_hour * (730/4) * week2_discount week3 = per_hour * (730/4) * week3_discount week4 = per_hour * (730/4) * week4_discount raw = sizes ret = {} ret['per_hour'] = per_hour ret['per_day'] = ret['per_hour'] * 24 ret['per_week'] = ret['per_day'] * 7 ret['per_month'] = week1 + week2 + week3 + week4 ret['per_year'] = ret['per_month'] * 12 if kwargs.get('raw', False): ret['_raw'] = raw return {profile['profile']: ret}
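# --------------------------------------------------------------------
# Illustrative sketch only, not part of the driver above: show_pricing()
# treats a ~730-hour month as four equal usage tiers and scales the
# hourly rate by each tier's sustained-use discount before summing.
# The helper below reproduces that arithmetic in isolation; the function
# name and the example multipliers are hypothetical stand-ins for the
# values read from the cached gce-pricing.p file.

def _estimate_monthly_cost(per_hour, tier_discounts):
    '''Return a monthly cost estimate: each tier covers one quarter of a
    730-hour month, billed at per_hour times that tier's multiplier.'''
    hours_per_tier = 730 / 4.0  # ~182.5 hours per sustained-use tier
    return sum(per_hour * hours_per_tier * d for d in tier_discounts)

# Example: a $0.05/hour machine type with hypothetical multipliers.
# _estimate_monthly_cost(0.05, (1.0, 0.8, 0.6, 0.4)) -> ~25.55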
# -*- coding: utf-8 -*- # Copyright (C) 2006-2012 Søren Roug, European Environment Agency # # This library is free software; you can redistribute it and/or # modify it under the terms of the GNU Lesser General Public # License as published by the Free Software Foundation; either # version 2.1 of the License, or (at your option) any later version. # # This library is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with this library; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA # # Contributor(s): # from namespaces import * import re import types pattern_color = re.compile(r'#[0-9a-fA-F]{6}') pattern_vector3D = re.compile( r'\([ ]*-?([0-9]+(\.[0-9]*)?|\.[0-9]+)([ ]+-?([0-9]+(\.[0-9]*)?|\.[0-9]+)){2}[ ]*\)') def make_NCName(arg): for c in (':', ' '): arg = arg.replace(c, "_%x_" % ord(c)) return arg def cnv_anyURI(attribute, arg, element): return unicode(arg) def cnv_boolean(attribute, arg, element): """ XML Schema Part 2: Datatypes Second Edition An instance of a datatype that is defined as boolean can have the following legal literals {true, false, 1, 0} """ if str(arg).lower() in ("0", "false", "no"): return "false" if str(arg).lower() in ("1", "true", "yes"): return "true" raise ValueError("'%s' not allowed as Boolean value for %s" % ( str(arg), attribute)) # Potentially accept color values def cnv_color(attribute, arg, element): """ A RGB color in conformance with §5.9.11 of [XSL], that is a RGB color in notation “#rrggbb”, where rr, gg and bb are 8-bit hexadecimal digits. """ return str(arg) def cnv_configtype(attribute, arg, element): if str(arg) not in ("boolean", "short", "int", "long", "double", "string", "datetime", "base64Binary"): raise ValueError("'%s' not allowed" % str(arg)) return str(arg) def cnv_data_source_has_labels(attribute, arg, element): if str(arg) not in ("none", "row", "column", "both"): raise ValueError("'%s' not allowed" % str(arg)) return str(arg) # Understand different date formats def cnv_date(attribute, arg, element): """ A dateOrDateTime value is either an [xmlschema-2] date value or an [xmlschema-2] dateTime value. """ return str(arg) def cnv_dateTime(attribute, arg, element): """ A dateOrDateTime value is either an [xmlschema-2] date value or an [xmlschema-2] dateTime value. """ return str(arg) def cnv_double(attribute, arg, element): return str(arg) def cnv_duration(attribute, arg, element): return str(arg) def cnv_family(attribute, arg, element): """ A style family """ if str(arg) not in ( "text", "paragraph", "section", "ruby", "table", "table-column", "table-row", "table-cell", "graphic", "presentation", "drawing-page", "chart"): raise ValueError("'%s' not allowed" % str(arg)) return str(arg) def __save_prefix(attribute, arg, element): prefix = arg.split(':', 1)[0] if prefix == arg: return unicode(arg) namespace = element.get_knownns(prefix) if namespace is None: #raise ValueError, "'%s' is an unknown prefix" % str(prefix) return unicode(arg) p = element.get_nsprefix(namespace) return unicode(arg) def cnv_formula(attribute, arg, element): """ A string containing a formula. 
Formulas do not have a predefined syntax, but the string should begin with a namespace prefix, followed by a “:” (COLON, U+003A) separator, followed by the text of the formula. The namespace bound to the prefix determines the syntax and semantics of the formula. """ return __save_prefix(attribute, arg, element) def cnv_ID(attribute, arg, element): return str(arg) def cnv_IDREF(attribute, arg, element): return str(arg) def cnv_integer(attribute, arg, element): return str(arg) def cnv_legend_position(attribute, arg, element): if str(arg) not in ("start", "end", "top", "bottom", "top-start", "bottom-start", "top-end", "bottom-end"): raise ValueError("'%s' not allowed" % str(arg)) return str(arg) pattern_length = re.compile( r'-?([0-9]+(\.[0-9]*)?|\.[0-9]+)((cm)|(mm)|(in)|(pt)|(pc)|(px))') def cnv_length(attribute, arg, element): """ A (positive or negative) physical length, consisting of magnitude and unit, in conformance with the Units of Measure defined in §5.9.13 of [XSL]. """ global pattern_length if not pattern_length.match(arg): raise ValueError("'%s' is not a valid length" % arg) return arg def cnv_lengthorpercent(attribute, arg, element): failed = False try: return cnv_length(attribute, arg, element) except BaseException: failed = True try: return cnv_percent(attribute, arg, element) except BaseException: failed = True if failed: raise ValueError("'%s' is not a valid length or percent" % arg) return arg def cnv_metavaluetype(attribute, arg, element): if str(arg) not in ("float", "date", "time", "boolean", "string"): raise ValueError("'%s' not allowed" % str(arg)) return str(arg) def cnv_major_minor(attribute, arg, element): if arg not in ('major', 'minor'): raise ValueError("'%s' is not either 'minor' or 'major'" % arg) return arg pattern_namespacedToken = re.compile(r'[0-9a-zA-Z_]+:[0-9a-zA-Z._\-]+') def cnv_namespacedToken(attribute, arg, element): global pattern_namespacedToken if not pattern_namespacedToken.match(arg): raise ValueError("'%s' is not a valid namespaced token" % arg) return __save_prefix(attribute, arg, element) def cnv_NCName(attribute, arg, element): """ NCName is defined in http://www.w3.org/TR/REC-xml-names/#NT-NCName Essentially an XML name minus ':' """ if type(arg) in types.StringTypes: return make_NCName(arg) else: return arg.getAttrNS(STYLENS, 'name') # This function takes either an instance of a style (preferred) # or a text string naming the style. If it is a text string, then it must # already have been converted to an NCName # The text-string argument is mainly for when we build a structure from XML def cnv_StyleNameRef(attribute, arg, element): try: return arg.getAttrNS(STYLENS, 'name') except BaseException: return arg # This function takes either an instance of a style (preferred) # or a text string naming the style.
If it is a text string, then it must # already have been converted to an NCName # The text-string argument is mainly for when we build a structure from XML def cnv_DrawNameRef(attribute, arg, element): try: return arg.getAttrNS(DRAWNS, 'name') except BaseException: return arg # Must accept list of Style objects def cnv_NCNames(attribute, arg, element): return ' '.join(arg) def cnv_nonNegativeInteger(attribute, arg, element): return str(arg) pattern_percent = re.compile(r'-?([0-9]+(\.[0-9]*)?|\.[0-9]+)%') def cnv_percent(attribute, arg, element): global pattern_percent if not pattern_percent.match(arg): raise ValueError("'%s' is not a valid percentage" % arg) return arg # Real one doesn't allow floating point values pattern_points = re.compile(r'-?[0-9]+,-?[0-9]+([ ]+-?[0-9]+,-?[0-9]+)*') #pattern_points = re.compile(r'-?[0-9.]+,-?[0-9.]+([ ]+-?[0-9.]+,-?[0-9.]+)*') def cnv_points(attribute, arg, element): global pattern_points if type(arg) in types.StringTypes: if not pattern_points.match(arg): raise ValueError( "x,y are separated by a comma and the points are separated by white spaces") return arg else: try: strarg = ' '.join(["%d,%d" % p for p in arg]) except BaseException: raise ValueError( "Points must be string or [(0,0),(1,1)] - not %s" % arg) return strarg def cnv_positiveInteger(attribute, arg, element): return str(arg) def cnv_string(attribute, arg, element): return unicode(arg) def cnv_textnoteclass(attribute, arg, element): if str(arg) not in ("footnote", "endnote"): raise ValueError("'%s' not allowed" % str(arg)) return str(arg) # Understand different time formats def cnv_time(attribute, arg, element): return str(arg) def cnv_token(attribute, arg, element): return str(arg) pattern_viewbox = re.compile(r'-?[0-9]+([ ]+-?[0-9]+){3}$') def cnv_viewbox(attribute, arg, element): global pattern_viewbox if not pattern_viewbox.match(arg): raise ValueError( "viewBox must be four integers separated by whitespaces") return arg def cnv_xlinkshow(attribute, arg, element): if str(arg) not in ("new", "replace", "embed"): raise ValueError("'%s' not allowed" % str(arg)) return str(arg) attrconverters = { ((ANIMNS, u'audio-level'), None): cnv_double, ((ANIMNS, u'color-interpolation'), None): cnv_string, ((ANIMNS, u'color-interpolation-direction'), None): cnv_string, ((ANIMNS, u'command'), None): cnv_string, ((ANIMNS, u'formula'), None): cnv_string, ((ANIMNS, u'id'), None): cnv_ID, ((ANIMNS, u'iterate-interval'), None): cnv_duration, ((ANIMNS, u'iterate-type'), None): cnv_string, ((ANIMNS, u'name'), None): cnv_string, ((ANIMNS, u'sub-item'), None): cnv_string, ((ANIMNS, u'value'), None): cnv_string, # ((DBNS,u'type'), None): cnv_namespacedToken, ((CHARTNS, u'attached-axis'), None): cnv_string, ((CHARTNS, u'class'), (CHARTNS, u'grid')): cnv_major_minor, ((CHARTNS, u'class'), None): cnv_namespacedToken, ((CHARTNS, u'column-mapping'), None): cnv_string, ((CHARTNS, u'connect-bars'), None): cnv_boolean, ((CHARTNS, u'data-label-number'), None): cnv_string, ((CHARTNS, u'data-label-symbol'), None): cnv_boolean, ((CHARTNS, u'data-label-text'), None): cnv_boolean, ((CHARTNS, u'data-source-has-labels'), None): cnv_data_source_has_labels, ((CHARTNS, u'deep'), None): cnv_boolean, ((CHARTNS, u'dimension'), None): cnv_string, ((CHARTNS, u'display-label'), None): cnv_boolean, ((CHARTNS, u'error-category'), None): cnv_string, ((CHARTNS, u'error-lower-indicator'), None): cnv_boolean, ((CHARTNS, u'error-lower-limit'), None): cnv_string, ((CHARTNS, u'error-margin'), None): cnv_string, ((CHARTNS, u'error-percentage'),
None): cnv_string, ((CHARTNS, u'error-upper-indicator'), None): cnv_boolean, ((CHARTNS, u'error-upper-limit'), None): cnv_string, ((CHARTNS, u'gap-width'), None): cnv_string, ((CHARTNS, u'interpolation'), None): cnv_string, ((CHARTNS, u'interval-major'), None): cnv_string, ((CHARTNS, u'interval-minor-divisor'), None): cnv_string, ((CHARTNS, u'japanese-candle-stick'), None): cnv_boolean, ((CHARTNS, u'label-arrangement'), None): cnv_string, ((CHARTNS, u'label-cell-address'), None): cnv_string, ((CHARTNS, u'legend-align'), None): cnv_string, ((CHARTNS, u'legend-position'), None): cnv_legend_position, ((CHARTNS, u'lines'), None): cnv_boolean, ((CHARTNS, u'link-data-style-to-source'), None): cnv_boolean, ((CHARTNS, u'logarithmic'), None): cnv_boolean, ((CHARTNS, u'maximum'), None): cnv_string, ((CHARTNS, u'mean-value'), None): cnv_boolean, ((CHARTNS, u'minimum'), None): cnv_string, ((CHARTNS, u'name'), None): cnv_string, ((CHARTNS, u'origin'), None): cnv_string, ((CHARTNS, u'overlap'), None): cnv_string, ((CHARTNS, u'percentage'), None): cnv_boolean, ((CHARTNS, u'pie-offset'), None): cnv_string, ((CHARTNS, u'regression-type'), None): cnv_string, ((CHARTNS, u'repeated'), None): cnv_nonNegativeInteger, ((CHARTNS, u'row-mapping'), None): cnv_string, ((CHARTNS, u'scale-text'), None): cnv_boolean, ((CHARTNS, u'series-source'), None): cnv_string, ((CHARTNS, u'solid-type'), None): cnv_string, ((CHARTNS, u'spline-order'), None): cnv_string, ((CHARTNS, u'spline-resolution'), None): cnv_string, ((CHARTNS, u'stacked'), None): cnv_boolean, ((CHARTNS, u'style-name'), None): cnv_StyleNameRef, ((CHARTNS, u'symbol-height'), None): cnv_string, ((CHARTNS, u'symbol-name'), None): cnv_string, ((CHARTNS, u'symbol-type'), None): cnv_string, ((CHARTNS, u'symbol-width'), None): cnv_string, ((CHARTNS, u'text-overlap'), None): cnv_boolean, ((CHARTNS, u'three-dimensional'), None): cnv_boolean, ((CHARTNS, u'tick-marks-major-inner'), None): cnv_boolean, ((CHARTNS, u'tick-marks-major-outer'), None): cnv_boolean, ((CHARTNS, u'tick-marks-minor-inner'), None): cnv_boolean, ((CHARTNS, u'tick-marks-minor-outer'), None): cnv_boolean, ((CHARTNS, u'values-cell-range-address'), None): cnv_string, ((CHARTNS, u'vertical'), None): cnv_boolean, ((CHARTNS, u'visible'), None): cnv_boolean, ((CONFIGNS, u'name'), None): cnv_formula, ((CONFIGNS, u'type'), None): cnv_configtype, ((DR3DNS, u'ambient-color'), None): cnv_string, ((DR3DNS, u'back-scale'), None): cnv_string, ((DR3DNS, u'backface-culling'), None): cnv_string, ((DR3DNS, u'center'), None): cnv_string, ((DR3DNS, u'close-back'), None): cnv_boolean, ((DR3DNS, u'close-front'), None): cnv_boolean, ((DR3DNS, u'depth'), None): cnv_length, ((DR3DNS, u'diffuse-color'), None): cnv_string, ((DR3DNS, u'direction'), None): cnv_string, ((DR3DNS, u'distance'), None): cnv_length, ((DR3DNS, u'edge-rounding'), None): cnv_string, ((DR3DNS, u'edge-rounding-mode'), None): cnv_string, ((DR3DNS, u'emissive-color'), None): cnv_string, ((DR3DNS, u'enabled'), None): cnv_boolean, ((DR3DNS, u'end-angle'), None): cnv_string, ((DR3DNS, u'focal-length'), None): cnv_length, ((DR3DNS, u'horizontal-segments'), None): cnv_string, ((DR3DNS, u'lighting-mode'), None): cnv_boolean, ((DR3DNS, u'max-edge'), None): cnv_string, ((DR3DNS, u'min-edge'), None): cnv_string, ((DR3DNS, u'normals-direction'), None): cnv_string, ((DR3DNS, u'normals-kind'), None): cnv_string, ((DR3DNS, u'projection'), None): cnv_string, ((DR3DNS, u'shade-mode'), None): cnv_string, ((DR3DNS, u'shadow'), None): cnv_string, ((DR3DNS, u'shadow-slant'), 
None): cnv_nonNegativeInteger, ((DR3DNS, u'shininess'), None): cnv_string, ((DR3DNS, u'size'), None): cnv_string, ((DR3DNS, u'specular'), None): cnv_boolean, ((DR3DNS, u'specular-color'), None): cnv_string, ((DR3DNS, u'texture-filter'), None): cnv_string, ((DR3DNS, u'texture-generation-mode-x'), None): cnv_string, ((DR3DNS, u'texture-generation-mode-y'), None): cnv_string, ((DR3DNS, u'texture-kind'), None): cnv_string, ((DR3DNS, u'texture-mode'), None): cnv_string, ((DR3DNS, u'transform'), None): cnv_string, ((DR3DNS, u'vertical-segments'), None): cnv_string, ((DR3DNS, u'vpn'), None): cnv_string, ((DR3DNS, u'vrp'), None): cnv_string, ((DR3DNS, u'vup'), None): cnv_string, ((DRAWNS, u'align'), None): cnv_string, ((DRAWNS, u'angle'), None): cnv_integer, ((DRAWNS, u'archive'), None): cnv_string, ((DRAWNS, u'auto-grow-height'), None): cnv_boolean, ((DRAWNS, u'auto-grow-width'), None): cnv_boolean, ((DRAWNS, u'background-size'), None): cnv_string, ((DRAWNS, u'blue'), None): cnv_string, ((DRAWNS, u'border'), None): cnv_string, ((DRAWNS, u'caption-angle'), None): cnv_string, ((DRAWNS, u'caption-angle-type'), None): cnv_string, ((DRAWNS, u'caption-escape'), None): cnv_string, ((DRAWNS, u'caption-escape-direction'), None): cnv_string, ((DRAWNS, u'caption-fit-line-length'), None): cnv_boolean, ((DRAWNS, u'caption-gap'), None): cnv_string, ((DRAWNS, u'caption-line-length'), None): cnv_length, ((DRAWNS, u'caption-point-x'), None): cnv_string, ((DRAWNS, u'caption-point-y'), None): cnv_string, ((DRAWNS, u'caption-id'), None): cnv_IDREF, ((DRAWNS, u'caption-type'), None): cnv_string, ((DRAWNS, u'chain-next-name'), None): cnv_string, ((DRAWNS, u'class-id'), None): cnv_string, ((DRAWNS, u'class-names'), None): cnv_NCNames, ((DRAWNS, u'code'), None): cnv_string, ((DRAWNS, u'color'), None): cnv_string, ((DRAWNS, u'color-inversion'), None): cnv_boolean, ((DRAWNS, u'color-mode'), None): cnv_string, ((DRAWNS, u'concave'), None): cnv_string, ((DRAWNS, u'concentric-gradient-fill-allowed'), None): cnv_boolean, ((DRAWNS, u'contrast'), None): cnv_string, ((DRAWNS, u'control'), None): cnv_IDREF, ((DRAWNS, u'copy-of'), None): cnv_string, ((DRAWNS, u'corner-radius'), None): cnv_length, ((DRAWNS, u'corners'), None): cnv_positiveInteger, ((DRAWNS, u'cx'), None): cnv_string, ((DRAWNS, u'cy'), None): cnv_string, ((DRAWNS, u'data'), None): cnv_string, ((DRAWNS, u'decimal-places'), None): cnv_string, ((DRAWNS, u'display'), None): cnv_string, ((DRAWNS, u'display-name'), None): cnv_string, ((DRAWNS, u'distance'), None): cnv_lengthorpercent, ((DRAWNS, u'dots1'), None): cnv_integer, ((DRAWNS, u'dots1-length'), None): cnv_lengthorpercent, ((DRAWNS, u'dots2'), None): cnv_integer, ((DRAWNS, u'dots2-length'), None): cnv_lengthorpercent, ((DRAWNS, u'end-angle'), None): cnv_double, ((DRAWNS, u'end'), None): cnv_string, ((DRAWNS, u'end-color'), None): cnv_string, ((DRAWNS, u'end-glue-point'), None): cnv_nonNegativeInteger, ((DRAWNS, u'end-guide'), None): cnv_length, ((DRAWNS, u'end-intensity'), None): cnv_string, ((DRAWNS, u'end-line-spacing-horizontal'), None): cnv_string, ((DRAWNS, u'end-line-spacing-vertical'), None): cnv_string, ((DRAWNS, u'end-shape'), None): cnv_IDREF, ((DRAWNS, u'engine'), None): cnv_namespacedToken, ((DRAWNS, u'enhanced-path'), None): cnv_string, ((DRAWNS, u'escape-direction'), None): cnv_string, ((DRAWNS, u'extrusion-allowed'), None): cnv_boolean, ((DRAWNS, u'extrusion-brightness'), None): cnv_string, ((DRAWNS, u'extrusion'), None): cnv_boolean, ((DRAWNS, u'extrusion-color'), None): cnv_boolean, ((DRAWNS, 
u'extrusion-depth'), None): cnv_double, ((DRAWNS, u'extrusion-diffusion'), None): cnv_string, ((DRAWNS, u'extrusion-first-light-direction'), None): cnv_string, ((DRAWNS, u'extrusion-first-light-harsh'), None): cnv_boolean, ((DRAWNS, u'extrusion-first-light-level'), None): cnv_string, ((DRAWNS, u'extrusion-light-face'), None): cnv_boolean, ((DRAWNS, u'extrusion-metal'), None): cnv_boolean, ((DRAWNS, u'extrusion-number-of-line-segments'), None): cnv_integer, ((DRAWNS, u'extrusion-origin'), None): cnv_double, ((DRAWNS, u'extrusion-rotation-angle'), None): cnv_double, ((DRAWNS, u'extrusion-rotation-center'), None): cnv_string, ((DRAWNS, u'extrusion-second-light-direction'), None): cnv_string, ((DRAWNS, u'extrusion-second-light-harsh'), None): cnv_boolean, ((DRAWNS, u'extrusion-second-light-level'), None): cnv_string, ((DRAWNS, u'extrusion-shininess'), None): cnv_string, ((DRAWNS, u'extrusion-skew'), None): cnv_double, ((DRAWNS, u'extrusion-specularity'), None): cnv_string, ((DRAWNS, u'extrusion-viewpoint'), None): cnv_string, ((DRAWNS, u'fill'), None): cnv_string, ((DRAWNS, u'fill-color'), None): cnv_string, ((DRAWNS, u'fill-gradient-name'), None): cnv_string, ((DRAWNS, u'fill-hatch-name'), None): cnv_string, ((DRAWNS, u'fill-hatch-solid'), None): cnv_boolean, ((DRAWNS, u'fill-image-height'), None): cnv_lengthorpercent, ((DRAWNS, u'fill-image-name'), None): cnv_DrawNameRef, ((DRAWNS, u'fill-image-ref-point'), None): cnv_string, ((DRAWNS, u'fill-image-ref-point-x'), None): cnv_string, ((DRAWNS, u'fill-image-ref-point-y'), None): cnv_string, ((DRAWNS, u'fill-image-width'), None): cnv_lengthorpercent, ((DRAWNS, u'filter-name'), None): cnv_string, ((DRAWNS, u'fit-to-contour'), None): cnv_boolean, ((DRAWNS, u'fit-to-size'), None): cnv_string, # ODF 1.2 says boolean ((DRAWNS, u'formula'), None): cnv_string, ((DRAWNS, u'frame-display-border'), None): cnv_boolean, ((DRAWNS, u'frame-display-scrollbar'), None): cnv_boolean, ((DRAWNS, u'frame-margin-horizontal'), None): cnv_string, ((DRAWNS, u'frame-margin-vertical'), None): cnv_string, ((DRAWNS, u'frame-name'), None): cnv_string, ((DRAWNS, u'gamma'), None): cnv_string, ((DRAWNS, u'glue-point-leaving-directions'), None): cnv_string, ((DRAWNS, u'glue-point-type'), None): cnv_string, ((DRAWNS, u'glue-points'), None): cnv_string, ((DRAWNS, u'gradient-step-count'), None): cnv_string, ((DRAWNS, u'green'), None): cnv_string, ((DRAWNS, u'guide-distance'), None): cnv_string, ((DRAWNS, u'guide-overhang'), None): cnv_length, ((DRAWNS, u'handle-mirror-horizontal'), None): cnv_boolean, ((DRAWNS, u'handle-mirror-vertical'), None): cnv_boolean, ((DRAWNS, u'handle-polar'), None): cnv_string, ((DRAWNS, u'handle-position'), None): cnv_string, ((DRAWNS, u'handle-radius-range-maximum'), None): cnv_string, ((DRAWNS, u'handle-radius-range-minimum'), None): cnv_string, ((DRAWNS, u'handle-range-x-maximum'), None): cnv_string, ((DRAWNS, u'handle-range-x-minimum'), None): cnv_string, ((DRAWNS, u'handle-range-y-maximum'), None): cnv_string, ((DRAWNS, u'handle-range-y-minimum'), None): cnv_string, ((DRAWNS, u'handle-switched'), None): cnv_boolean, # ((DRAWNS,u'id'), None): cnv_ID, # ((DRAWNS,u'id'), None): cnv_nonNegativeInteger, # ?? 
line 6581 in RNG ((DRAWNS, u'id'), None): cnv_string, ((DRAWNS, u'image-opacity'), None): cnv_string, ((DRAWNS, u'kind'), None): cnv_string, ((DRAWNS, u'layer'), None): cnv_string, ((DRAWNS, u'line-distance'), None): cnv_string, ((DRAWNS, u'line-skew'), None): cnv_string, ((DRAWNS, u'luminance'), None): cnv_string, ((DRAWNS, u'marker-end-center'), None): cnv_boolean, ((DRAWNS, u'marker-end'), None): cnv_string, ((DRAWNS, u'marker-end-width'), None): cnv_length, ((DRAWNS, u'marker-start-center'), None): cnv_boolean, ((DRAWNS, u'marker-start'), None): cnv_string, ((DRAWNS, u'marker-start-width'), None): cnv_length, ((DRAWNS, u'master-page-name'), None): cnv_StyleNameRef, ((DRAWNS, u'may-script'), None): cnv_boolean, ((DRAWNS, u'measure-align'), None): cnv_string, ((DRAWNS, u'measure-vertical-align'), None): cnv_string, ((DRAWNS, u'mime-type'), None): cnv_string, ((DRAWNS, u'mirror-horizontal'), None): cnv_boolean, ((DRAWNS, u'mirror-vertical'), None): cnv_boolean, ((DRAWNS, u'modifiers'), None): cnv_string, ((DRAWNS, u'name'), None): cnv_NCName, # ((DRAWNS,u'name'), None): cnv_string, ((DRAWNS, u'nav-order'), None): cnv_IDREF, ((DRAWNS, u'nohref'), None): cnv_string, ((DRAWNS, u'notify-on-update-of-ranges'), None): cnv_string, ((DRAWNS, u'object'), None): cnv_string, ((DRAWNS, u'ole-draw-aspect'), None): cnv_string, ((DRAWNS, u'opacity'), None): cnv_string, ((DRAWNS, u'opacity-name'), None): cnv_string, ((DRAWNS, u'page-number'), None): cnv_positiveInteger, ((DRAWNS, u'parallel'), None): cnv_boolean, ((DRAWNS, u'path-stretchpoint-x'), None): cnv_double, ((DRAWNS, u'path-stretchpoint-y'), None): cnv_double, ((DRAWNS, u'placing'), None): cnv_string, ((DRAWNS, u'points'), None): cnv_points, ((DRAWNS, u'protected'), None): cnv_boolean, ((DRAWNS, u'recreate-on-edit'), None): cnv_boolean, ((DRAWNS, u'red'), None): cnv_string, ((DRAWNS, u'rotation'), None): cnv_integer, ((DRAWNS, u'secondary-fill-color'), None): cnv_string, ((DRAWNS, u'shadow'), None): cnv_string, ((DRAWNS, u'shadow-color'), None): cnv_string, ((DRAWNS, u'shadow-offset-x'), None): cnv_length, ((DRAWNS, u'shadow-offset-y'), None): cnv_length, ((DRAWNS, u'shadow-opacity'), None): cnv_string, ((DRAWNS, u'shape-id'), None): cnv_IDREF, ((DRAWNS, u'sharpness'), None): cnv_string, ((DRAWNS, u'show-unit'), None): cnv_boolean, ((DRAWNS, u'start-angle'), None): cnv_double, ((DRAWNS, u'start'), None): cnv_string, ((DRAWNS, u'start-color'), None): cnv_string, ((DRAWNS, u'start-glue-point'), None): cnv_nonNegativeInteger, ((DRAWNS, u'start-guide'), None): cnv_length, ((DRAWNS, u'start-intensity'), None): cnv_string, ((DRAWNS, u'start-line-spacing-horizontal'), None): cnv_string, ((DRAWNS, u'start-line-spacing-vertical'), None): cnv_string, ((DRAWNS, u'start-shape'), None): cnv_IDREF, ((DRAWNS, u'stroke'), None): cnv_string, ((DRAWNS, u'stroke-dash'), None): cnv_string, ((DRAWNS, u'stroke-dash-names'), None): cnv_string, ((DRAWNS, u'stroke-linejoin'), None): cnv_string, ((DRAWNS, u'style'), None): cnv_string, ((DRAWNS, u'style-name'), None): cnv_StyleNameRef, ((DRAWNS, u'symbol-color'), None): cnv_string, ((DRAWNS, u'text-areas'), None): cnv_string, ((DRAWNS, u'text-path-allowed'), None): cnv_boolean, ((DRAWNS, u'text-path'), None): cnv_boolean, ((DRAWNS, u'text-path-mode'), None): cnv_string, ((DRAWNS, u'text-path-same-letter-heights'), None): cnv_boolean, ((DRAWNS, u'text-path-scale'), None): cnv_string, ((DRAWNS, u'text-rotate-angle'), None): cnv_double, ((DRAWNS, u'text-style-name'), None): cnv_StyleNameRef, ((DRAWNS, 
u'textarea-horizontal-align'), None): cnv_string, ((DRAWNS, u'textarea-vertical-align'), None): cnv_string, ((DRAWNS, u'tile-repeat-offset'), None): cnv_string, ((DRAWNS, u'transform'), None): cnv_string, ((DRAWNS, u'type'), None): cnv_string, ((DRAWNS, u'unit'), None): cnv_string, ((DRAWNS, u'value'), None): cnv_string, ((DRAWNS, u'visible-area-height'), None): cnv_string, ((DRAWNS, u'visible-area-left'), None): cnv_string, ((DRAWNS, u'visible-area-top'), None): cnv_string, ((DRAWNS, u'visible-area-width'), None): cnv_string, ((DRAWNS, u'wrap-influence-on-position'), None): cnv_string, ((DRAWNS, u'z-index'), None): cnv_nonNegativeInteger, ((FONS, u'background-color'), None): cnv_string, ((FONS, u'border-bottom'), None): cnv_string, ((FONS, u'border'), None): cnv_string, ((FONS, u'border-left'), None): cnv_string, ((FONS, u'border-right'), None): cnv_string, ((FONS, u'border-top'), None): cnv_string, ((FONS, u'break-after'), None): cnv_string, ((FONS, u'break-before'), None): cnv_string, ((FONS, u'clip'), None): cnv_string, ((FONS, u'color'), None): cnv_string, ((FONS, u'column-count'), None): cnv_positiveInteger, ((FONS, u'column-gap'), None): cnv_length, ((FONS, u'country'), None): cnv_token, ((FONS, u'end-indent'), None): cnv_length, ((FONS, u'font-family'), None): cnv_string, ((FONS, u'font-size'), None): cnv_string, ((FONS, u'font-style'), None): cnv_string, ((FONS, u'font-variant'), None): cnv_string, ((FONS, u'font-weight'), None): cnv_string, ((FONS, u'height'), None): cnv_string, ((FONS, u'hyphenate'), None): cnv_boolean, ((FONS, u'hyphenation-keep'), None): cnv_string, ((FONS, u'hyphenation-ladder-count'), None): cnv_string, ((FONS, u'hyphenation-push-char-count'), None): cnv_string, ((FONS, u'hyphenation-remain-char-count'), None): cnv_string, ((FONS, u'keep-together'), None): cnv_string, ((FONS, u'keep-with-next'), None): cnv_string, ((FONS, u'language'), None): cnv_token, ((FONS, u'letter-spacing'), None): cnv_string, ((FONS, u'line-height'), None): cnv_string, ((FONS, u'margin-bottom'), None): cnv_string, ((FONS, u'margin'), None): cnv_string, ((FONS, u'margin-left'), None): cnv_string, ((FONS, u'margin-right'), None): cnv_string, ((FONS, u'margin-top'), None): cnv_string, ((FONS, u'max-height'), None): cnv_string, ((FONS, u'max-width'), None): cnv_string, ((FONS, u'min-height'), None): cnv_length, ((FONS, u'min-width'), None): cnv_string, ((FONS, u'orphans'), None): cnv_string, ((FONS, u'padding-bottom'), None): cnv_string, ((FONS, u'padding'), None): cnv_string, ((FONS, u'padding-left'), None): cnv_string, ((FONS, u'padding-right'), None): cnv_string, ((FONS, u'padding-top'), None): cnv_string, ((FONS, u'page-height'), None): cnv_length, ((FONS, u'page-width'), None): cnv_length, ((FONS, u'space-after'), None): cnv_length, ((FONS, u'space-before'), None): cnv_length, ((FONS, u'start-indent'), None): cnv_length, ((FONS, u'text-align'), None): cnv_string, ((FONS, u'text-align-last'), None): cnv_string, ((FONS, u'text-indent'), None): cnv_string, ((FONS, u'text-shadow'), None): cnv_string, ((FONS, u'text-transform'), None): cnv_string, ((FONS, u'widows'), None): cnv_string, ((FONS, u'width'), None): cnv_string, ((FONS, u'wrap-option'), None): cnv_string, ((FORMNS, u'allow-deletes'), None): cnv_boolean, ((FORMNS, u'allow-inserts'), None): cnv_boolean, ((FORMNS, u'allow-updates'), None): cnv_boolean, ((FORMNS, u'apply-design-mode'), None): cnv_boolean, ((FORMNS, u'apply-filter'), None): cnv_boolean, ((FORMNS, u'auto-complete'), None): cnv_boolean, ((FORMNS, u'automatic-focus'), 
None): cnv_boolean, ((FORMNS, u'bound-column'), None): cnv_string, ((FORMNS, u'button-type'), None): cnv_string, ((FORMNS, u'command'), None): cnv_string, ((FORMNS, u'command-type'), None): cnv_string, ((FORMNS, u'control-implementation'), None): cnv_namespacedToken, ((FORMNS, u'convert-empty-to-null'), None): cnv_boolean, ((FORMNS, u'current-selected'), None): cnv_boolean, ((FORMNS, u'current-state'), None): cnv_string, # ((FORMNS,u'current-value'), None): cnv_date, # ((FORMNS,u'current-value'), None): cnv_double, ((FORMNS, u'current-value'), None): cnv_string, # ((FORMNS,u'current-value'), None): cnv_time, ((FORMNS, u'data-field'), None): cnv_string, ((FORMNS, u'datasource'), None): cnv_string, ((FORMNS, u'default-button'), None): cnv_boolean, ((FORMNS, u'delay-for-repeat'), None): cnv_duration, ((FORMNS, u'detail-fields'), None): cnv_string, ((FORMNS, u'disabled'), None): cnv_boolean, ((FORMNS, u'dropdown'), None): cnv_boolean, ((FORMNS, u'echo-char'), None): cnv_string, ((FORMNS, u'enctype'), None): cnv_string, ((FORMNS, u'escape-processing'), None): cnv_boolean, ((FORMNS, u'filter'), None): cnv_string, ((FORMNS, u'focus-on-click'), None): cnv_boolean, ((FORMNS, u'for'), None): cnv_string, ((FORMNS, u'id'), None): cnv_ID, ((FORMNS, u'ignore-result'), None): cnv_boolean, ((FORMNS, u'image-align'), None): cnv_string, ((FORMNS, u'image-data'), None): cnv_anyURI, ((FORMNS, u'image-position'), None): cnv_string, ((FORMNS, u'is-tristate'), None): cnv_boolean, ((FORMNS, u'label'), None): cnv_string, ((FORMNS, u'list-source'), None): cnv_string, ((FORMNS, u'list-source-type'), None): cnv_string, ((FORMNS, u'master-fields'), None): cnv_string, ((FORMNS, u'max-length'), None): cnv_nonNegativeInteger, # ((FORMNS,u'max-value'), None): cnv_date, # ((FORMNS,u'max-value'), None): cnv_double, ((FORMNS, u'max-value'), None): cnv_string, # ((FORMNS,u'max-value'), None): cnv_time, ((FORMNS, u'method'), None): cnv_string, # ((FORMNS,u'min-value'), None): cnv_date, # ((FORMNS,u'min-value'), None): cnv_double, ((FORMNS, u'min-value'), None): cnv_string, # ((FORMNS,u'min-value'), None): cnv_time, ((FORMNS, u'multi-line'), None): cnv_boolean, ((FORMNS, u'multiple'), None): cnv_boolean, ((FORMNS, u'name'), None): cnv_string, ((FORMNS, u'navigation-mode'), None): cnv_string, ((FORMNS, u'order'), None): cnv_string, ((FORMNS, u'orientation'), None): cnv_string, ((FORMNS, u'page-step-size'), None): cnv_positiveInteger, ((FORMNS, u'printable'), None): cnv_boolean, ((FORMNS, u'property-name'), None): cnv_string, ((FORMNS, u'readonly'), None): cnv_boolean, ((FORMNS, u'selected'), None): cnv_boolean, ((FORMNS, u'size'), None): cnv_nonNegativeInteger, ((FORMNS, u'state'), None): cnv_string, ((FORMNS, u'step-size'), None): cnv_positiveInteger, ((FORMNS, u'tab-cycle'), None): cnv_string, ((FORMNS, u'tab-index'), None): cnv_nonNegativeInteger, ((FORMNS, u'tab-stop'), None): cnv_boolean, ((FORMNS, u'text-style-name'), None): cnv_StyleNameRef, ((FORMNS, u'title'), None): cnv_string, ((FORMNS, u'toggle'), None): cnv_boolean, ((FORMNS, u'validation'), None): cnv_boolean, # ((FORMNS,u'value'), None): cnv_date, # ((FORMNS,u'value'), None): cnv_double, ((FORMNS, u'value'), None): cnv_string, # ((FORMNS,u'value'), None): cnv_time, ((FORMNS, u'visual-effect'), None): cnv_string, ((FORMNS, u'xforms-list-source'), None): cnv_string, ((FORMNS, u'xforms-submission'), None): cnv_string, ((MANIFESTNS, 'algorithm-name'), None): cnv_string, ((MANIFESTNS, 'checksum'), None): cnv_string, ((MANIFESTNS, 'checksum-type'), None): cnv_string, 
((MANIFESTNS, 'full-path'), None): cnv_string, ((MANIFESTNS, 'initialisation-vector'), None): cnv_string, ((MANIFESTNS, 'iteration-count'), None): cnv_nonNegativeInteger, ((MANIFESTNS, 'key-derivation-name'), None): cnv_string, ((MANIFESTNS, 'media-type'), None): cnv_string, ((MANIFESTNS, 'salt'), None): cnv_string, ((MANIFESTNS, 'size'), None): cnv_nonNegativeInteger, ((METANS, u'cell-count'), None): cnv_nonNegativeInteger, ((METANS, u'character-count'), None): cnv_nonNegativeInteger, ((METANS, u'date'), None): cnv_dateTime, ((METANS, u'delay'), None): cnv_duration, ((METANS, u'draw-count'), None): cnv_nonNegativeInteger, ((METANS, u'frame-count'), None): cnv_nonNegativeInteger, ((METANS, u'image-count'), None): cnv_nonNegativeInteger, ((METANS, u'name'), None): cnv_string, ((METANS, u'non-whitespace-character-count'), None): cnv_nonNegativeInteger, ((METANS, u'object-count'), None): cnv_nonNegativeInteger, ((METANS, u'ole-object-count'), None): cnv_nonNegativeInteger, ((METANS, u'page-count'), None): cnv_nonNegativeInteger, ((METANS, u'paragraph-count'), None): cnv_nonNegativeInteger, ((METANS, u'row-count'), None): cnv_nonNegativeInteger, ((METANS, u'sentence-count'), None): cnv_nonNegativeInteger, ((METANS, u'syllable-count'), None): cnv_nonNegativeInteger, ((METANS, u'table-count'), None): cnv_nonNegativeInteger, ((METANS, u'value-type'), None): cnv_metavaluetype, ((METANS, u'word-count'), None): cnv_nonNegativeInteger, ((NUMBERNS, u'automatic-order'), None): cnv_boolean, ((NUMBERNS, u'calendar'), None): cnv_string, ((NUMBERNS, u'country'), None): cnv_token, ((NUMBERNS, u'decimal-places'), None): cnv_integer, ((NUMBERNS, u'decimal-replacement'), None): cnv_string, ((NUMBERNS, u'denominator-value'), None): cnv_integer, ((NUMBERNS, u'display-factor'), None): cnv_double, ((NUMBERNS, u'format-source'), None): cnv_string, ((NUMBERNS, u'grouping'), None): cnv_boolean, ((NUMBERNS, u'language'), None): cnv_token, ((NUMBERNS, u'min-denominator-digits'), None): cnv_integer, ((NUMBERNS, u'min-exponent-digits'), None): cnv_integer, ((NUMBERNS, u'min-integer-digits'), None): cnv_integer, ((NUMBERNS, u'min-numerator-digits'), None): cnv_integer, ((NUMBERNS, u'position'), None): cnv_integer, ((NUMBERNS, u'possessive-form'), None): cnv_boolean, ((NUMBERNS, u'style'), None): cnv_string, ((NUMBERNS, u'textual'), None): cnv_boolean, ((NUMBERNS, u'title'), None): cnv_string, ((NUMBERNS, u'transliteration-country'), None): cnv_token, ((NUMBERNS, u'transliteration-format'), None): cnv_string, ((NUMBERNS, u'transliteration-language'), None): cnv_token, ((NUMBERNS, u'transliteration-style'), None): cnv_string, ((NUMBERNS, u'truncate-on-overflow'), None): cnv_boolean, ((OFFICENS, u'automatic-update'), None): cnv_boolean, ((OFFICENS, u'boolean-value'), None): cnv_boolean, ((OFFICENS, u'conversion-mode'), None): cnv_string, ((OFFICENS, u'currency'), None): cnv_string, ((OFFICENS, u'date-value'), None): cnv_dateTime, ((OFFICENS, u'dde-application'), None): cnv_string, ((OFFICENS, u'dde-item'), None): cnv_string, ((OFFICENS, u'dde-topic'), None): cnv_string, ((OFFICENS, u'display'), None): cnv_boolean, ((OFFICENS, u'mimetype'), None): cnv_string, ((OFFICENS, u'name'), None): cnv_string, ((OFFICENS, u'process-content'), None): cnv_boolean, ((OFFICENS, u'server-map'), None): cnv_boolean, ((OFFICENS, u'string-value'), None): cnv_string, ((OFFICENS, u'target-frame'), None): cnv_string, ((OFFICENS, u'target-frame-name'), None): cnv_string, ((OFFICENS, u'time-value'), None): cnv_duration, ((OFFICENS, u'title'), None): 
cnv_string, ((OFFICENS, u'value'), None): cnv_double, ((OFFICENS, u'value-type'), None): cnv_string, ((OFFICENS, u'version'), None): cnv_string, ((PRESENTATIONNS, u'action'), None): cnv_string, ((PRESENTATIONNS, u'animations'), None): cnv_string, ((PRESENTATIONNS, u'background-objects-visible'), None): cnv_boolean, ((PRESENTATIONNS, u'background-visible'), None): cnv_boolean, ((PRESENTATIONNS, u'class'), None): cnv_string, ((PRESENTATIONNS, u'class-names'), None): cnv_NCNames, ((PRESENTATIONNS, u'delay'), None): cnv_duration, ((PRESENTATIONNS, u'direction'), None): cnv_string, ((PRESENTATIONNS, u'display-date-time'), None): cnv_boolean, ((PRESENTATIONNS, u'display-footer'), None): cnv_boolean, ((PRESENTATIONNS, u'display-header'), None): cnv_boolean, ((PRESENTATIONNS, u'display-page-number'), None): cnv_boolean, ((PRESENTATIONNS, u'duration'), None): cnv_string, ((PRESENTATIONNS, u'effect'), None): cnv_string, ((PRESENTATIONNS, u'endless'), None): cnv_boolean, ((PRESENTATIONNS, u'force-manual'), None): cnv_boolean, ((PRESENTATIONNS, u'full-screen'), None): cnv_boolean, ((PRESENTATIONNS, u'group-id'), None): cnv_string, ((PRESENTATIONNS, u'master-element'), None): cnv_IDREF, ((PRESENTATIONNS, u'mouse-as-pen'), None): cnv_boolean, ((PRESENTATIONNS, u'mouse-visible'), None): cnv_boolean, ((PRESENTATIONNS, u'name'), None): cnv_string, ((PRESENTATIONNS, u'node-type'), None): cnv_string, ((PRESENTATIONNS, u'object'), None): cnv_string, ((PRESENTATIONNS, u'pages'), None): cnv_string, ((PRESENTATIONNS, u'path-id'), None): cnv_string, ((PRESENTATIONNS, u'pause'), None): cnv_duration, ((PRESENTATIONNS, u'placeholder'), None): cnv_boolean, ((PRESENTATIONNS, u'play-full'), None): cnv_boolean, ((PRESENTATIONNS, u'presentation-page-layout-name'), None): cnv_StyleNameRef, ((PRESENTATIONNS, u'preset-class'), None): cnv_string, ((PRESENTATIONNS, u'preset-id'), None): cnv_string, ((PRESENTATIONNS, u'preset-sub-type'), None): cnv_string, ((PRESENTATIONNS, u'show'), None): cnv_string, ((PRESENTATIONNS, u'show-end-of-presentation-slide'), None): cnv_boolean, ((PRESENTATIONNS, u'show-logo'), None): cnv_boolean, ((PRESENTATIONNS, u'source'), None): cnv_string, ((PRESENTATIONNS, u'speed'), None): cnv_string, ((PRESENTATIONNS, u'start-page'), None): cnv_string, ((PRESENTATIONNS, u'start-scale'), None): cnv_string, ((PRESENTATIONNS, u'start-with-navigator'), None): cnv_boolean, ((PRESENTATIONNS, u'stay-on-top'), None): cnv_boolean, ((PRESENTATIONNS, u'style-name'), None): cnv_StyleNameRef, ((PRESENTATIONNS, u'transition-on-click'), None): cnv_string, ((PRESENTATIONNS, u'transition-speed'), None): cnv_string, ((PRESENTATIONNS, u'transition-style'), None): cnv_string, ((PRESENTATIONNS, u'transition-type'), None): cnv_string, ((PRESENTATIONNS, u'use-date-time-name'), None): cnv_string, ((PRESENTATIONNS, u'use-footer-name'), None): cnv_string, ((PRESENTATIONNS, u'use-header-name'), None): cnv_string, ((PRESENTATIONNS, u'user-transformed'), None): cnv_boolean, ((PRESENTATIONNS, u'verb'), None): cnv_nonNegativeInteger, ((PRESENTATIONNS, u'visibility'), None): cnv_string, ((SCRIPTNS, u'event-name'), None): cnv_formula, ((SCRIPTNS, u'language'), None): cnv_formula, ((SCRIPTNS, u'macro-name'), None): cnv_string, ((SMILNS, u'accelerate'), None): cnv_double, ((SMILNS, u'accumulate'), None): cnv_string, ((SMILNS, u'additive'), None): cnv_string, ((SMILNS, u'attributeName'), None): cnv_string, ((SMILNS, u'autoReverse'), None): cnv_boolean, ((SMILNS, u'begin'), None): cnv_string, ((SMILNS, u'by'), None): cnv_string, ((SMILNS, 
u'calcMode'), None): cnv_string, ((SMILNS, u'decelerate'), None): cnv_double, ((SMILNS, u'direction'), None): cnv_string, ((SMILNS, u'dur'), None): cnv_string, ((SMILNS, u'end'), None): cnv_string, ((SMILNS, u'endsync'), None): cnv_string, ((SMILNS, u'fadeColor'), None): cnv_string, ((SMILNS, u'fill'), None): cnv_string, ((SMILNS, u'fillDefault'), None): cnv_string, ((SMILNS, u'from'), None): cnv_string, ((SMILNS, u'keySplines'), None): cnv_string, ((SMILNS, u'keyTimes'), None): cnv_string, ((SMILNS, u'mode'), None): cnv_string, ((SMILNS, u'repeatCount'), None): cnv_nonNegativeInteger, ((SMILNS, u'repeatDur'), None): cnv_string, ((SMILNS, u'restart'), None): cnv_string, ((SMILNS, u'restartDefault'), None): cnv_string, ((SMILNS, u'subtype'), None): cnv_string, ((SMILNS, u'targetElement'), None): cnv_IDREF, ((SMILNS, u'to'), None): cnv_string, ((SMILNS, u'type'), None): cnv_string, ((SMILNS, u'values'), None): cnv_string, ((STYLENS, u'adjustment'), None): cnv_string, ((STYLENS, u'apply-style-name'), None): cnv_StyleNameRef, ((STYLENS, u'auto-text-indent'), None): cnv_boolean, ((STYLENS, u'auto-update'), None): cnv_boolean, ((STYLENS, u'background-transparency'), None): cnv_string, ((STYLENS, u'base-cell-address'), None): cnv_string, ((STYLENS, u'border-line-width-bottom'), None): cnv_string, ((STYLENS, u'border-line-width'), None): cnv_string, ((STYLENS, u'border-line-width-left'), None): cnv_string, ((STYLENS, u'border-line-width-right'), None): cnv_string, ((STYLENS, u'border-line-width-top'), None): cnv_string, ((STYLENS, u'cell-protect'), None): cnv_string, ((STYLENS, u'char'), None): cnv_string, ((STYLENS, u'class'), None): cnv_string, ((STYLENS, u'color'), None): cnv_string, ((STYLENS, u'column-width'), None): cnv_string, ((STYLENS, u'condition'), None): cnv_string, ((STYLENS, u'country-asian'), None): cnv_string, ((STYLENS, u'country-complex'), None): cnv_string, ((STYLENS, u'data-style-name'), None): cnv_StyleNameRef, ((STYLENS, u'decimal-places'), None): cnv_string, ((STYLENS, u'default-outline-level'), None): cnv_positiveInteger, ((STYLENS, u'diagonal-bl-tr'), None): cnv_string, ((STYLENS, u'diagonal-bl-tr-widths'), None): cnv_string, ((STYLENS, u'diagonal-tl-br'), None): cnv_string, ((STYLENS, u'diagonal-tl-br-widths'), None): cnv_string, ((STYLENS, u'direction'), None): cnv_string, ((STYLENS, u'display'), None): cnv_boolean, ((STYLENS, u'display-name'), None): cnv_string, ((STYLENS, u'distance-after-sep'), None): cnv_length, ((STYLENS, u'distance-before-sep'), None): cnv_length, ((STYLENS, u'distance'), None): cnv_length, ((STYLENS, u'dynamic-spacing'), None): cnv_boolean, ((STYLENS, u'editable'), None): cnv_boolean, ((STYLENS, u'family'), None): cnv_family, ((STYLENS, u'filter-name'), None): cnv_string, ((STYLENS, u'first-page-number'), None): cnv_string, ((STYLENS, u'flow-with-text'), None): cnv_boolean, ((STYLENS, u'font-adornments'), None): cnv_string, ((STYLENS, u'font-charset'), None): cnv_string, ((STYLENS, u'font-charset-asian'), None): cnv_string, ((STYLENS, u'font-charset-complex'), None): cnv_string, ((STYLENS, u'font-family-asian'), None): cnv_string, ((STYLENS, u'font-family-complex'), None): cnv_string, ((STYLENS, u'font-family-generic-asian'), None): cnv_string, ((STYLENS, u'font-family-generic'), None): cnv_string, ((STYLENS, u'font-family-generic-complex'), None): cnv_string, ((STYLENS, u'font-independent-line-spacing'), None): cnv_boolean, ((STYLENS, u'font-name-asian'), None): cnv_string, ((STYLENS, u'font-name'), None): cnv_string, ((STYLENS, 
u'font-name-complex'), None): cnv_string, ((STYLENS, u'font-pitch-asian'), None): cnv_string, ((STYLENS, u'font-pitch'), None): cnv_string, ((STYLENS, u'font-pitch-complex'), None): cnv_string, ((STYLENS, u'font-relief'), None): cnv_string, ((STYLENS, u'font-size-asian'), None): cnv_string, ((STYLENS, u'font-size-complex'), None): cnv_string, ((STYLENS, u'font-size-rel-asian'), None): cnv_length, ((STYLENS, u'font-size-rel'), None): cnv_length, ((STYLENS, u'font-size-rel-complex'), None): cnv_length, ((STYLENS, u'font-style-asian'), None): cnv_string, ((STYLENS, u'font-style-complex'), None): cnv_string, ((STYLENS, u'font-style-name-asian'), None): cnv_string, ((STYLENS, u'font-style-name'), None): cnv_string, ((STYLENS, u'font-style-name-complex'), None): cnv_string, ((STYLENS, u'font-weight-asian'), None): cnv_string, ((STYLENS, u'font-weight-complex'), None): cnv_string, ((STYLENS, u'footnote-max-height'), None): cnv_length, ((STYLENS, u'glyph-orientation-vertical'), None): cnv_string, ((STYLENS, u'height'), None): cnv_string, ((STYLENS, u'horizontal-pos'), None): cnv_string, ((STYLENS, u'horizontal-rel'), None): cnv_string, ((STYLENS, u'justify-single-word'), None): cnv_boolean, ((STYLENS, u'language-asian'), None): cnv_string, ((STYLENS, u'language-complex'), None): cnv_string, ((STYLENS, u'layout-grid-base-height'), None): cnv_length, ((STYLENS, u'layout-grid-color'), None): cnv_string, ((STYLENS, u'layout-grid-display'), None): cnv_boolean, ((STYLENS, u'layout-grid-lines'), None): cnv_string, ((STYLENS, u'layout-grid-mode'), None): cnv_string, ((STYLENS, u'layout-grid-print'), None): cnv_boolean, ((STYLENS, u'layout-grid-ruby-below'), None): cnv_boolean, ((STYLENS, u'layout-grid-ruby-height'), None): cnv_length, ((STYLENS, u'leader-char'), None): cnv_string, ((STYLENS, u'leader-color'), None): cnv_string, ((STYLENS, u'leader-style'), None): cnv_string, ((STYLENS, u'leader-text'), None): cnv_string, ((STYLENS, u'leader-text-style'), None): cnv_StyleNameRef, ((STYLENS, u'leader-type'), None): cnv_string, ((STYLENS, u'leader-width'), None): cnv_string, ((STYLENS, u'legend-expansion-aspect-ratio'), None): cnv_double, ((STYLENS, u'legend-expansion'), None): cnv_string, ((STYLENS, u'length'), None): cnv_positiveInteger, ((STYLENS, u'letter-kerning'), None): cnv_boolean, ((STYLENS, u'line-break'), None): cnv_string, ((STYLENS, u'line-height-at-least'), None): cnv_string, ((STYLENS, u'line-spacing'), None): cnv_length, ((STYLENS, u'line-style'), None): cnv_string, ((STYLENS, u'lines'), None): cnv_positiveInteger, ((STYLENS, u'list-style-name'), None): cnv_StyleNameRef, ((STYLENS, u'master-page-name'), None): cnv_StyleNameRef, ((STYLENS, u'may-break-between-rows'), None): cnv_boolean, ((STYLENS, u'min-row-height'), None): cnv_string, ((STYLENS, u'mirror'), None): cnv_string, ((STYLENS, u'name'), None): cnv_NCName, ((STYLENS, u'name'), (STYLENS, u'font-face')): cnv_string, ((STYLENS, u'next-style-name'), None): cnv_StyleNameRef, ((STYLENS, u'num-format'), None): cnv_string, ((STYLENS, u'num-letter-sync'), None): cnv_boolean, ((STYLENS, u'num-prefix'), None): cnv_string, ((STYLENS, u'num-suffix'), None): cnv_string, ((STYLENS, u'number-wrapped-paragraphs'), None): cnv_string, ((STYLENS, u'overflow-behavior'), None): cnv_string, ((STYLENS, u'page-layout-name'), None): cnv_StyleNameRef, ((STYLENS, u'page-number'), None): cnv_string, ((STYLENS, u'page-usage'), None): cnv_string, ((STYLENS, u'paper-tray-name'), None): cnv_string, ((STYLENS, u'parent-style-name'), None): cnv_StyleNameRef, 
((STYLENS, u'position'), (STYLENS, u'tab-stop')): cnv_length, ((STYLENS, u'position'), None): cnv_string, ((STYLENS, u'print'), None): cnv_string, ((STYLENS, u'print-content'), None): cnv_boolean, ((STYLENS, u'print-orientation'), None): cnv_string, ((STYLENS, u'print-page-order'), None): cnv_string, ((STYLENS, u'protect'), (STYLENS, u'section-properties')): cnv_boolean, ((STYLENS, u'protect'), (STYLENS, u'graphic-properties')): cnv_string, # ((STYLENS,u'protect'), None): cnv_boolean, ((STYLENS, u'punctuation-wrap'), None): cnv_string, ((STYLENS, u'register-true'), None): cnv_boolean, ((STYLENS, u'register-truth-ref-style-name'), None): cnv_string, ((STYLENS, u'rel-column-width'), None): cnv_string, ((STYLENS, u'rel-height'), None): cnv_string, ((STYLENS, u'rel-width'), None): cnv_string, ((STYLENS, u'repeat'), None): cnv_string, ((STYLENS, u'repeat-content'), None): cnv_boolean, ((STYLENS, u'rotation-align'), None): cnv_string, ((STYLENS, u'rotation-angle'), None): cnv_string, ((STYLENS, u'row-height'), None): cnv_string, ((STYLENS, u'ruby-align'), None): cnv_string, ((STYLENS, u'ruby-position'), None): cnv_string, ((STYLENS, u'run-through'), None): cnv_string, ((STYLENS, u'scale-to'), None): cnv_string, ((STYLENS, u'scale-to-pages'), None): cnv_string, ((STYLENS, u'script-type'), None): cnv_string, ((STYLENS, u'shadow'), None): cnv_string, ((STYLENS, u'shrink-to-fit'), None): cnv_boolean, ((STYLENS, u'snap-to-layout-grid'), None): cnv_boolean, ((STYLENS, u'style'), None): cnv_string, ((STYLENS, u'style-name'), None): cnv_StyleNameRef, ((STYLENS, u'tab-stop-distance'), None): cnv_string, ((STYLENS, u'table-centering'), None): cnv_string, ((STYLENS, u'text-align-source'), None): cnv_string, ((STYLENS, u'text-autospace'), None): cnv_string, ((STYLENS, u'text-blinking'), None): cnv_boolean, ((STYLENS, u'text-combine'), None): cnv_string, ((STYLENS, u'text-combine-end-char'), None): cnv_string, ((STYLENS, u'text-combine-start-char'), None): cnv_string, ((STYLENS, u'text-emphasize'), None): cnv_string, ((STYLENS, u'text-line-through-color'), None): cnv_string, ((STYLENS, u'text-line-through-mode'), None): cnv_string, ((STYLENS, u'text-line-through-style'), None): cnv_string, ((STYLENS, u'text-line-through-text'), None): cnv_string, ((STYLENS, u'text-line-through-text-style'), None): cnv_string, ((STYLENS, u'text-line-through-type'), None): cnv_string, ((STYLENS, u'text-line-through-width'), None): cnv_string, ((STYLENS, u'text-outline'), None): cnv_boolean, ((STYLENS, u'text-position'), None): cnv_string, ((STYLENS, u'text-rotation-angle'), None): cnv_string, ((STYLENS, u'text-rotation-scale'), None): cnv_string, ((STYLENS, u'text-scale'), None): cnv_string, ((STYLENS, u'text-underline-color'), None): cnv_string, ((STYLENS, u'text-underline-mode'), None): cnv_string, ((STYLENS, u'text-underline-style'), None): cnv_string, ((STYLENS, u'text-underline-type'), None): cnv_string, ((STYLENS, u'text-underline-width'), None): cnv_string, ((STYLENS, u'type'), None): cnv_string, ((STYLENS, u'use-optimal-column-width'), None): cnv_boolean, ((STYLENS, u'use-optimal-row-height'), None): cnv_boolean, ((STYLENS, u'use-window-font-color'), None): cnv_boolean, ((STYLENS, u'vertical-align'), None): cnv_string, ((STYLENS, u'vertical-pos'), None): cnv_string, ((STYLENS, u'vertical-rel'), None): cnv_string, ((STYLENS, u'volatile'), None): cnv_boolean, ((STYLENS, u'width'), None): cnv_string, ((STYLENS, u'wrap'), None): cnv_string, ((STYLENS, u'wrap-contour'), None): cnv_boolean, ((STYLENS, u'wrap-contour-mode'), 
None): cnv_string, ((STYLENS, u'wrap-dynamic-threshold'), None): cnv_length, ((STYLENS, u'writing-mode-automatic'), None): cnv_boolean, ((STYLENS, u'writing-mode'), None): cnv_string, ((SVGNS, u'accent-height'), None): cnv_integer, ((SVGNS, u'alphabetic'), None): cnv_integer, ((SVGNS, u'ascent'), None): cnv_integer, ((SVGNS, u'bbox'), None): cnv_string, ((SVGNS, u'cap-height'), None): cnv_integer, ((SVGNS, u'cx'), None): cnv_string, ((SVGNS, u'cy'), None): cnv_string, ((SVGNS, u'd'), None): cnv_string, ((SVGNS, u'descent'), None): cnv_integer, ((SVGNS, u'fill-rule'), None): cnv_string, ((SVGNS, u'font-family'), None): cnv_string, ((SVGNS, u'font-size'), None): cnv_string, ((SVGNS, u'font-stretch'), None): cnv_string, ((SVGNS, u'font-style'), None): cnv_string, ((SVGNS, u'font-variant'), None): cnv_string, ((SVGNS, u'font-weight'), None): cnv_string, ((SVGNS, u'fx'), None): cnv_string, ((SVGNS, u'fy'), None): cnv_string, ((SVGNS, u'gradientTransform'), None): cnv_string, ((SVGNS, u'gradientUnits'), None): cnv_string, ((SVGNS, u'hanging'), None): cnv_integer, ((SVGNS, u'height'), None): cnv_length, ((SVGNS, u'ideographic'), None): cnv_integer, ((SVGNS, u'mathematical'), None): cnv_integer, ((SVGNS, u'name'), None): cnv_string, ((SVGNS, u'offset'), None): cnv_string, ((SVGNS, u'origin'), None): cnv_string, ((SVGNS, u'overline-position'), None): cnv_integer, ((SVGNS, u'overline-thickness'), None): cnv_integer, ((SVGNS, u'panose-1'), None): cnv_string, ((SVGNS, u'path'), None): cnv_string, ((SVGNS, u'r'), None): cnv_length, ((SVGNS, u'rx'), None): cnv_length, ((SVGNS, u'ry'), None): cnv_length, ((SVGNS, u'slope'), None): cnv_integer, ((SVGNS, u'spreadMethod'), None): cnv_string, ((SVGNS, u'stemh'), None): cnv_integer, ((SVGNS, u'stemv'), None): cnv_integer, ((SVGNS, u'stop-color'), None): cnv_string, ((SVGNS, u'stop-opacity'), None): cnv_double, ((SVGNS, u'strikethrough-position'), None): cnv_integer, ((SVGNS, u'strikethrough-thickness'), None): cnv_integer, ((SVGNS, u'string'), None): cnv_string, ((SVGNS, u'stroke-color'), None): cnv_string, ((SVGNS, u'stroke-opacity'), None): cnv_string, ((SVGNS, u'stroke-width'), None): cnv_length, ((SVGNS, u'type'), None): cnv_string, ((SVGNS, u'underline-position'), None): cnv_integer, ((SVGNS, u'underline-thickness'), None): cnv_integer, ((SVGNS, u'unicode-range'), None): cnv_string, ((SVGNS, u'units-per-em'), None): cnv_integer, ((SVGNS, u'v-alphabetic'), None): cnv_integer, ((SVGNS, u'v-hanging'), None): cnv_integer, ((SVGNS, u'v-ideographic'), None): cnv_integer, ((SVGNS, u'v-mathematical'), None): cnv_integer, ((SVGNS, u'viewBox'), None): cnv_viewbox, ((SVGNS, u'width'), None): cnv_length, ((SVGNS, u'widths'), None): cnv_string, ((SVGNS, u'x'), None): cnv_length, ((SVGNS, u'x-height'), None): cnv_integer, ((SVGNS, u'x1'), None): cnv_lengthorpercent, ((SVGNS, u'x2'), None): cnv_lengthorpercent, ((SVGNS, u'y'), None): cnv_length, ((SVGNS, u'y1'), None): cnv_lengthorpercent, ((SVGNS, u'y2'), None): cnv_lengthorpercent, ((TABLENS, u'acceptance-state'), None): cnv_string, ((TABLENS, u'add-empty-lines'), None): cnv_boolean, ((TABLENS, u'algorithm'), None): cnv_formula, ((TABLENS, u'align'), None): cnv_string, ((TABLENS, u'allow-empty-cell'), None): cnv_boolean, ((TABLENS, u'application-data'), None): cnv_string, ((TABLENS, u'automatic-find-labels'), None): cnv_boolean, ((TABLENS, u'base-cell-address'), None): cnv_string, ((TABLENS, u'bind-styles-to-content'), None): cnv_boolean, ((TABLENS, u'border-color'), None): cnv_string, ((TABLENS, u'border-model'), 
None): cnv_string, ((TABLENS, u'buttons'), None): cnv_string, ((TABLENS, u'buttons'), None): cnv_string, ((TABLENS, u'case-sensitive'), None): cnv_boolean, ((TABLENS, u'case-sensitive'), None): cnv_string, ((TABLENS, u'cell-address'), None): cnv_string, ((TABLENS, u'cell-range-address'), None): cnv_string, ((TABLENS, u'cell-range-address'), None): cnv_string, ((TABLENS, u'cell-range'), None): cnv_string, ((TABLENS, u'column'), None): cnv_integer, ((TABLENS, u'comment'), None): cnv_string, ((TABLENS, u'condition'), None): cnv_formula, ((TABLENS, u'condition-source'), None): cnv_string, ((TABLENS, u'condition-source-range-address'), None): cnv_string, ((TABLENS, u'contains-error'), None): cnv_boolean, ((TABLENS, u'contains-header'), None): cnv_boolean, ((TABLENS, u'content-validation-name'), None): cnv_string, ((TABLENS, u'copy-back'), None): cnv_boolean, ((TABLENS, u'copy-formulas'), None): cnv_boolean, ((TABLENS, u'copy-styles'), None): cnv_boolean, ((TABLENS, u'count'), None): cnv_positiveInteger, ((TABLENS, u'country'), None): cnv_token, ((TABLENS, u'data-cell-range-address'), None): cnv_string, ((TABLENS, u'data-field'), None): cnv_string, ((TABLENS, u'data-type'), None): cnv_string, ((TABLENS, u'database-name'), None): cnv_string, ((TABLENS, u'database-table-name'), None): cnv_string, ((TABLENS, u'date-end'), None): cnv_string, ((TABLENS, u'date-start'), None): cnv_string, ((TABLENS, u'date-value'), None): cnv_date, ((TABLENS, u'default-cell-style-name'), None): cnv_StyleNameRef, ((TABLENS, u'direction'), None): cnv_string, ((TABLENS, u'display-border'), None): cnv_boolean, ((TABLENS, u'display'), None): cnv_boolean, ((TABLENS, u'display-duplicates'), None): cnv_boolean, ((TABLENS, u'display-filter-buttons'), None): cnv_boolean, ((TABLENS, u'display-list'), None): cnv_string, ((TABLENS, u'display-member-mode'), None): cnv_string, ((TABLENS, u'drill-down-on-double-click'), None): cnv_boolean, ((TABLENS, u'enabled'), None): cnv_boolean, ((TABLENS, u'end-cell-address'), None): cnv_string, ((TABLENS, u'end'), None): cnv_string, ((TABLENS, u'end-column'), None): cnv_integer, ((TABLENS, u'end-position'), None): cnv_integer, ((TABLENS, u'end-row'), None): cnv_integer, ((TABLENS, u'end-table'), None): cnv_integer, ((TABLENS, u'end-x'), None): cnv_length, ((TABLENS, u'end-y'), None): cnv_length, ((TABLENS, u'execute'), None): cnv_boolean, ((TABLENS, u'expression'), None): cnv_formula, ((TABLENS, u'field-name'), None): cnv_string, ((TABLENS, u'field-number'), None): cnv_nonNegativeInteger, ((TABLENS, u'field-number'), None): cnv_string, ((TABLENS, u'filter-name'), None): cnv_string, ((TABLENS, u'filter-options'), None): cnv_string, ((TABLENS, u'formula'), None): cnv_formula, ((TABLENS, u'function'), None): cnv_string, ((TABLENS, u'function'), None): cnv_string, ((TABLENS, u'grand-total'), None): cnv_string, ((TABLENS, u'group-by-field-number'), None): cnv_nonNegativeInteger, ((TABLENS, u'grouped-by'), None): cnv_string, ((TABLENS, u'has-persistent-data'), None): cnv_boolean, ((TABLENS, u'id'), None): cnv_string, ((TABLENS, u'identify-categories'), None): cnv_boolean, ((TABLENS, u'ignore-empty-rows'), None): cnv_boolean, ((TABLENS, u'index'), None): cnv_nonNegativeInteger, ((TABLENS, u'is-active'), None): cnv_boolean, ((TABLENS, u'is-data-layout-field'), None): cnv_string, ((TABLENS, u'is-selection'), None): cnv_boolean, ((TABLENS, u'is-sub-table'), None): cnv_boolean, ((TABLENS, u'label-cell-range-address'), None): cnv_string, ((TABLENS, u'language'), None): cnv_token, ((TABLENS, u'language'), 
None): cnv_token, ((TABLENS, u'last-column-spanned'), None): cnv_positiveInteger, ((TABLENS, u'last-row-spanned'), None): cnv_positiveInteger, ((TABLENS, u'layout-mode'), None): cnv_string, ((TABLENS, u'link-to-source-data'), None): cnv_boolean, ((TABLENS, u'marked-invalid'), None): cnv_boolean, ((TABLENS, u'matrix-covered'), None): cnv_boolean, ((TABLENS, u'maximum-difference'), None): cnv_double, ((TABLENS, u'member-count'), None): cnv_nonNegativeInteger, ((TABLENS, u'member-name'), None): cnv_string, ((TABLENS, u'member-type'), None): cnv_string, ((TABLENS, u'message-type'), None): cnv_string, ((TABLENS, u'mode'), None): cnv_string, ((TABLENS, u'multi-deletion-spanned'), None): cnv_integer, ((TABLENS, u'name'), None): cnv_string, ((TABLENS, u'name'), None): cnv_string, ((TABLENS, u'null-year'), None): cnv_positiveInteger, ((TABLENS, u'number-columns-repeated'), None): cnv_positiveInteger, ((TABLENS, u'number-columns-spanned'), None): cnv_positiveInteger, ((TABLENS, u'number-matrix-columns-spanned'), None): cnv_positiveInteger, ((TABLENS, u'number-matrix-rows-spanned'), None): cnv_positiveInteger, ((TABLENS, u'number-rows-repeated'), None): cnv_positiveInteger, ((TABLENS, u'number-rows-spanned'), None): cnv_positiveInteger, ((TABLENS, u'object-name'), None): cnv_string, ((TABLENS, u'on-update-keep-size'), None): cnv_boolean, ((TABLENS, u'on-update-keep-styles'), None): cnv_boolean, ((TABLENS, u'operator'), None): cnv_string, ((TABLENS, u'operator'), None): cnv_string, ((TABLENS, u'order'), None): cnv_string, ((TABLENS, u'orientation'), None): cnv_string, ((TABLENS, u'orientation'), None): cnv_string, ((TABLENS, u'page-breaks-on-group-change'), None): cnv_boolean, ((TABLENS, u'parse-sql-statement'), None): cnv_boolean, ((TABLENS, u'password'), None): cnv_string, ((TABLENS, u'position'), None): cnv_integer, ((TABLENS, u'precision-as-shown'), None): cnv_boolean, ((TABLENS, u'print'), None): cnv_boolean, ((TABLENS, u'print-ranges'), None): cnv_string, ((TABLENS, u'protect'), None): cnv_boolean, ((TABLENS, u'protected'), None): cnv_boolean, ((TABLENS, u'protection-key'), None): cnv_string, ((TABLENS, u'query-name'), None): cnv_string, ((TABLENS, u'range-usable-as'), None): cnv_string, ((TABLENS, u'refresh-delay'), None): cnv_boolean, ((TABLENS, u'refresh-delay'), None): cnv_duration, ((TABLENS, u'rejecting-change-id'), None): cnv_string, ((TABLENS, u'row'), None): cnv_integer, ((TABLENS, u'scenario-ranges'), None): cnv_string, ((TABLENS, u'search-criteria-must-apply-to-whole-cell'), None): cnv_boolean, ((TABLENS, u'selected-page'), None): cnv_string, ((TABLENS, u'show-details'), None): cnv_boolean, ((TABLENS, u'show-empty'), None): cnv_boolean, ((TABLENS, u'show-empty'), None): cnv_string, ((TABLENS, u'show-filter-button'), None): cnv_boolean, ((TABLENS, u'sort-mode'), None): cnv_string, ((TABLENS, u'source-cell-range-addresses'), None): cnv_string, ((TABLENS, u'source-cell-range-addresses'), None): cnv_string, ((TABLENS, u'source-field-name'), None): cnv_string, ((TABLENS, u'source-field-name'), None): cnv_string, ((TABLENS, u'source-name'), None): cnv_string, ((TABLENS, u'sql-statement'), None): cnv_string, ((TABLENS, u'start'), None): cnv_string, ((TABLENS, u'start-column'), None): cnv_integer, ((TABLENS, u'start-position'), None): cnv_integer, ((TABLENS, u'start-row'), None): cnv_integer, ((TABLENS, u'start-table'), None): cnv_integer, ((TABLENS, u'status'), None): cnv_string, ((TABLENS, u'step'), None): cnv_double, ((TABLENS, u'steps'), None): cnv_positiveInteger, ((TABLENS, 
u'structure-protected'), None): cnv_boolean, ((TABLENS, u'style-name'), None): cnv_StyleNameRef, ((TABLENS, u'table-background'), None): cnv_boolean, ((TABLENS, u'table'), None): cnv_integer, ((TABLENS, u'table-name'), None): cnv_string, ((TABLENS, u'target-cell-address'), None): cnv_string, ((TABLENS, u'target-cell-address'), None): cnv_string, ((TABLENS, u'target-range-address'), None): cnv_string, ((TABLENS, u'target-range-address'), None): cnv_string, ((TABLENS, u'title'), None): cnv_string, ((TABLENS, u'track-changes'), None): cnv_boolean, ((TABLENS, u'type'), None): cnv_string, ((TABLENS, u'use-labels'), None): cnv_string, ((TABLENS, u'use-regular-expressions'), None): cnv_boolean, ((TABLENS, u'used-hierarchy'), None): cnv_integer, ((TABLENS, u'user-name'), None): cnv_string, ((TABLENS, u'value'), None): cnv_string, ((TABLENS, u'value'), None): cnv_string, ((TABLENS, u'value-type'), None): cnv_string, ((TABLENS, u'visibility'), None): cnv_string, ((TEXTNS, u'active'), None): cnv_boolean, ((TEXTNS, u'address'), None): cnv_string, ((TEXTNS, u'alphabetical-separators'), None): cnv_boolean, ((TEXTNS, u'anchor-page-number'), None): cnv_positiveInteger, ((TEXTNS, u'anchor-type'), None): cnv_string, ((TEXTNS, u'animation'), None): cnv_string, ((TEXTNS, u'animation-delay'), None): cnv_string, ((TEXTNS, u'animation-direction'), None): cnv_string, ((TEXTNS, u'animation-repeat'), None): cnv_string, ((TEXTNS, u'animation-start-inside'), None): cnv_boolean, ((TEXTNS, u'animation-steps'), None): cnv_length, ((TEXTNS, u'animation-stop-inside'), None): cnv_boolean, ((TEXTNS, u'annote'), None): cnv_string, ((TEXTNS, u'author'), None): cnv_string, ((TEXTNS, u'bibliography-data-field'), None): cnv_string, ((TEXTNS, u'bibliography-type'), None): cnv_string, ((TEXTNS, u'booktitle'), None): cnv_string, ((TEXTNS, u'bullet-char'), None): cnv_string, ((TEXTNS, u'bullet-relative-size'), None): cnv_string, ((TEXTNS, u'c'), None): cnv_nonNegativeInteger, ((TEXTNS, u'capitalize-entries'), None): cnv_boolean, ((TEXTNS, u'caption-sequence-format'), None): cnv_string, ((TEXTNS, u'caption-sequence-name'), None): cnv_string, ((TEXTNS, u'change-id'), None): cnv_IDREF, ((TEXTNS, u'chapter'), None): cnv_string, ((TEXTNS, u'citation-body-style-name'), None): cnv_StyleNameRef, ((TEXTNS, u'citation-style-name'), None): cnv_StyleNameRef, ((TEXTNS, u'class-names'), None): cnv_NCNames, ((TEXTNS, u'column-name'), None): cnv_string, ((TEXTNS, u'combine-entries'), None): cnv_boolean, ((TEXTNS, u'combine-entries-with-dash'), None): cnv_boolean, ((TEXTNS, u'combine-entries-with-pp'), None): cnv_boolean, ((TEXTNS, u'comma-separated'), None): cnv_boolean, ((TEXTNS, u'cond-style-name'), None): cnv_StyleNameRef, ((TEXTNS, u'condition'), None): cnv_formula, ((TEXTNS, u'connection-name'), None): cnv_string, ((TEXTNS, u'consecutive-numbering'), None): cnv_boolean, ((TEXTNS, u'continue-numbering'), None): cnv_boolean, ((TEXTNS, u'copy-outline-levels'), None): cnv_boolean, ((TEXTNS, u'count-empty-lines'), None): cnv_boolean, ((TEXTNS, u'count-in-text-boxes'), None): cnv_boolean, ((TEXTNS, u'current-value'), None): cnv_boolean, ((TEXTNS, u'custom1'), None): cnv_string, ((TEXTNS, u'custom2'), None): cnv_string, ((TEXTNS, u'custom3'), None): cnv_string, ((TEXTNS, u'custom4'), None): cnv_string, ((TEXTNS, u'custom5'), None): cnv_string, ((TEXTNS, u'database-name'), None): cnv_string, ((TEXTNS, u'date-adjust'), None): cnv_duration, ((TEXTNS, u'date-value'), None): cnv_date, # ((TEXTNS,u'date-value'), None): cnv_dateTime, ((TEXTNS, 
u'default-style-name'), None): cnv_StyleNameRef, ((TEXTNS, u'description'), None): cnv_string, ((TEXTNS, u'display'), None): cnv_string, ((TEXTNS, u'display-levels'), None): cnv_positiveInteger, ((TEXTNS, u'display-outline-level'), None): cnv_nonNegativeInteger, ((TEXTNS, u'dont-balance-text-columns'), None): cnv_boolean, ((TEXTNS, u'duration'), None): cnv_duration, ((TEXTNS, u'edition'), None): cnv_string, ((TEXTNS, u'editor'), None): cnv_string, ((TEXTNS, u'filter-name'), None): cnv_string, ((TEXTNS, u'first-row-end-column'), None): cnv_string, ((TEXTNS, u'first-row-start-column'), None): cnv_string, ((TEXTNS, u'fixed'), None): cnv_boolean, ((TEXTNS, u'footnotes-position'), None): cnv_string, ((TEXTNS, u'formula'), None): cnv_formula, ((TEXTNS, u'global'), None): cnv_boolean, ((TEXTNS, u'howpublished'), None): cnv_string, ((TEXTNS, u'id'), None): cnv_ID, # ((TEXTNS,u'id'), None): cnv_string, ((TEXTNS, u'identifier'), None): cnv_string, ((TEXTNS, u'ignore-case'), None): cnv_boolean, ((TEXTNS, u'increment'), None): cnv_nonNegativeInteger, ((TEXTNS, u'index-name'), None): cnv_string, ((TEXTNS, u'index-scope'), None): cnv_string, ((TEXTNS, u'institution'), None): cnv_string, ((TEXTNS, u'is-hidden'), None): cnv_boolean, ((TEXTNS, u'is-list-header'), None): cnv_boolean, ((TEXTNS, u'isbn'), None): cnv_string, ((TEXTNS, u'issn'), None): cnv_string, ((TEXTNS, u'issn'), None): cnv_string, ((TEXTNS, u'journal'), None): cnv_string, ((TEXTNS, u'key'), None): cnv_string, ((TEXTNS, u'key1'), None): cnv_string, ((TEXTNS, u'key1-phonetic'), None): cnv_string, ((TEXTNS, u'key2'), None): cnv_string, ((TEXTNS, u'key2-phonetic'), None): cnv_string, ((TEXTNS, u'kind'), None): cnv_string, ((TEXTNS, u'label'), None): cnv_string, ((TEXTNS, u'last-row-end-column'), None): cnv_string, ((TEXTNS, u'last-row-start-column'), None): cnv_string, ((TEXTNS, u'level'), None): cnv_positiveInteger, ((TEXTNS, u'line-break'), None): cnv_boolean, ((TEXTNS, u'line-number'), None): cnv_string, ((TEXTNS, u'main-entry'), None): cnv_boolean, ((TEXTNS, u'main-entry-style-name'), None): cnv_StyleNameRef, ((TEXTNS, u'master-page-name'), None): cnv_StyleNameRef, ((TEXTNS, u'min-label-distance'), None): cnv_string, ((TEXTNS, u'min-label-width'), None): cnv_string, ((TEXTNS, u'month'), None): cnv_string, ((TEXTNS, u'name'), None): cnv_string, ((TEXTNS, u'note-class'), None): cnv_textnoteclass, ((TEXTNS, u'note'), None): cnv_string, ((TEXTNS, u'number'), None): cnv_string, ((TEXTNS, u'number-lines'), None): cnv_boolean, ((TEXTNS, u'number-position'), None): cnv_string, ((TEXTNS, u'numbered-entries'), None): cnv_boolean, ((TEXTNS, u'offset'), None): cnv_string, ((TEXTNS, u'organizations'), None): cnv_string, ((TEXTNS, u'outline-level'), None): cnv_string, ((TEXTNS, u'page-adjust'), None): cnv_integer, ((TEXTNS, u'pages'), None): cnv_string, ((TEXTNS, u'paragraph-style-name'), None): cnv_StyleNameRef, ((TEXTNS, u'placeholder-type'), None): cnv_string, ((TEXTNS, u'prefix'), None): cnv_string, ((TEXTNS, u'protected'), None): cnv_boolean, ((TEXTNS, u'protection-key'), None): cnv_string, ((TEXTNS, u'publisher'), None): cnv_string, ((TEXTNS, u'ref-name'), None): cnv_string, ((TEXTNS, u'reference-format'), None): cnv_string, ((TEXTNS, u'relative-tab-stop-position'), None): cnv_boolean, ((TEXTNS, u'report-type'), None): cnv_string, ((TEXTNS, u'restart-numbering'), None): cnv_boolean, ((TEXTNS, u'restart-on-page'), None): cnv_boolean, ((TEXTNS, u'row-number'), None): cnv_nonNegativeInteger, ((TEXTNS, u'school'), None): cnv_string, ((TEXTNS, 
u'section-name'), None): cnv_string, ((TEXTNS, u'select-page'), None): cnv_string, ((TEXTNS, u'separation-character'), None): cnv_string, ((TEXTNS, u'series'), None): cnv_string, ((TEXTNS, u'sort-algorithm'), None): cnv_string, ((TEXTNS, u'sort-ascending'), None): cnv_boolean, ((TEXTNS, u'sort-by-position'), None): cnv_boolean, ((TEXTNS, u'space-before'), None): cnv_string, ((TEXTNS, u'start-numbering-at'), None): cnv_string, ((TEXTNS, u'start-value'), None): cnv_nonNegativeInteger, ((TEXTNS, u'start-value'), None): cnv_positiveInteger, ((TEXTNS, u'string-value'), None): cnv_string, ((TEXTNS, u'string-value-if-false'), None): cnv_string, ((TEXTNS, u'string-value-if-true'), None): cnv_string, ((TEXTNS, u'string-value-phonetic'), None): cnv_string, ((TEXTNS, u'style-name'), None): cnv_StyleNameRef, ((TEXTNS, u'suffix'), None): cnv_string, ((TEXTNS, u'tab-ref'), None): cnv_nonNegativeInteger, ((TEXTNS, u'table-name'), None): cnv_string, ((TEXTNS, u'table-type'), None): cnv_string, ((TEXTNS, u'time-adjust'), None): cnv_duration, ((TEXTNS, u'time-value'), None): cnv_dateTime, ((TEXTNS, u'time-value'), None): cnv_time, ((TEXTNS, u'title'), None): cnv_string, ((TEXTNS, u'track-changes'), None): cnv_boolean, ((TEXTNS, u'url'), None): cnv_string, ((TEXTNS, u'use-caption'), None): cnv_boolean, ((TEXTNS, u'use-chart-objects'), None): cnv_boolean, ((TEXTNS, u'use-draw-objects'), None): cnv_boolean, ((TEXTNS, u'use-floating-frames'), None): cnv_boolean, ((TEXTNS, u'use-graphics'), None): cnv_boolean, ((TEXTNS, u'use-index-marks'), None): cnv_boolean, ((TEXTNS, u'use-index-source-styles'), None): cnv_boolean, ((TEXTNS, u'use-keys-as-entries'), None): cnv_boolean, ((TEXTNS, u'use-math-objects'), None): cnv_boolean, ((TEXTNS, u'use-objects'), None): cnv_boolean, ((TEXTNS, u'use-other-objects'), None): cnv_boolean, ((TEXTNS, u'use-outline-level'), None): cnv_boolean, ((TEXTNS, u'use-soft-page-breaks'), None): cnv_boolean, ((TEXTNS, u'use-spreadsheet-objects'), None): cnv_boolean, ((TEXTNS, u'use-tables'), None): cnv_boolean, ((TEXTNS, u'value'), None): cnv_nonNegativeInteger, ((TEXTNS, u'visited-style-name'), None): cnv_StyleNameRef, ((TEXTNS, u'volume'), None): cnv_string, ((TEXTNS, u'year'), None): cnv_string, ((XFORMSNS, u'bind'), None): cnv_string, ((XLINKNS, u'actuate'), None): cnv_string, ((XLINKNS, u'href'), None): cnv_anyURI, ((XLINKNS, u'show'), None): cnv_xlinkshow, ((XLINKNS, u'title'), None): cnv_string, ((XLINKNS, u'type'), None): cnv_string,
}


class AttrConverters:
    def convert(self, attribute, value, element):
        """ Based on the element, figures out how to check/convert the attribute value
            All values are converted to string
        """
        conversion = attrconverters.get((attribute, element.qname), None)
        if conversion is not None:
            return conversion(attribute, value, element)
        else:
            conversion = attrconverters.get((attribute, None), None)
            if conversion is not None:
                return conversion(attribute, value, element)
        return unicode(value)
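# --- Illustrative usage sketch (not part of odfpy; added for clarity). ---
# convert() resolves a converter in two steps: first the element-specific
# key (attribute, element.qname), then the generic key (attribute, None),
# falling back to unicode() when neither is registered. The element class
# below is hypothetical; any object with a matching .qname would do.
#
#   class _FontFace:
#       qname = (STYLENS, u'font-face')
#
#   conv = AttrConverters()
#   # style:name on a style:font-face element hits the element-specific
#   # entry registered above, so cnv_string is used instead of cnv_NCName:
#   conv.convert((STYLENS, u'name'), u'Liberation Serif', _FontFace())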
walterbender/turtleconfusion
TurtleArt/util/odf/attrconverters.py
Python
mit
75,191
# This file helps to compute a version number in source trees obtained from # git-archive tarball (such as those provided by githubs download-from-tag # feature). Distribution tarballs (built by setup.py sdist) and build # directories (produced by setup.py build) will contain a much shorter file # that just contains the computed version number. # This file is released into the public domain. Generated by # versioneer-0.18 (https://github.com/warner/python-versioneer) """Git implementation of _version.py.""" import errno import os import re import subprocess import sys def get_keywords(): """Get the keywords needed to look up the version information.""" # these strings will be replaced by git during git-archive. # setup.py/versioneer.py will grep for the variable names, so they must # each be defined on a line of their own. _version.py will just call # get_keywords(). git_refnames = "$Format:%d$" git_full = "$Format:%H$" git_date = "$Format:%ci$" keywords = {"refnames": git_refnames, "full": git_full, "date": git_date} return keywords class VersioneerConfig: """Container for Versioneer configuration parameters.""" def get_config(): """Create, populate and return the VersioneerConfig() object.""" # these strings are filled in when 'setup.py versioneer' creates # _version.py cfg = VersioneerConfig() cfg.VCS = "git" cfg.style = "pep440" cfg.tag_prefix = "" cfg.parentdir_prefix = "" cfg.versionfile_source = "neurodocker/_version.py" cfg.verbose = False return cfg class NotThisMethod(Exception): """Exception raised if a method is not valid for the current scenario.""" LONG_VERSION_PY = {} HANDLERS = {} def register_vcs_handler(vcs, method): # decorator """Decorator to mark a method as the handler for a particular VCS.""" def decorate(f): """Store f in HANDLERS[vcs][method].""" if vcs not in HANDLERS: HANDLERS[vcs] = {} HANDLERS[vcs][method] = f return f return decorate def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, env=None): """Call the given command(s).""" assert isinstance(commands, list) p = None for c in commands: try: dispcmd = str([c] + args) # remember shell=False, so use git.cmd on windows, not just git p = subprocess.Popen( [c] + args, cwd=cwd, env=env, stdout=subprocess.PIPE, stderr=(subprocess.PIPE if hide_stderr else None), ) break except EnvironmentError: e = sys.exc_info()[1] if e.errno == errno.ENOENT: continue if verbose: print("unable to run %s" % dispcmd) print(e) return None, None else: if verbose: print("unable to find command, tried %s" % (commands,)) return None, None stdout = p.communicate()[0].strip() if sys.version_info[0] >= 3: stdout = stdout.decode() if p.returncode != 0: if verbose: print("unable to run %s (error)" % dispcmd) print("stdout was %s" % stdout) return None, p.returncode return stdout, p.returncode def versions_from_parentdir(parentdir_prefix, root, verbose): """Try to determine the version from the parent directory name. Source tarballs conventionally unpack into a directory that includes both the project name and a version string. 
We will also support searching up two directory levels for an appropriately named parent directory """ rootdirs = [] for i in range(3): dirname = os.path.basename(root) if dirname.startswith(parentdir_prefix): return { "version": dirname[len(parentdir_prefix) :], "full-revisionid": None, "dirty": False, "error": None, "date": None, } else: rootdirs.append(root) root = os.path.dirname(root) # up a level if verbose: print( "Tried directories %s but none started with prefix %s" % (str(rootdirs), parentdir_prefix) ) raise NotThisMethod("rootdir doesn't start with parentdir_prefix") @register_vcs_handler("git", "get_keywords") def git_get_keywords(versionfile_abs): """Extract version information from the given file.""" # the code embedded in _version.py can just fetch the value of these # keywords. When used from setup.py, we don't want to import _version.py, # so we do it with a regexp instead. This function is not used from # _version.py. keywords = {} try: f = open(versionfile_abs, "r") for line in f.readlines(): if line.strip().startswith("git_refnames ="): mo = re.search(r'=\s*"(.*)"', line) if mo: keywords["refnames"] = mo.group(1) if line.strip().startswith("git_full ="): mo = re.search(r'=\s*"(.*)"', line) if mo: keywords["full"] = mo.group(1) if line.strip().startswith("git_date ="): mo = re.search(r'=\s*"(.*)"', line) if mo: keywords["date"] = mo.group(1) f.close() except EnvironmentError: pass return keywords @register_vcs_handler("git", "keywords") def git_versions_from_keywords(keywords, tag_prefix, verbose): """Get version information from git keywords.""" if not keywords: raise NotThisMethod("no keywords at all, weird") date = keywords.get("date") if date is not None: # git-2.2.0 added "%cI", which expands to an ISO-8601 -compliant # datestamp. However we prefer "%ci" (which expands to an "ISO-8601 # -like" string, which we must then edit to make compliant), because # it's been around since git-1.5.3, and it's too difficult to # discover which version we're using, or to work around using an # older one. date = date.strip().replace(" ", "T", 1).replace(" ", "", 1) refnames = keywords["refnames"].strip() if refnames.startswith("$Format"): if verbose: print("keywords are unexpanded, not using") raise NotThisMethod("unexpanded keywords, not a git-archive tarball") refs = set([r.strip() for r in refnames.strip("()").split(",")]) # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of # just "foo-1.0". If we see a "tag: " prefix, prefer those. TAG = "tag: " tags = set([r[len(TAG) :] for r in refs if r.startswith(TAG)]) if not tags: # Either we're using git < 1.8.3, or there really are no tags. We use # a heuristic: assume all version tags have a digit. The old git %d # expansion behaves like git log --decorate=short and strips out the # refs/heads/ and refs/tags/ prefixes that would let us distinguish # between branches and tags. By ignoring refnames without digits, we # filter out many common branch names like "release" and # "stabilization", as well as "HEAD" and "master". tags = set([r for r in refs if re.search(r"\d", r)]) if verbose: print("discarding '%s', no digits" % ",".join(refs - tags)) if verbose: print("likely tags: %s" % ",".join(sorted(tags))) for ref in sorted(tags): # sorting will prefer e.g. 
"2.0" over "2.0rc1" if ref.startswith(tag_prefix): r = ref[len(tag_prefix) :] if verbose: print("picking %s" % r) return { "version": r, "full-revisionid": keywords["full"].strip(), "dirty": False, "error": None, "date": date, } # no suitable tags, so version is "0+unknown", but full hex is still there if verbose: print("no suitable tags, using unknown + full revision id") return { "version": "0+unknown", "full-revisionid": keywords["full"].strip(), "dirty": False, "error": "no suitable tags", "date": None, } @register_vcs_handler("git", "pieces_from_vcs") def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): """Get version from 'git describe' in the root of the source tree. This only gets called if the git-archive 'subst' keywords were *not* expanded, and _version.py hasn't already been rewritten with a short version string, meaning we're inside a checked out source tree. """ GITS = ["git"] if sys.platform == "win32": GITS = ["git.cmd", "git.exe"] out, rc = run_command(GITS, ["rev-parse", "--git-dir"], cwd=root, hide_stderr=True) if rc != 0: if verbose: print("Directory %s not under git control" % root) raise NotThisMethod("'git rev-parse --git-dir' returned error") # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty] # if there isn't one, this yields HEX[-dirty] (no NUM) describe_out, rc = run_command( GITS, [ "describe", "--tags", "--dirty", "--always", "--long", "--match", "%s*" % tag_prefix, ], cwd=root, ) # --long was added in git-1.5.5 if describe_out is None: raise NotThisMethod("'git describe' failed") describe_out = describe_out.strip() full_out, rc = run_command(GITS, ["rev-parse", "HEAD"], cwd=root) if full_out is None: raise NotThisMethod("'git rev-parse' failed") full_out = full_out.strip() pieces = {} pieces["long"] = full_out pieces["short"] = full_out[:7] # maybe improved later pieces["error"] = None # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty] # TAG might have hyphens. git_describe = describe_out # look for -dirty suffix dirty = git_describe.endswith("-dirty") pieces["dirty"] = dirty if dirty: git_describe = git_describe[: git_describe.rindex("-dirty")] # now we have TAG-NUM-gHEX or HEX if "-" in git_describe: # TAG-NUM-gHEX mo = re.search(r"^(.+)-(\d+)-g([0-9a-f]+)$", git_describe) if not mo: # unparseable. Maybe git-describe is misbehaving? pieces["error"] = "unable to parse git-describe output: '%s'" % describe_out return pieces # tag full_tag = mo.group(1) if not full_tag.startswith(tag_prefix): if verbose: fmt = "tag '%s' doesn't start with prefix '%s'" print(fmt % (full_tag, tag_prefix)) pieces["error"] = "tag '%s' doesn't start with prefix '%s'" % ( full_tag, tag_prefix, ) return pieces pieces["closest-tag"] = full_tag[len(tag_prefix) :] # distance: number of commits since tag pieces["distance"] = int(mo.group(2)) # commit: short hex revision ID pieces["short"] = mo.group(3) else: # HEX: no tags pieces["closest-tag"] = None count_out, rc = run_command(GITS, ["rev-list", "HEAD", "--count"], cwd=root) pieces["distance"] = int(count_out) # total number of commits # commit date: see ISO-8601 comment in git_versions_from_keywords() date = run_command(GITS, ["show", "-s", "--format=%ci", "HEAD"], cwd=root)[ 0 ].strip() pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1) return pieces def plus_or_dot(pieces): """Return a + if we don't already have one, else return a .""" if "+" in pieces.get("closest-tag", ""): return "." 
return "+" def render_pep440(pieces): """Build up version string, with post-release "local version identifier". Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty Exceptions: 1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty] """ if pieces["closest-tag"]: rendered = pieces["closest-tag"] if pieces["distance"] or pieces["dirty"]: rendered += plus_or_dot(pieces) rendered += "%d.g%s" % (pieces["distance"], pieces["short"]) if pieces["dirty"]: rendered += ".dirty" else: # exception #1 rendered = "0+untagged.%d.g%s" % (pieces["distance"], pieces["short"]) if pieces["dirty"]: rendered += ".dirty" return rendered def render_pep440_pre(pieces): """TAG[.post.devDISTANCE] -- No -dirty. Exceptions: 1: no tags. 0.post.devDISTANCE """ if pieces["closest-tag"]: rendered = pieces["closest-tag"] if pieces["distance"]: rendered += ".post.dev%d" % pieces["distance"] else: # exception #1 rendered = "0.post.dev%d" % pieces["distance"] return rendered def render_pep440_post(pieces): """TAG[.postDISTANCE[.dev0]+gHEX] . The ".dev0" means dirty. Note that .dev0 sorts backwards (a dirty tree will appear "older" than the corresponding clean one), but you shouldn't be releasing software with -dirty anyways. Exceptions: 1: no tags. 0.postDISTANCE[.dev0] """ if pieces["closest-tag"]: rendered = pieces["closest-tag"] if pieces["distance"] or pieces["dirty"]: rendered += ".post%d" % pieces["distance"] if pieces["dirty"]: rendered += ".dev0" rendered += plus_or_dot(pieces) rendered += "g%s" % pieces["short"] else: # exception #1 rendered = "0.post%d" % pieces["distance"] if pieces["dirty"]: rendered += ".dev0" rendered += "+g%s" % pieces["short"] return rendered def render_pep440_old(pieces): """TAG[.postDISTANCE[.dev0]] . The ".dev0" means dirty. Eexceptions: 1: no tags. 0.postDISTANCE[.dev0] """ if pieces["closest-tag"]: rendered = pieces["closest-tag"] if pieces["distance"] or pieces["dirty"]: rendered += ".post%d" % pieces["distance"] if pieces["dirty"]: rendered += ".dev0" else: # exception #1 rendered = "0.post%d" % pieces["distance"] if pieces["dirty"]: rendered += ".dev0" return rendered def render_git_describe(pieces): """TAG[-DISTANCE-gHEX][-dirty]. Like 'git describe --tags --dirty --always'. Exceptions: 1: no tags. HEX[-dirty] (note: no 'g' prefix) """ if pieces["closest-tag"]: rendered = pieces["closest-tag"] if pieces["distance"]: rendered += "-%d-g%s" % (pieces["distance"], pieces["short"]) else: # exception #1 rendered = pieces["short"] if pieces["dirty"]: rendered += "-dirty" return rendered def render_git_describe_long(pieces): """TAG-DISTANCE-gHEX[-dirty]. Like 'git describe --tags --dirty --always -long'. The distance/hash is unconditional. Exceptions: 1: no tags. 
HEX[-dirty] (note: no 'g' prefix) """ if pieces["closest-tag"]: rendered = pieces["closest-tag"] rendered += "-%d-g%s" % (pieces["distance"], pieces["short"]) else: # exception #1 rendered = pieces["short"] if pieces["dirty"]: rendered += "-dirty" return rendered def render(pieces, style): """Render the given version pieces into the requested style.""" if pieces["error"]: return { "version": "unknown", "full-revisionid": pieces.get("long"), "dirty": None, "error": pieces["error"], "date": None, } if not style or style == "default": style = "pep440" # the default if style == "pep440": rendered = render_pep440(pieces) elif style == "pep440-pre": rendered = render_pep440_pre(pieces) elif style == "pep440-post": rendered = render_pep440_post(pieces) elif style == "pep440-old": rendered = render_pep440_old(pieces) elif style == "git-describe": rendered = render_git_describe(pieces) elif style == "git-describe-long": rendered = render_git_describe_long(pieces) else: raise ValueError("unknown style '%s'" % style) return { "version": rendered, "full-revisionid": pieces["long"], "dirty": pieces["dirty"], "error": None, "date": pieces.get("date"), } def get_versions(): """Get version information or return default if unable to do so.""" # I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have # __file__, we can work backwards from there to the root. Some # py2exe/bbfreeze/non-CPython implementations don't do __file__, in which # case we can only use expanded keywords. cfg = get_config() verbose = cfg.verbose try: return git_versions_from_keywords(get_keywords(), cfg.tag_prefix, verbose) except NotThisMethod: pass try: root = os.path.realpath(__file__) # versionfile_source is the relative path from the top of the source # tree (where the .git directory might live) to this file. Invert # this to find the root from __file__. for i in cfg.versionfile_source.split("/"): root = os.path.dirname(root) except NameError: return { "version": "0+unknown", "full-revisionid": None, "dirty": None, "error": "unable to find root of source tree", "date": None, } try: pieces = git_pieces_from_vcs(cfg.tag_prefix, root, verbose) return render(pieces, cfg.style) except NotThisMethod: pass try: if cfg.parentdir_prefix: return versions_from_parentdir(cfg.parentdir_prefix, root, verbose) except NotThisMethod: pass return { "version": "0+unknown", "full-revisionid": None, "dirty": None, "error": "unable to compute version", "date": None, }
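# --- Illustrative sketch (not part of versioneer; added for clarity). ---
# render() consumes the "pieces" dict produced by git_pieces_from_vcs() or
# git_versions_from_keywords(). For a hypothetical checkout 3 commits past
# tag "1.2" at short hash abc1234 with uncommitted changes:
#
#   pieces = {"closest-tag": "1.2", "distance": 3, "short": "abc1234",
#             "long": "abc1234" + "0" * 33, "dirty": True,
#             "error": None, "date": "2019-01-01T00:00:00+0000"}
#   render(pieces, "pep440")["version"]        # -> "1.2+3.gabc1234.dirty"
#   render(pieces, "git-describe")["version"]  # -> "1.2-3-gabc1234-dirty"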
kaczmarj/neurodocker
neurodocker/_version.py
Python
apache-2.0
18,468
""" :copyright: (c) 2011 Local Projects, all rights reserved :license: Affero GNU GPL v3, see LICENSE for more details. """ from framework.controller import * import framework.util as util import giveaminute.project as mProject import giveaminute.idea as mIdea import giveaminute.projectResource as mProjectResource import giveaminute.messaging as mMessaging import giveaminute.models as models import helpers.censor import json import re import datetime class Project(Controller): def GET(self, action=None, param0=None, param1=None): if (action == 'resource'): if (param0 == 'info'): return self.getResourceInfo() else: return self.not_found() elif (action == 'resources'): if (param0 == 'related'): return self.getRelatedResources() else: return self.getResourcesAndLinks() elif (action == 'messages'): return self.getMessages() elif (action == 'featured'): return self.getFeaturedProjects() elif (action == 'small'): return self.getProjectData() elif (action == 'rss'): return self.showConversationRSS(param0) else: return self.showProject(action) def POST(self, action=None, param0=None, param1=None): if (action == 'join'): return self.join() elif (action == 'endorse'): if (param0 == 'remove'): return self.removeEndorsement() else: return self.endorse() elif (action == 'link'): if (param0 == 'add'): return self.addLink() elif (param0 == 'remove'): return self.removeLink() else: return self.not_found() elif (action == 'resource'): if (param0 == 'add'): return self.addResource() elif (param0 == 'remove'): return self.removeResource() else: return self.not_found() elif (action == 'message'): if (param0 == 'add'): return self.addMessage() elif (param0 == 'remove'): return self.removeMessage() else: return self.not_found() elif (action == 'tag'): if (param0 == 'add'): return self.addKeywords() elif (param0 == 'remove'): return self.removeKeyword() else: return self.not_found() elif (action == 'invite'): return self.invite() elif (action == 'leave'): return self.leaveProject() elif (action == 'user'): if (param0 == 'remove'): return self.removeUser() elif (param0 == 'admin'): if (param1 == 'add'): return self.setAdmin(True) elif (param1 == 'remove'): return self.setAdmin(False) else: return self.not_found() else: return self.not_found() elif (action == 'photo'): return self.updateImage() elif (action == 'description'): return self.updateDescription() elif (action == 'title'): return self.updateTitle() else: return self.not_found() def getProject(self, project_id): """Get the SQL Alchemy project object""" project = self.orm.query(models.Project).get(project_id) return project def showProject(self, projectId): """The main project detail view controller.""" if (projectId): project = mProject.Project(self.db, projectId) if (project.data): projDictionary = project.getFullDictionary() project_user = self.getProjectUser(projectId) self.template_data['project_user'] = dict(data = project_user, json = json.dumps(project_user)) project_proxy = self.getProject(projectId) project_proxy.json = json.dumps(projDictionary) project_proxy.data = projDictionary self.template_data['project'] = project_proxy import giveaminute.filters as gam_filters gam_filters.register_filters() return self.render('project') else: return self.not_found() else: return self.not_found() def showConversationRSS(self, projectId): if (projectId): project = mProject.Project(self.db, projectId) projDictionary = project.getFullDictionary() self.template_data['project'] = dict(json = json.dumps(projDictionary), data = projDictionary) msgs = 
self.template_data['project']['data']['info']['messages']['items'] for item in msgs: item['created'] = datetime.datetime.strptime(item['created'], '%Y-%m-%d %H:%M:%S').strftime('%a, %d %b %Y %H:%M:%S EST') return self.render('project/conversation_rss', suffix='xml.rss', content_type = 'application/rss+xml') else: return self.not_found() def getProjectUser(self, projectId): projectUser = dict(is_project_admin = False, is_member = False, is_invited_by_idea = False, can_endorse = False) if (self.user): sqlInvited = """select pi.project_id from project_invite pi inner join idea i on i.idea_id = pi.invitee_idea_id where pi.project_id = $projectId and i.user_id = $userId limit 1""" dataInvited = list(self.db.query(sqlInvited, {'userId':self.user.id, 'email':self.user.email, 'projectId':projectId})) projectUser['is_invited_by_idea'] = (len(dataInvited) == 1) sqlMember = "select is_project_admin from project__user where user_id = $userId and project_id = $projectId limit 1" dataMember = list(self.db.query(sqlMember, {'userId':self.user.id, 'projectId':projectId})) if (len(dataMember)== 1): projectUser['is_member'] = True if (dataMember[0].is_project_admin == 1): projectUser['is_project_admin'] = True # # # if (self.user.isLeader): sqlEndorse = "select user_id from project_endorsement where project_id = $projectId and user_id = $userId limit 1" dataEndorse = list(self.db.query(sqlEndorse, {'userId':self.user.id, 'projectId':projectId})) projectUser['can_endorse'] = (len(dataEndorse) == 0) else: projectUser['can_endorse'] = False return projectUser def join(self): projectId = self.request('project_id') if (not self.user): log.error("*** join submitted w/o logged in user") return False elif (not projectId): log.error("*** join submitted w/o logged project id") return False else: isJoined = mProject.join(self.db, projectId, self.user.id) if (isJoined): project = mProject.Project(self.db, projectId) # add a message to the queue about the join message = 'New Member! Your project now has %s total!' % project.data.num_members # email admin if (not mMessaging.emailProjectJoin(project.data.owner_email, projectId, project.data.title, self.user.id, mProject.userNameDisplay(self.user.firstName, self.user.lastName, self.user.affiliation, mProject.isFullLastName(self.user.groupMembershipBitmask)))): log.error("*** couldn't email admin on user_id = %s joining project %s" % (self.user.id, projectId)) if (not mProject.addMessage(self.db, projectId, message, 'join', self.user.id)): log.error("*** new message not created for user %s on joining project %s" % (self.user.id, projectId)) return isJoined def invite(self): projectId = self.request('project_id') ideaId = self.request('idea_id') emails = self.request('email_list') message = self.request('message') if (not self.user): log.error("*** invite w/o logged in user") return False elif (not projectId): log.error("***invite w/o project id") return False else: if (ideaId): return mProject.inviteByIdea(self.db, projectId, ideaId, message, self.user) elif (emails): return mProject.inviteByEmail(self.db, projectId, emails.split(','), message, self.user) else: log.error("*** invite w/o idea or email") return False def endorse(self): projectId = self.request('project_id') if (not self.user or not self.user.isLeader): log.error("*** endorsement submitted w/o logged in user or with non-project leader user account") return False else: isEndorsed = mProject.endorse(self.db, projectId, self.user.id) if (isEndorsed): # TODO do we need to get the whole project here? 
project = mProject.Project(self.db, projectId) # email admin if (not mMessaging.emailProjectEndorsement(project.data.owner_email, project.data.title, "%s %s" % (self.user.firstName, self.user.lastName))): log.error("*** couldn't email admin on user_id = %s endorsing project %s" % (self.user.id, projectId)) # add a message to the queue about the join message = 'Congratulations! Your group has now been endorsed by %s %s.' % (self.user.firstName, self.user.lastName) if (not mProject.addMessage(self.db, projectId, message, 'endorsement', self.user.id)): log.error("*** new message not created for user %s on endorsing project %s" % (self.user.id, projectId)) return isEndorsed def removeEndorsement(self): projectId = self.request('project_id') userId = util.try_f(int, self.request('user_id')) if (self.user and ((self.user.isLeader and self.user.id == userId) or self.user.isAdmin)): isRemoved = mProject.removeEndorsement(self.db, projectId, userId) # if successfully removed, remove messages as well if (isRemoved): mProject.removeEndorsementMessage(self.db, projectId, userId) return isRemoved else: log.error("*** attempt to remove endorsement w/o proper credentials") return False def addLink(self): if (self.request('main_text')): return False projectId = self.request('project_id') title = self.request('title') url = util.makeUrlAbsolute(self.request('url')) if self.request('url') else None if (not projectId or util.strNullOrEmpty(title) or util.strNullOrEmpty(url)): log.error("*** link submitted w/o id, title, or url") return False else: return mProject.addLinkToProject(self.db, projectId, title, url) def removeLink(self): projectId = self.request('project_id') linkId = self.request('link_id') if (not linkId): log.error("*** link removal submitted missing an id") return False else: if (not self.user.isAdmin and not self.user.isModerator and not self.user.isProjectAdmin(projectId)): log.warning("*** unauthorized link removal attempt by user_id = %s" % self.user.id) return False else: return mProject.setLinkIsActive(self.db, linkId, 0) def addResource(self): projectId = self.request('project_id') projectResourceId = self.request('project_resource_id') if (not projectId or not projectResourceId): log.error("*** resource submitted missing an id") return False else: if (mProject.addResourceToProject(self.db, projectId, projectResourceId)): # TODO do we need to get the whole project here? 
project = mProject.Project(self.db, projectId) res = mProjectResource.ProjectResource(self.db, projectResourceId) if (not mMessaging.emailResourceNotification(res.data.contact_email, projectId, project.data.title, project.data.description, res.data.title)): log.error("*** couldn't email resource id %s" % projectResourceId) else: log.error("*** couldn't add resource %s to project %s" % (projectResourceId, projectId)) return False def removeResource(self): projectId = self.request('project_id') projectResourceId = self.request('project_resource_id') if (not projectId or not projectResourceId): log.error("*** resource removal submitted missing an id") return False else: if (not self.user.isAdmin and not self.user.isModerator and not self.user.isProjectAdmin(projectId)): log.warning("*** unauthorized resource removal attempt by user_id = %s" % self.user.id) return False else: return mProject.removeResourceFromProject(self.db, projectId, projectResourceId) def getResourceInfo(self): projectResourceId = self.request('project_resource_id') info = None resource = mProjectResource.ProjectResource(self.db, projectResourceId) if (resource.data): info = self.json(resource.getFullDictionary()) return info def getResourcesAndLinks(self): projectId = self.request('project_id') data = dict(links = mProject.getLinks(self.db, projectId), resources = mProject.getResources(self.db, projectId)) return self.json(data) def getRelatedResources(self): projectId = self.request('project_id') resources = [] project = mProject.Project(self.db, projectId) keywords = project.data.keywords.split() locationId = project.data.location_id resources = mProjectResource.searchProjectResources(self.db, keywords, locationId) obj = dict(resources = resources) return self.json(obj) def addMessage(self): """ Add a message to the project discussion stream. POST Parameters: --------------- project_id -- The id of the project main_text -- The message contents attachment_id -- (optional) The file attachment on the message. If no file attachment is available, it should be an empty string or left off of the request entirely. """ if (self.request('main_text')): return False projectId = self.request('project_id') message = self.request('message') # If the file_id is None or empty string, record it as None. 
attachmentId = self.request('attachment_id') or None if (not projectId): log.error("*** message add attempted w/o project id") return False elif (util.strNullOrEmpty(message)): log.error("*** message add attempted w/ no message") return False else: return mProject.addMessage(self.db, projectId, message, 'member_comment', self.user.id, attachmentId=attachmentId) def removeMessage(self): messageId = self.request('message_id') if (not messageId): log.error("*** message remove attempted w/o ids") return False else: return mProject.removeMessage(self.db, messageId) def getMessages(self): projectId = self.request('project_id') limit = util.try_f(int, self.request('n_messages'), 10) offset = util.try_f(int, self.request('offset'), 0) filterBy = self.request('filter') return self.json(mProject.getMessages(self.db, projectId, limit, offset, filterBy)) def getFeaturedProjects(self): # overkill to get the full dictionary, but it's a small admin-only call projects = mProject.getFeaturedProjectsDictionary(self.db) return self.json(projects) def getProjectData(self): projectId = self.request('project_id') project = mProject.Project(self.db, projectId) return self.json(mProject.smallProject(project.id, project.data.title, project.data.description, project.data.image_id, project.data.num_members, project.data.owner_user_id, project.data.owner_first_name, project.data.owner_last_name, project.data.owner_image_id)) def addKeywords(self): projectId = self.request('project_id') keywords = self.request('text') if (projectId and keywords): return mProject.addKeywords(self.db, projectId, keywords.split(',')) else: log.error("*** add keyword attempted w/o project id or keywords") return False def removeKeyword(self): projectId = self.request('project_id') keyword = self.request('text') return mProject.removeKeyword(self.db, projectId, keyword) def leaveProject(self): userId = self.session.user_id projectId = self.request('project_id') return mProject.removeUserFromProject(self.db, projectId, userId) def removeUser(self): projectId = self.request('project_id') userId = self.request('user_id') return mProject.removeUserFromProject(self.db, projectId, userId) def updateImage(self): projectId = self.request('project_id') imageId = self.request('image_id') return mProject.updateProjectImage(self.db, projectId, imageId) def updateDescription(self): projectId = self.request('project_id') description = self.request('text') return mProject.updateProjectDescription(self.db, projectId, description) def updateTitle(self): project_id = self.request('project_id') title = self.request('title') num_flags = helpers.censor.badwords(self.db, title) if num_flags == 2: return False project = self.orm.query(models.Project).get(project_id) if project is None: return False project.title = title self.orm.commit() return True def setAdmin(self, b): projectId = self.request('project_id') userId = self.request('user_id') projectUser = self.orm.query(models.ProjectMember).get((userId, projectId)) # TODO prevent last admin from being deleted # TODO on delete of creator, make oldest admin creator if projectUser: projectUser.is_project_admin = b self.orm.commit() return True else: return False
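# --- Illustrative note (not part of Change-By-Us; added for clarity). ---
# The POST() dispatcher above maps URL path segments to handlers, so a
# request like POST /project/message/add with form fields project_id,
# message and an optional attachment_id reaches addMessage(). The unused
# 'main_text' field checked first in addLink()/addMessage() appears to act
# as a spam honeypot: any request that fills it is rejected. A hypothetical
# Python 2 client-side sketch:
#
#   import urllib, urllib2
#   urllib2.urlopen('http://example.org/project/message/add',
#                   urllib.urlencode({'project_id': 42,
#                                     'message': 'Hello team'}))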
localprojects/Change-By-Us
controllers/project.py
Python
agpl-3.0
20,727
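# getMessages() above coerces its paging parameters with util.try_f. That
# helper is not shown in this file; the sketch below is a hypothetical
# reconstruction matching the (callable, value, default) call sites, not the
# project's actual implementation.
def try_f(f, value, default=None):
    """Apply f to value, returning default if the conversion fails."""
    try:
        return f(value)
    except (TypeError, ValueError):
        return default

# Mirrors getMessages(): limit falls back to 10, offset to 0.
assert try_f(int, '25', 10) == 25
assert try_f(int, None, 0) == 0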
# myaixterm.py: custom color mappings for the aixterm 256-color palette.
# Copyright (c) cxw 2015

import itertools
import csv
import os

_DEF_COLOR_FN = 'myaixterm-db.txt'

aix_colors = {}

def get_all_colors():
    return aix_colors

def aix_fg(color):
    """ Returns a string that will set the foreground to _color_,
    which can be a color number (0..255) or a name in aix_colors. """
    if isinstance(color, str):
        colornum = aix_colors[color]
    else:
        colornum = color
    return '\033[38;5;%dm' % colornum

def aix_bg(color):
    """ Returns a string that will set the background to _color_,
    which can be a color number (0..255) or a name in aix_colors. """
    if isinstance(color, str):
        colornum = aix_colors[color]
    else:
        colornum = color
    return '\033[48;5;%dm' % colornum

def aix_normal():
    """ Returns a string that will set the foreground and background
    to their default colors. """
    return '\033[0m'

def aix_init(fn=_DEF_COLOR_FN):
    with open(os.path.join(os.path.dirname(__file__), fn)) as fd:
        reallines = itertools.filterfalse(lambda r: r.startswith('#'), fd)
        for row in csv.DictReader(reallines, fieldnames=['r', 'g', 'b', 'n'],
                                  restkey='names'):
            for name in row['names']:
                aix_colors[name] = int(row['n'])
            # end foreach name
        # end foreach row
    # end with
# end aix_init
fareskalaboud/pybugger
pybugger/myaixterm.py
Python
gpl-3.0
1,472
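# A usage sketch for myaixterm above: load the color-name database once, then
# wrap text in escape sequences. This assumes the bundled 'myaixterm-db.txt'
# defines a color named 'red'; numeric codes in 0..255 always work.
from pybugger import myaixterm as aix

aix.aix_init()
print(aix.aix_fg('red') + 'warning text' + aix.aix_normal())
print(aix.aix_bg(28) + aix.aix_fg(255) + 'light text on green' + aix.aix_normal())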
import logging from pip._vendor.packaging.utils import canonicalize_name from pip._internal.exceptions import ( DistributionNotFound, InstallationError, UnsupportedPythonVersion, UnsupportedWheel, ) from pip._internal.models.wheel import Wheel from pip._internal.req.req_install import InstallRequirement from pip._internal.utils.compatibility_tags import get_supported from pip._internal.utils.hashes import Hashes from pip._internal.utils.misc import ( dist_in_site_packages, dist_in_usersite, get_installed_distributions, ) from pip._internal.utils.typing import MYPY_CHECK_RUNNING from pip._internal.utils.virtualenv import running_under_virtualenv from .base import Constraint from .candidates import ( AlreadyInstalledCandidate, EditableCandidate, ExtrasCandidate, LinkCandidate, RequiresPythonCandidate, ) from .found_candidates import FoundCandidates from .requirements import ( ExplicitRequirement, RequiresPythonRequirement, SpecifierRequirement, ) if MYPY_CHECK_RUNNING: from typing import ( FrozenSet, Dict, Iterable, Iterator, List, Optional, Sequence, Set, Tuple, TypeVar, ) from pip._vendor.packaging.specifiers import SpecifierSet from pip._vendor.packaging.version import _BaseVersion from pip._vendor.pkg_resources import Distribution from pip._vendor.resolvelib import ResolutionImpossible from pip._internal.cache import CacheEntry, WheelCache from pip._internal.index.package_finder import PackageFinder from pip._internal.models.link import Link from pip._internal.operations.prepare import RequirementPreparer from pip._internal.resolution.base import InstallRequirementProvider from .base import Candidate, Requirement from .candidates import BaseCandidate C = TypeVar("C") Cache = Dict[Link, C] VersionCandidates = Dict[_BaseVersion, Candidate] logger = logging.getLogger(__name__) class Factory(object): def __init__( self, finder, # type: PackageFinder preparer, # type: RequirementPreparer make_install_req, # type: InstallRequirementProvider wheel_cache, # type: Optional[WheelCache] use_user_site, # type: bool force_reinstall, # type: bool ignore_installed, # type: bool ignore_requires_python, # type: bool py_version_info=None, # type: Optional[Tuple[int, ...]] lazy_wheel=False, # type: bool ): # type: (...) -> None self._finder = finder self.preparer = preparer self._wheel_cache = wheel_cache self._python_candidate = RequiresPythonCandidate(py_version_info) self._make_install_req_from_spec = make_install_req self._use_user_site = use_user_site self._force_reinstall = force_reinstall self._ignore_requires_python = ignore_requires_python self.use_lazy_wheel = lazy_wheel self._link_candidate_cache = {} # type: Cache[LinkCandidate] self._editable_candidate_cache = {} # type: Cache[EditableCandidate] if not ignore_installed: self._installed_dists = { canonicalize_name(dist.project_name): dist for dist in get_installed_distributions(local_only=False) } else: self._installed_dists = {} @property def force_reinstall(self): # type: () -> bool return self._force_reinstall def _make_candidate_from_dist( self, dist, # type: Distribution extras, # type: FrozenSet[str] template, # type: InstallRequirement ): # type: (...) -> Candidate base = AlreadyInstalledCandidate(dist, template, factory=self) if extras: return ExtrasCandidate(base, extras) return base def _make_candidate_from_link( self, link, # type: Link extras, # type: FrozenSet[str] template, # type: InstallRequirement name, # type: Optional[str] version, # type: Optional[_BaseVersion] ): # type: (...) 
-> Candidate # TODO: Check already installed candidate, and use it if the link and # editable flag match. if template.editable: if link not in self._editable_candidate_cache: self._editable_candidate_cache[link] = EditableCandidate( link, template, factory=self, name=name, version=version, ) base = self._editable_candidate_cache[link] # type: BaseCandidate else: if link not in self._link_candidate_cache: self._link_candidate_cache[link] = LinkCandidate( link, template, factory=self, name=name, version=version, ) base = self._link_candidate_cache[link] if extras: return ExtrasCandidate(base, extras) return base def _iter_found_candidates( self, ireqs, # type: Sequence[InstallRequirement] specifier, # type: SpecifierSet hashes, # type: Hashes prefers_installed, # type: bool ): # type: (...) -> Iterable[Candidate] if not ireqs: return () # The InstallRequirement implementation requires us to give it a # "template". Here we just choose the first requirement to represent # all of them. # Hopefully the Project model can correct this mismatch in the future. template = ireqs[0] name = canonicalize_name(template.req.name) extras = frozenset() # type: FrozenSet[str] for ireq in ireqs: specifier &= ireq.req.specifier hashes &= ireq.hashes(trust_internet=False) extras |= frozenset(ireq.extras) # Get the installed version, if it matches, unless the user # specified `--force-reinstall`, when we want the version from # the index instead. installed_candidate = None if not self._force_reinstall and name in self._installed_dists: installed_dist = self._installed_dists[name] if specifier.contains(installed_dist.version, prereleases=True): installed_candidate = self._make_candidate_from_dist( dist=installed_dist, extras=extras, template=template, ) def iter_index_candidates(): # type: () -> Iterator[Candidate] result = self._finder.find_best_candidate( project_name=name, specifier=specifier, hashes=hashes, ) # PackageFinder returns earlier versions first, so we reverse. for ican in reversed(list(result.iter_applicable())): yield self._make_candidate_from_link( link=ican.link, extras=extras, template=template, name=name, version=ican.version, ) return FoundCandidates( iter_index_candidates, installed_candidate, prefers_installed, ) def find_candidates( self, requirements, # type: Sequence[Requirement] constraint, # type: Constraint prefers_installed, # type: bool ): # type: (...) -> Iterable[Candidate] explicit_candidates = set() # type: Set[Candidate] ireqs = [] # type: List[InstallRequirement] for req in requirements: cand, ireq = req.get_candidate_lookup() if cand is not None: explicit_candidates.add(cand) if ireq is not None: ireqs.append(ireq) # If none of the requirements want an explicit candidate, we can ask # the finder for candidates. 
if not explicit_candidates: return self._iter_found_candidates( ireqs, constraint.specifier, constraint.hashes, prefers_installed, ) if constraint: name = explicit_candidates.pop().name raise InstallationError( "Could not satisfy constraints for {!r}: installation from " "path or url cannot be constrained to a version".format(name) ) return ( c for c in explicit_candidates if all(req.is_satisfied_by(c) for req in requirements) ) def make_requirement_from_install_req(self, ireq, requested_extras): # type: (InstallRequirement, Iterable[str]) -> Optional[Requirement] if not ireq.match_markers(requested_extras): logger.info( "Ignoring %s: markers '%s' don't match your environment", ireq.name, ireq.markers, ) return None if not ireq.link: return SpecifierRequirement(ireq) if ireq.link.is_wheel: wheel = Wheel(ireq.link.filename) if not wheel.supported(self._finder.target_python.get_tags()): msg = "{} is not a supported wheel on this platform.".format( wheel.filename, ) raise UnsupportedWheel(msg) cand = self._make_candidate_from_link( ireq.link, extras=frozenset(ireq.extras), template=ireq, name=canonicalize_name(ireq.name) if ireq.name else None, version=None, ) return self.make_requirement_from_candidate(cand) def make_requirement_from_candidate(self, candidate): # type: (Candidate) -> ExplicitRequirement return ExplicitRequirement(candidate) def make_requirement_from_spec( self, specifier, # type: str comes_from, # type: InstallRequirement requested_extras=(), # type: Iterable[str] ): # type: (...) -> Optional[Requirement] ireq = self._make_install_req_from_spec(specifier, comes_from) return self.make_requirement_from_install_req(ireq, requested_extras) def make_requires_python_requirement(self, specifier): # type: (Optional[SpecifierSet]) -> Optional[Requirement] if self._ignore_requires_python or specifier is None: return None return RequiresPythonRequirement(specifier, self._python_candidate) def get_wheel_cache_entry(self, link, name): # type: (Link, Optional[str]) -> Optional[CacheEntry] """Look up the link in the wheel cache. If ``preparer.require_hashes`` is True, don't use the wheel cache, because cached wheels, always built locally, have different hashes than the files downloaded from the index server and thus throw false hash mismatches. Furthermore, cached wheels at present have nondeterministic contents due to file modification times. """ if self._wheel_cache is None or self.preparer.require_hashes: return None return self._wheel_cache.get_cache_entry( link=link, package_name=name, supported_tags=get_supported(), ) def get_dist_to_uninstall(self, candidate): # type: (Candidate) -> Optional[Distribution] # TODO: Are there more cases this needs to return True? Editable? dist = self._installed_dists.get(candidate.name) if dist is None: # Not installed, no uninstallation required. return None # We're installing into global site. The current installation must # be uninstalled, no matter it's in global or user site, because the # user site installation has precedence over global. if not self._use_user_site: return dist # We're installing into user site. Remove the user site installation. if dist_in_usersite(dist): return dist # We're installing into user site, but the installed incompatible # package is in global site. We can't uninstall that, and would let # the new user installation to "shadow" it. But shadowing won't work # in virtual environments, so we error out. 
if running_under_virtualenv() and dist_in_site_packages(dist): raise InstallationError( "Will not install to the user site because it will " "lack sys.path precedence to {} in {}".format( dist.project_name, dist.location, ) ) return None def _report_requires_python_error( self, requirement, # type: RequiresPythonRequirement template, # type: Candidate ): # type: (...) -> UnsupportedPythonVersion message_format = ( "Package {package!r} requires a different Python: " "{version} not in {specifier!r}" ) message = message_format.format( package=template.name, version=self._python_candidate.version, specifier=str(requirement.specifier), ) return UnsupportedPythonVersion(message) def get_installation_error(self, e): # type: (ResolutionImpossible) -> InstallationError assert e.causes, "Installation error reported with no cause" # If one of the things we can't solve is "we need Python X.Y", # that is what we report. for cause in e.causes: if isinstance(cause.requirement, RequiresPythonRequirement): return self._report_requires_python_error( cause.requirement, cause.parent, ) # Otherwise, we have a set of causes which can't all be satisfied # at once. # The simplest case is when we have *one* cause that can't be # satisfied. We just report that case. if len(e.causes) == 1: req, parent = e.causes[0] if parent is None: req_disp = str(req) else: req_disp = '{} (from {})'.format(req, parent.name) logger.critical( "Could not find a version that satisfies the requirement %s", req_disp, ) return DistributionNotFound( 'No matching distribution found for {}'.format(req) ) # OK, we now have a list of requirements that can't all be # satisfied at once. # A couple of formatting helpers def text_join(parts): # type: (List[str]) -> str if len(parts) == 1: return parts[0] return ", ".join(parts[:-1]) + " and " + parts[-1] def readable_form(cand): # type: (Candidate) -> str return "{} {}".format(cand.name, cand.version) def describe_trigger(parent): # type: (Candidate) -> str ireq = parent.get_install_requirement() if not ireq or not ireq.comes_from: return "{} {}".format(parent.name, parent.version) if isinstance(ireq.comes_from, InstallRequirement): return str(ireq.comes_from.name) return str(ireq.comes_from) triggers = [] for req, parent in e.causes: if parent is None: # This is a root requirement, so we can report it directly trigger = req.format_for_error() else: trigger = describe_trigger(parent) triggers.append(trigger) if triggers: info = text_join(triggers) else: info = "the requested packages" msg = "Cannot install {} because these package versions " \ "have conflicting dependencies.".format(info) logger.critical(msg) msg = "\nThe conflict is caused by:" for req, parent in e.causes: msg = msg + "\n " if parent: msg = msg + "{} {} depends on ".format( parent.name, parent.version ) else: msg = msg + "The user requested " msg = msg + req.format_for_error() msg = msg + "\n\n" + \ "To fix this you could try to:\n" + \ "1. loosen the range of package versions you've specified\n" + \ "2. remove package versions to allow pip attempt to solve " + \ "the dependency conflict\n" logger.info(msg) return DistributionNotFound( "ResolutionImpossible: for help visit " "https://pip.pypa.io/en/latest/user_guide/" "#fixing-conflicting-dependencies" )
RalfBarkow/Zettelkasten
venv/lib/python3.9/site-packages/pip/_internal/resolution/resolvelib/factory.py
Python
gpl-3.0
16,888
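# The ResolutionImpossible reporting above joins conflicting requirement
# names into readable English with a local helper. The same joining logic as
# a standalone, testable sketch:
def text_join(parts):
    # "a", "a and b", "a, b and c"
    if len(parts) == 1:
        return parts[0]
    return ", ".join(parts[:-1]) + " and " + parts[-1]

assert text_join(["requests"]) == "requests"
assert text_join(["requests", "urllib3", "idna"]) == "requests, urllib3 and idna"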
import sys import types import pkg_resources import pytest import pandas.util._test_decorators as td import pandas dummy_backend = types.ModuleType("pandas_dummy_backend") setattr(dummy_backend, "plot", lambda *args, **kwargs: "used_dummy") @pytest.fixture def restore_backend(): """Restore the plotting backend to matplotlib""" with pandas.option_context("plotting.backend", "matplotlib"): yield def test_backend_is_not_module(): msg = "Could not find plotting backend 'not_an_existing_module'." with pytest.raises(ValueError, match=msg): pandas.set_option("plotting.backend", "not_an_existing_module") assert pandas.options.plotting.backend == "matplotlib" def test_backend_is_correct(monkeypatch, restore_backend): monkeypatch.setitem(sys.modules, "pandas_dummy_backend", dummy_backend) pandas.set_option("plotting.backend", "pandas_dummy_backend") assert pandas.get_option("plotting.backend") == "pandas_dummy_backend" assert ( pandas.plotting._core._get_plot_backend("pandas_dummy_backend") is dummy_backend ) def test_backend_can_be_set_in_plot_call(monkeypatch, restore_backend): monkeypatch.setitem(sys.modules, "pandas_dummy_backend", dummy_backend) df = pandas.DataFrame([1, 2, 3]) assert pandas.get_option("plotting.backend") == "matplotlib" assert df.plot(backend="pandas_dummy_backend") == "used_dummy" @td.skip_if_no_mpl def test_register_entrypoint(restore_backend): dist = pkg_resources.get_distribution("pandas") if dist.module_path not in pandas.__file__: # We are running from a non-installed pandas, and this test is invalid pytest.skip("Testing a non-installed pandas") mod = types.ModuleType("my_backend") mod.plot = lambda *args, **kwargs: 1 backends = pkg_resources.get_entry_map("pandas") my_entrypoint = pkg_resources.EntryPoint( "pandas_plotting_backend", mod.__name__, dist=dist ) backends["pandas_plotting_backends"]["my_backend"] = my_entrypoint # TODO: the docs recommend importlib.util.module_from_spec. But this works for now. sys.modules["my_backend"] = mod result = pandas.plotting._core._get_plot_backend("my_backend") assert result is mod # TODO(GH#27517): https://github.com/pandas-dev/pandas/issues/27517 # Remove the td.skip_if_no_mpl with pandas.option_context("plotting.backend", "my_backend"): result = pandas.plotting._core._get_plot_backend() assert result is mod def test_setting_backend_without_plot_raises(): # GH-28163 module = types.ModuleType("pandas_plot_backend") sys.modules["pandas_plot_backend"] = module assert pandas.options.plotting.backend == "matplotlib" with pytest.raises( ValueError, match="Could not find plotting backend 'pandas_plot_backend'." ): pandas.set_option("plotting.backend", "pandas_plot_backend") assert pandas.options.plotting.backend == "matplotlib" @td.skip_if_mpl def test_no_matplotlib_ok(): msg = ( 'matplotlib is required for plotting when the default backend "matplotlib" is ' "selected." ) with pytest.raises(ImportError, match=msg): pandas.plotting._core._get_plot_backend("matplotlib") def test_extra_kinds_ok(monkeypatch, restore_backend): # https://github.com/pandas-dev/pandas/pull/28647 monkeypatch.setitem(sys.modules, "pandas_dummy_backend", dummy_backend) pandas.set_option("plotting.backend", "pandas_dummy_backend") df = pandas.DataFrame({"A": [1, 2, 3]}) df.plot(kind="not a real kind")
pandas-dev/pandas
pandas/tests/plotting/test_backend.py
Python
bsd-3-clause
3,593
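# The tests above register dummy modules as plotting backends. A third-party
# backend only needs to be an importable module exposing a plot() entry
# point; a minimal sketch (the module name below is illustrative):
import sys
import types

import pandas as pd

backend = types.ModuleType("my_tiny_backend")
backend.plot = lambda data, kind=None, **kwargs: "plotted {}".format(kind or "line")
sys.modules["my_tiny_backend"] = backend

pd.set_option("plotting.backend", "my_tiny_backend")
print(pd.DataFrame({"A": [1, 2, 3]}).plot(kind="bar"))  # -> "plotted bar"
pd.set_option("plotting.backend", "matplotlib")  # restore the default (needs matplotlib)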
# -*- coding: utf-8 -*- # vim: autoindent shiftwidth=4 expandtab textwidth=120 tabstop=4 softtabstop=4 ############################################################################### # OpenLP - Open Source Lyrics Projection # # --------------------------------------------------------------------------- # # Copyright (c) 2008-2015 OpenLP Developers # # --------------------------------------------------------------------------- # # This program is free software; you can redistribute it and/or modify it # # under the terms of the GNU General Public License as published by the Free # # Software Foundation; version 2 of the License. # # # # This program is distributed in the hope that it will be useful, but WITHOUT # # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or # # FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for # # more details. # # # # You should have received a copy of the GNU General Public License along # # with this program; if not, write to the Free Software Foundation, Inc., 59 # # Temple Place, Suite 330, Boston, MA 02111-1307 USA # ############################################################################### """ The :mod:`~openlp.plugins.songs.songsplugin` module contains the Plugin class for the Songs plugin. """ import logging import os from tempfile import gettempdir import sqlite3 from PyQt4 import QtCore, QtGui from openlp.core.common import UiStrings, Registry, translate from openlp.core.lib import Plugin, StringContent, build_icon from openlp.core.lib.db import Manager from openlp.core.lib.ui import create_action from openlp.core.utils.actions import ActionList from openlp.plugins.songs.forms.duplicatesongremovalform import DuplicateSongRemovalForm from openlp.plugins.songs.forms.songselectform import SongSelectForm from openlp.plugins.songs.lib import clean_song, upgrade from openlp.plugins.songs.lib.db import init_schema, Song from openlp.plugins.songs.lib.mediaitem import SongSearch from openlp.plugins.songs.lib.importer import SongFormat from openlp.plugins.songs.lib.importers.openlp import OpenLPSongImport from openlp.plugins.songs.lib.mediaitem import SongMediaItem from openlp.plugins.songs.lib.songstab import SongsTab log = logging.getLogger(__name__) __default_settings__ = { 'songs/db type': 'sqlite', 'songs/db username': '', 'songs/db password': '', 'songs/db hostname': '', 'songs/db database': '', 'songs/last search type': SongSearch.Entire, 'songs/last import type': SongFormat.OpenLyrics, 'songs/update service on edit': False, 'songs/search as type': True, 'songs/add song from service': True, 'songs/display songbar': True, 'songs/display songbook': False, 'songs/display copyright symbol': False, 'songs/last directory import': '', 'songs/last directory export': '', 'songs/songselect username': '', 'songs/songselect password': '', 'songs/songselect searches': '' } class SongsPlugin(Plugin): """ This plugin enables the user to create, edit and display songs. Songs are divided into verses, and the verse order can be specified. Authors, topics and song books can be assigned to songs as well. """ log.info('Song Plugin loaded') def __init__(self): """ Create and set up the Songs plugin. 
""" super(SongsPlugin, self).__init__('songs', __default_settings__, SongMediaItem, SongsTab) self.manager = Manager('songs', init_schema, upgrade_mod=upgrade) self.weight = -10 self.icon_path = ':/plugins/plugin_songs.png' self.icon = build_icon(self.icon_path) self.songselect_form = None def check_pre_conditions(self): """ Check the plugin can run. """ return self.manager.session is not None def initialise(self): """ Initialise the plugin """ log.info('Songs Initialising') super(SongsPlugin, self).initialise() self.songselect_form = SongSelectForm(Registry().get('main_window'), self, self.manager) self.songselect_form.initialise() self.song_import_item.setVisible(True) self.song_export_item.setVisible(True) self.tools_reindex_item.setVisible(True) self.tools_find_duplicates.setVisible(True) action_list = ActionList.get_instance() action_list.add_action(self.song_import_item, UiStrings().Import) action_list.add_action(self.song_export_item, UiStrings().Export) action_list.add_action(self.tools_reindex_item, UiStrings().Tools) action_list.add_action(self.tools_find_duplicates, UiStrings().Tools) def add_import_menu_item(self, import_menu): """ Give the Songs plugin the opportunity to add items to the **Import** menu. :param import_menu: The actual **Import** menu item, so that your actions can use it as their parent. """ # Main song import menu item - will eventually be the only one self.song_import_item = create_action( import_menu, 'songImportItem', text=translate('SongsPlugin', '&Song'), tooltip=translate('SongsPlugin', 'Import songs using the import wizard.'), triggers=self.on_song_import_item_clicked) import_menu.addAction(self.song_import_item) self.import_songselect_item = create_action( import_menu, 'import_songselect_item', text=translate('SongsPlugin', 'CCLI SongSelect'), statustip=translate('SongsPlugin', 'Import songs from CCLI\'s SongSelect service.'), triggers=self.on_import_songselect_item_triggered ) import_menu.addAction(self.import_songselect_item) def add_export_menu_item(self, export_menu): """ Give the Songs plugin the opportunity to add items to the **Export** menu. :param export_menu: The actual **Export** menu item, so that your actions can use it as their parent. """ # Main song import menu item - will eventually be the only one self.song_export_item = create_action( export_menu, 'songExportItem', text=translate('SongsPlugin', '&Song'), tooltip=translate('SongsPlugin', 'Exports songs using the export wizard.'), triggers=self.on_song_export_item_clicked) export_menu.addAction(self.song_export_item) def add_tools_menu_item(self, tools_menu): """ Give the Songs plugin the opportunity to add items to the **Tools** menu. :param tools_menu: The actual **Tools** menu item, so that your actions can use it as their parent. 
""" log.info('add tools menu') self.tools_reindex_item = create_action( tools_menu, 'toolsReindexItem', text=translate('SongsPlugin', '&Re-index Songs'), icon=':/plugins/plugin_songs.png', statustip=translate('SongsPlugin', 'Re-index the songs database to improve searching and ordering.'), visible=False, triggers=self.on_tools_reindex_item_triggered) tools_menu.addAction(self.tools_reindex_item) self.tools_find_duplicates = create_action( tools_menu, 'toolsFindDuplicates', text=translate('SongsPlugin', 'Find &Duplicate Songs'), statustip=translate('SongsPlugin', 'Find and remove duplicate songs in the song database.'), visible=False, triggers=self.on_tools_find_duplicates_triggered, can_shortcuts=True) tools_menu.addAction(self.tools_find_duplicates) def on_tools_reindex_item_triggered(self): """ Rebuild each song. """ max_songs = self.manager.get_object_count(Song) if max_songs == 0: return progress_dialog = QtGui.QProgressDialog( translate('SongsPlugin', 'Reindexing songs...'), UiStrings().Cancel, 0, max_songs, self.main_window) progress_dialog.setWindowTitle(translate('SongsPlugin', 'Reindexing songs')) progress_dialog.setWindowModality(QtCore.Qt.WindowModal) songs = self.manager.get_all_objects(Song) for number, song in enumerate(songs): clean_song(self.manager, song) progress_dialog.setValue(number + 1) self.manager.save_objects(songs) self.media_item.on_search_text_button_clicked() def on_tools_find_duplicates_triggered(self): """ Search for duplicates in the song database. """ DuplicateSongRemovalForm(self).exec_() def on_import_songselect_item_triggered(self): """ Run the SongSelect importer. """ self.songselect_form.exec_() self.media_item.on_search_text_button_clicked() def on_song_import_item_clicked(self): """ Run the song import wizard. """ if self.media_item: self.media_item.on_import_click() def on_song_export_item_clicked(self): """ Run the song export wizard. """ if self.media_item: self.media_item.on_export_click() def about(self): """ Provides information for the plugin manager to display. :return: A translatable string with some basic information about the Songs plugin """ return translate('SongsPlugin', '<strong>Songs Plugin</strong>' '<br />The songs plugin provides the ability to display and manage songs.') def uses_theme(self, theme): """ Called to find out if the song plugin is currently using a theme. :param theme: The theme to check for usage :return: True if the theme is being used, otherwise returns False """ if self.manager.get_all_objects(Song, Song.theme_name == theme): return True return False def rename_theme(self, old_theme, new_theme): """ Renames a theme the song plugin is using making the plugin use the new name. :param old_theme: The name of the theme the plugin should stop using. :param new_theme: The new name the plugin should now use. 
""" songs_using_theme = self.manager.get_all_objects(Song, Song.theme_name == old_theme) for song in songs_using_theme: song.theme_name = new_theme self.manager.save_object(song) def import_songs(self, import_format, **kwargs): """ Add the correct importer class :param import_format: The import_format to be used :param kwargs: The arguments :return: the correct importer """ class_ = SongFormat.get(import_format, 'class') importer = class_(self.manager, **kwargs) importer.register(self.media_item.import_wizard) return importer def set_plugin_text_strings(self): """ Called to define all translatable texts of the plugin """ # Name PluginList self.text_strings[StringContent.Name] = { 'singular': translate('SongsPlugin', 'Song', 'name singular'), 'plural': translate('SongsPlugin', 'Songs', 'name plural') } # Name for MediaDockManager, SettingsManager self.text_strings[StringContent.VisibleName] = { 'title': translate('SongsPlugin', 'Songs', 'container title') } # Middle Header Bar tooltips = { 'load': '', 'import': '', 'new': translate('SongsPlugin', 'Add a new song.'), 'edit': translate('SongsPlugin', 'Edit the selected song.'), 'delete': translate('SongsPlugin', 'Delete the selected song.'), 'preview': translate('SongsPlugin', 'Preview the selected song.'), 'live': translate('SongsPlugin', 'Send the selected song live.'), 'service': translate('SongsPlugin', 'Add the selected song to the service.') } self.set_plugin_ui_text_strings(tooltips) def first_time(self): """ If the first time wizard has run, this function is run to import all the new songs into the database. """ self.application.process_events() self.on_tools_reindex_item_triggered() self.application.process_events() db_dir = os.path.join(gettempdir(), 'openlp') if not os.path.exists(db_dir): return song_dbs = [] song_count = 0 for sfile in os.listdir(db_dir): if sfile.startswith('songs_') and sfile.endswith('.sqlite'): self.application.process_events() song_dbs.append(os.path.join(db_dir, sfile)) song_count += self._count_songs(os.path.join(db_dir, sfile)) if not song_dbs: return self.application.process_events() progress = QtGui.QProgressDialog(self.main_window) progress.setWindowModality(QtCore.Qt.WindowModal) progress.setWindowTitle(translate('OpenLP.Ui', 'Importing Songs')) progress.setLabelText(translate('OpenLP.Ui', 'Starting import...')) progress.setCancelButton(None) progress.setRange(0, song_count) progress.setMinimumDuration(0) progress.forceShow() self.application.process_events() for db in song_dbs: importer = OpenLPSongImport(self.manager, filename=db) importer.do_import(progress) self.application.process_events() progress.setValue(song_count) self.media_item.on_search_text_button_clicked() def finalise(self): """ Time to tidy up on exit """ log.info('Songs Finalising') self.new_service_created() # Clean up files and connections self.manager.finalise() self.song_import_item.setVisible(False) self.song_export_item.setVisible(False) self.tools_reindex_item.setVisible(False) self.tools_find_duplicates.setVisible(False) action_list = ActionList.get_instance() action_list.remove_action(self.song_import_item, UiStrings().Import) action_list.remove_action(self.song_export_item, UiStrings().Export) action_list.remove_action(self.tools_reindex_item, UiStrings().Tools) action_list.remove_action(self.tools_find_duplicates, UiStrings().Tools) super(SongsPlugin, self).finalise() def new_service_created(self): """ Remove temporary songs from the database """ songs = self.manager.get_all_objects(Song, Song.temporary is True) for song 
in songs: self.manager.delete_object(Song, song.id) def _count_songs(self, db_file): """ Provide a count of the songs in the database :param db_file: the database name to count """ connection = sqlite3.connect(db_file) cursor = connection.cursor() cursor.execute('SELECT COUNT(id) AS song_count FROM songs') song_count = cursor.fetchone()[0] connection.close() try: song_count = int(song_count) except (TypeError, ValueError): song_count = 0 return song_count
crossroadchurch/paul
openlp/plugins/songs/songsplugin.py
Python
gpl-2.0
15,565
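# Note on the filter in new_service_created() above: with SQLAlchemy, a plain
# 'column is True' check bypasses the column's operator overloading, so the
# query silently matches nothing. A self-contained sketch with a stand-in
# model (modern SQLAlchemy 1.4+ assumed, not OpenLP's actual schema):
from sqlalchemy import Boolean, Column, Integer, create_engine
from sqlalchemy.orm import Session, declarative_base

Base = declarative_base()

class DemoSong(Base):
    __tablename__ = 'songs'
    id = Column(Integer, primary_key=True)
    temporary = Column(Boolean, default=False)

engine = create_engine('sqlite://')
Base.metadata.create_all(engine)
with Session(engine) as session:
    session.add_all([DemoSong(temporary=True), DemoSong(temporary=False)])
    session.commit()
    # The '==' comparison builds a SQL expression; 'is True' would not.
    temp_songs = session.query(DemoSong).filter(DemoSong.temporary == True).all()  # noqa: E712
    print(len(temp_songs))  # -> 1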
# -*- coding: utf-8 -*-

def command():
    return "edit-instance-vmware"

def init_argument(parser):
    parser.add_argument("--instance-no", required=True)
    parser.add_argument("--instance-type", required=True)
    parser.add_argument("--key-name", required=True)
    parser.add_argument("--compute-resource", required=True)
    parser.add_argument("--is-static-ip", required=True)
    parser.add_argument("--ip-address", required=False)
    parser.add_argument("--subnet-mask", required=False)
    parser.add_argument("--default-gateway", required=False)
    parser.add_argument("--comment", required=False)
    parser.add_argument("--root-size", required=False)

def execute(requester, args):
    instance_no = args.instance_no
    instance_type = args.instance_type
    key_name = args.key_name
    compute_resource = args.compute_resource
    is_static_ip = args.is_static_ip
    ip_address = args.ip_address
    subnet_mask = args.subnet_mask
    default_gateway = args.default_gateway
    comment = args.comment
    root_size = args.root_size

    parameters = {}
    parameters["InstanceNo"] = instance_no
    parameters["InstanceType"] = instance_type
    parameters["KeyName"] = key_name
    parameters["ComputeResource"] = compute_resource
    parameters["IsStaticIp"] = is_static_ip
    if ip_address is not None:
        parameters["IpAddress"] = ip_address
    if subnet_mask is not None:
        parameters["SubnetMask"] = subnet_mask
    if default_gateway is not None:
        parameters["DefaultGateway"] = default_gateway
    if comment is not None:
        parameters["Comment"] = comment
    if root_size is not None:
        parameters["RootSize"] = root_size

    return requester.execute("/EditInstanceVmware", parameters)
primecloud-controller-org/pcc-cli
src/pcc/api/instance/edit_instance_vmware.py
Python
apache-2.0
1,746
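# The optional-parameter plumbing above repeats one if-block per flag. An
# alternative sketch that builds the same dictionary from a mapping; the
# helper name build_parameters is hypothetical, and the field names are taken
# from the command above:
def build_parameters(args):
    parameters = {
        "InstanceNo": args.instance_no,
        "InstanceType": args.instance_type,
        "KeyName": args.key_name,
        "ComputeResource": args.compute_resource,
        "IsStaticIp": args.is_static_ip,
    }
    optional = {
        "IpAddress": args.ip_address,
        "SubnetMask": args.subnet_mask,
        "DefaultGateway": args.default_gateway,
        "Comment": args.comment,
        "RootSize": args.root_size,
    }
    # Only include the optional fields the caller actually supplied.
    parameters.update({k: v for k, v in optional.items() if v is not None})
    return parameters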
#!/usr/bin/env python
# coding=utf-8

"""
Excel utility class to simplify spreadsheet operations.
"""

class ExcelUtil(object):
    """
    Excel utility class to simplify spreadsheet operations.
    """
    def __init__(self):
        pass
ctrlzhang/java_dto_generator
ExcelUtil.py
Python
apache-2.0
202
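# The stub above only declares intent. One plausible shape for such a helper,
# sketched with openpyxl (an assumption: the project does not say which Excel
# library it targets, and read_rows is a hypothetical method name):
from openpyxl import load_workbook

def read_rows(path, sheet=None):
    """Yield each row of the given sheet as a tuple of cell values."""
    wb = load_workbook(path, read_only=True)
    ws = wb[sheet] if sheet else wb.active
    for row in ws.iter_rows(values_only=True):
        yield row
    wb.close()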
#!/usr/bin/env python3
#
# Migration test direct invocation command
#
# Copyright (c) 2016 Red Hat, Inc.
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, see <http://www.gnu.org/licenses/>.
#

import sys

from guestperf.shell import Shell

shell = Shell()
sys.exit(shell.run(sys.argv[1:]))
dslutz/qemu
tests/migration/guestperf.py
Python
gpl-2.0
862
NAME="Local Audio Player Backend" AUTHOR="Martin Altmayer" VERSION="1.0" DESCRIPTION="Provides an audio backend using the QtMultimedia framework."
maestromusic/maestro
maestro/plugins/localplay/__init__.py
Python
gpl-3.0
147
class Typifier(object):  # class header assumed from the file name, Typifier.py
    def __init__(self, enum):
        self._enum = enum

    def typify(self, character):
        v = ord(character)
        u = character
        if 65 <= v <= 90 or 97 <= v <= 122 or v == 95 or 48 <= v <= 57:
            return self._enum.ALPHA_DIGIT_OR_UNDERSCORE
        elif u == '`':
            return self._enum.QUOTE_SYMBOL
        elif u in ['(', ')', '[', ']', '{', '}']:
            return self._enum.GROUPING_SYMBOL
        elif u in [' ', '\n', '\r', '\f', '\v', '\t']:
            return self._enum.WHITESPACE
        elif v >= 128 or v in [0, 1, 2, 3, 4, 5, 6, 7, 8, 11, 12, 13, 14, 15,
                               16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27,
                               28, 29, 30, 31, 127]:
            return self._enum.UNKNOWN_CODE_POINT
        else:
            return self._enum.OTHER_SYMBOL
BourgondAries/Unnamed-Language
CxyP/lex/typify/Typifier.py
Python
gpl-3.0
665
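# A usage sketch for the typifier above. The enum is caller-supplied; the
# member names are the ones the class references, while CharClass itself is
# illustrative (import path for Typifier depends on the project layout):
import enum

class CharClass(enum.Enum):
    ALPHA_DIGIT_OR_UNDERSCORE = 1
    QUOTE_SYMBOL = 2
    GROUPING_SYMBOL = 3
    WHITESPACE = 4
    UNKNOWN_CODE_POINT = 5
    OTHER_SYMBOL = 6

t = Typifier(CharClass)
print(t.typify('a'))  # CharClass.ALPHA_DIGIT_OR_UNDERSCORE
print(t.typify('('))  # CharClass.GROUPING_SYMBOL
print(t.typify('+'))  # CharClass.OTHER_SYMBOL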
""" matplotlib includes a framework for arbitrary geometric transformations that is used determine the final position of all elements drawn on the canvas. Transforms are composed into trees of :class:`TransformNode` objects whose actual value depends on their children. When the contents of children change, their parents are automatically invalidated. The next time an invalidated transform is accessed, it is recomputed to reflect those changes. This invalidation/caching approach prevents unnecessary recomputations of transforms, and contributes to better interactive performance. For example, here is a graph of the transform tree used to plot data to the graph: .. image:: ../_static/transforms.png The framework can be used for both affine and non-affine transformations. However, for speed, we want use the backend renderers to perform affine transformations whenever possible. Therefore, it is possible to perform just the affine or non-affine part of a transformation on a set of data. The affine is always assumed to occur after the non-affine. For any transform:: full transform == non-affine part + affine part The backends are not expected to handle non-affine transformations themselves. """ from __future__ import print_function, division import numpy as np from numpy import ma from matplotlib._path import (affine_transform, count_bboxes_overlapping_bbox, update_path_extents) from numpy.linalg import inv from weakref import WeakValueDictionary import warnings try: set except NameError: from sets import Set as set from path import Path DEBUG = False MaskedArray = ma.MaskedArray class TransformNode(object): """ :class:`TransformNode` is the base class for anything that participates in the transform tree and needs to invalidate its parents or be invalidated. This includes classes that are not really transforms, such as bounding boxes, since some transforms depend on bounding boxes to compute their values. """ _gid = 0 # Invalidation may affect only the affine part. If the # invalidation was "affine-only", the _invalid member is set to # INVALID_AFFINE_ONLY INVALID_NON_AFFINE = 1 INVALID_AFFINE = 2 INVALID = INVALID_NON_AFFINE | INVALID_AFFINE # Some metadata about the transform, used to determine whether an # invalidation is affine-only is_affine = False is_bbox = False pass_through = False """ If pass_through is True, all ancestors will always be invalidated, even if 'self' is already invalid. """ def __init__(self, shorthand_name=None): """ Creates a new :class:`TransformNode`. **shorthand_name** - a string representing the "name" of this transform. The name carries no significance other than to improve the readability of ``str(transform)`` when DEBUG=True. """ # Parents are stored in a WeakValueDictionary, so that if the # parents are deleted, references from the children won't keep # them alive. self._parents = WeakValueDictionary() # TransformNodes start out as invalid until their values are # computed for the first time. 
self._invalid = 1 self._shorthand_name = shorthand_name or '' if DEBUG: def __str__(self): # either just return the name of this TransformNode, or it's repr return self._shorthand_name or repr(self) def __getstate__(self): d = self.__dict__.copy() # turn the weakkey dictionary into a normal dictionary d['_parents'] = dict(self._parents.iteritems()) return d def __setstate__(self, data_dict): self.__dict__ = data_dict # turn the normal dictionary back into a WeakValueDictionary self._parents = WeakValueDictionary(self._parents) def __copy__(self, *args): raise NotImplementedError( "TransformNode instances can not be copied. " + "Consider using frozen() instead.") __deepcopy__ = __copy__ def invalidate(self): """ Invalidate this :class:`TransformNode` and triggers an invalidation of its ancestors. Should be called any time the transform changes. """ value = self.INVALID if self.is_affine: value = self.INVALID_AFFINE return self._invalidate_internal(value, invalidating_node=self) def _invalidate_internal(self, value, invalidating_node): """ Called by :meth:`invalidate` and subsequently ascends the transform stack calling each TransformNode's _invalidate_internal method. """ # determine if this call will be an extension to the invalidation # status. If not, then a shortcut means that we needn't invoke an # invalidation up the transform stack as it will already have been # invalidated. # N.B This makes the invalidation sticky, once a transform has been # invalidated as NON_AFFINE, then it will always be invalidated as # NON_AFFINE even when triggered with a AFFINE_ONLY invalidation. # In most cases this is not a problem (i.e. for interactive panning and # zooming) and the only side effect will be on performance. status_changed = self._invalid < value if self.pass_through or status_changed: self._invalid = value for parent in self._parents.values(): parent._invalidate_internal(value=value, invalidating_node=self) def set_children(self, *children): """ Set the children of the transform, to let the invalidation system know which transforms can invalidate this transform. Should be called from the constructor of any transforms that depend on other transforms. """ for child in children: child._parents[id(self)] = self if DEBUG: _set_children = set_children def set_children(self, *children): self._set_children(*children) self._children = children set_children.__doc__ = _set_children.__doc__ def frozen(self): """ Returns a frozen copy of this transform node. The frozen copy will not update when its children change. Useful for storing a previously known state of a transform where ``copy.deepcopy()`` might normally be used. """ return self if DEBUG: def write_graphviz(self, fobj, highlight=[]): """ For debugging purposes. Writes the transform tree rooted at 'self' to a graphviz "dot" format file. This file can be run through the "dot" utility to produce a graph of the transform tree. Affine transforms are marked in blue. Bounding boxes are marked in yellow. 
*fobj*: A Python file-like object Once the "dot" file has been created, it can be turned into a png easily with:: $> dot -Tpng -o $OUTPUT_FILE $DOT_FILE """ seen = set() def recurse(root): if root in seen: return seen.add(root) props = {} label = root.__class__.__name__ if root._invalid: label = '[%s]' % label if root in highlight: props['style'] = 'bold' props['shape'] = 'box' props['label'] = '"%s"' % label props = ' '.join(['%s=%s' % (key, val) for key, val in props.iteritems()]) fobj.write('%s [%s];\n' % (hash(root), props)) if hasattr(root, '_children'): for child in root._children: name = '?' for key, val in root.__dict__.iteritems(): if val is child: name = key break fobj.write('"%s" -> "%s" [label="%s", fontsize=10];\n' % (hash(root), hash(child), name)) recurse(child) fobj.write("digraph G {\n") recurse(self) fobj.write("}\n") class BboxBase(TransformNode): """ This is the base class of all bounding boxes, and provides read-only access to its data. A mutable bounding box is provided by the :class:`Bbox` class. The canonical representation is as two points, with no restrictions on their ordering. Convenience properties are provided to get the left, bottom, right and top edges and width and height, but these are not stored explicitly. """ is_bbox = True is_affine = True #* Redundant: Removed for performance # # def __init__(self): # TransformNode.__init__(self) if DEBUG: def _check(points): if ma.isMaskedArray(points): warnings.warn("Bbox bounds are a masked array.") points = np.asarray(points) if (points[1, 0] - points[0, 0] == 0 or points[1, 1] - points[0, 1] == 0): warnings.warn("Singular Bbox.") _check = staticmethod(_check) def frozen(self): return Bbox(self.get_points().copy()) frozen.__doc__ = TransformNode.__doc__ def __array__(self, *args, **kwargs): return self.get_points() def is_unit(self): """ Returns True if the :class:`Bbox` is the unit bounding box from (0, 0) to (1, 1). """ return list(self.get_points().flatten()) == [0., 0., 1., 1.] def _get_x0(self): return self.get_points()[0, 0] x0 = property(_get_x0, None, None, """ (property) :attr:`x0` is the first of the pair of *x* coordinates that define the bounding box. :attr:`x0` is not guaranteed to be less than :attr:`x1`. If you require that, use :attr:`xmin`.""") def _get_y0(self): return self.get_points()[0, 1] y0 = property(_get_y0, None, None, """ (property) :attr:`y0` is the first of the pair of *y* coordinates that define the bounding box. :attr:`y0` is not guaranteed to be less than :attr:`y1`. If you require that, use :attr:`ymin`.""") def _get_x1(self): return self.get_points()[1, 0] x1 = property(_get_x1, None, None, """ (property) :attr:`x1` is the second of the pair of *x* coordinates that define the bounding box. :attr:`x1` is not guaranteed to be greater than :attr:`x0`. If you require that, use :attr:`xmax`.""") def _get_y1(self): return self.get_points()[1, 1] y1 = property(_get_y1, None, None, """ (property) :attr:`y1` is the second of the pair of *y* coordinates that define the bounding box. :attr:`y1` is not guaranteed to be greater than :attr:`y0`. If you require that, use :attr:`ymax`.""") def _get_p0(self): return self.get_points()[0] p0 = property(_get_p0, None, None, """ (property) :attr:`p0` is the first pair of (*x*, *y*) coordinates that define the bounding box. It is not guaranteed to be the bottom-left corner. 
For that, use :attr:`min`.""") def _get_p1(self): return self.get_points()[1] p1 = property(_get_p1, None, None, """ (property) :attr:`p1` is the second pair of (*x*, *y*) coordinates that define the bounding box. It is not guaranteed to be the top-right corner. For that, use :attr:`max`.""") def _get_xmin(self): return min(self.get_points()[:, 0]) xmin = property(_get_xmin, None, None, """ (property) :attr:`xmin` is the left edge of the bounding box.""") def _get_ymin(self): return min(self.get_points()[:, 1]) ymin = property(_get_ymin, None, None, """ (property) :attr:`ymin` is the bottom edge of the bounding box.""") def _get_xmax(self): return max(self.get_points()[:, 0]) xmax = property(_get_xmax, None, None, """ (property) :attr:`xmax` is the right edge of the bounding box.""") def _get_ymax(self): return max(self.get_points()[:, 1]) ymax = property(_get_ymax, None, None, """ (property) :attr:`ymax` is the top edge of the bounding box.""") def _get_min(self): return [min(self.get_points()[:, 0]), min(self.get_points()[:, 1])] min = property(_get_min, None, None, """ (property) :attr:`min` is the bottom-left corner of the bounding box.""") def _get_max(self): return [max(self.get_points()[:, 0]), max(self.get_points()[:, 1])] max = property(_get_max, None, None, """ (property) :attr:`max` is the top-right corner of the bounding box.""") def _get_intervalx(self): return self.get_points()[:, 0] intervalx = property(_get_intervalx, None, None, """ (property) :attr:`intervalx` is the pair of *x* coordinates that define the bounding box. It is not guaranteed to be sorted from left to right.""") def _get_intervaly(self): return self.get_points()[:, 1] intervaly = property(_get_intervaly, None, None, """ (property) :attr:`intervaly` is the pair of *y* coordinates that define the bounding box. It is not guaranteed to be sorted from bottom to top.""") def _get_width(self): points = self.get_points() return points[1, 0] - points[0, 0] width = property(_get_width, None, None, """ (property) The width of the bounding box. It may be negative if :attr:`x1` < :attr:`x0`.""") def _get_height(self): points = self.get_points() return points[1, 1] - points[0, 1] height = property(_get_height, None, None, """ (property) The height of the bounding box. It may be negative if :attr:`y1` < :attr:`y0`.""") def _get_size(self): points = self.get_points() return points[1] - points[0] size = property(_get_size, None, None, """ (property) The width and height of the bounding box. May be negative, in the same way as :attr:`width` and :attr:`height`.""") def _get_bounds(self): x0, y0, x1, y1 = self.get_points().flatten() return (x0, y0, x1 - x0, y1 - y0) bounds = property(_get_bounds, None, None, """ (property) Returns (:attr:`x0`, :attr:`y0`, :attr:`width`, :attr:`height`).""") def _get_extents(self): return self.get_points().flatten().copy() extents = property(_get_extents, None, None, """ (property) Returns (:attr:`x0`, :attr:`y0`, :attr:`x1`, :attr:`y1`).""") def get_points(self): return NotImplementedError() def containsx(self, x): """ Returns True if *x* is between or equal to :attr:`x0` and :attr:`x1`. """ x0, x1 = self.intervalx return ((x0 < x1 and (x >= x0 and x <= x1)) or (x >= x1 and x <= x0)) def containsy(self, y): """ Returns True if *y* is between or equal to :attr:`y0` and :attr:`y1`. """ y0, y1 = self.intervaly return ((y0 < y1 and (y >= y0 and y <= y1)) or (y >= y1 and y <= y0)) def contains(self, x, y): """ Returns *True* if (*x*, *y*) is a coordinate inside the bounding box or on its edge. 
""" return self.containsx(x) and self.containsy(y) def overlaps(self, other): """ Returns True if this bounding box overlaps with the given bounding box *other*. """ ax1, ay1, ax2, ay2 = self._get_extents() bx1, by1, bx2, by2 = other._get_extents() if ax2 < ax1: ax2, ax1 = ax1, ax2 if ay2 < ay1: ay2, ay1 = ay1, ay2 if bx2 < bx1: bx2, bx1 = bx1, bx2 if by2 < by1: by2, by1 = by1, by2 return not ((bx2 < ax1) or (by2 < ay1) or (bx1 > ax2) or (by1 > ay2)) def fully_containsx(self, x): """ Returns True if *x* is between but not equal to :attr:`x0` and :attr:`x1`. """ x0, x1 = self.intervalx return ((x0 < x1 and (x > x0 and x < x1)) or (x > x1 and x < x0)) def fully_containsy(self, y): """ Returns True if *y* is between but not equal to :attr:`y0` and :attr:`y1`. """ y0, y1 = self.intervaly return ((y0 < y1 and (y > y0 and y < y1)) or (y > y1 and y < y0)) def fully_contains(self, x, y): """ Returns True if (*x*, *y*) is a coordinate inside the bounding box, but not on its edge. """ return self.fully_containsx(x) \ and self.fully_containsy(y) def fully_overlaps(self, other): """ Returns True if this bounding box overlaps with the given bounding box *other*, but not on its edge alone. """ ax1, ay1, ax2, ay2 = self._get_extents() bx1, by1, bx2, by2 = other._get_extents() if ax2 < ax1: ax2, ax1 = ax1, ax2 if ay2 < ay1: ay2, ay1 = ay1, ay2 if bx2 < bx1: bx2, bx1 = bx1, bx2 if by2 < by1: by2, by1 = by1, by2 return not ((bx2 <= ax1) or (by2 <= ay1) or (bx1 >= ax2) or (by1 >= ay2)) def transformed(self, transform): """ Return a new :class:`Bbox` object, statically transformed by the given transform. """ return Bbox(transform.transform(self.get_points())) def inverse_transformed(self, transform): """ Return a new :class:`Bbox` object, statically transformed by the inverse of the given transform. """ return Bbox(transform.inverted().transform(self.get_points())) coefs = {'C': (0.5, 0.5), 'SW': (0, 0), 'S': (0.5, 0), 'SE': (1.0, 0), 'E': (1.0, 0.5), 'NE': (1.0, 1.0), 'N': (0.5, 1.0), 'NW': (0, 1.0), 'W': (0, 0.5)} def anchored(self, c, container=None): """ Return a copy of the :class:`Bbox`, shifted to position *c* within a container. *c*: may be either: * a sequence (*cx*, *cy*) where *cx* and *cy* range from 0 to 1, where 0 is left or bottom and 1 is right or top * a string: - 'C' for centered - 'S' for bottom-center - 'SE' for bottom-left - 'E' for left - etc. Optional argument *container* is the box within which the :class:`Bbox` is positioned; it defaults to the initial :class:`Bbox`. """ if container is None: container = self l, b, w, h = container.bounds if isinstance(c, basestring): cx, cy = self.coefs[c] else: cx, cy = c L, B, W, H = self.bounds return Bbox(self._points + [(l + cx * (w - W)) - L, (b + cy * (h - H)) - B]) def shrunk(self, mx, my): """ Return a copy of the :class:`Bbox`, shrunk by the factor *mx* in the *x* direction and the factor *my* in the *y* direction. The lower left corner of the box remains unchanged. Normally *mx* and *my* will be less than 1, but this is not enforced. """ w, h = self.size return Bbox([self._points[0], self._points[0] + [mx * w, my * h]]) def shrunk_to_aspect(self, box_aspect, container=None, fig_aspect=1.0): """ Return a copy of the :class:`Bbox`, shrunk so that it is as large as it can be while having the desired aspect ratio, *box_aspect*. 
If the box coordinates are relative---that is, fractions of a larger box such as a figure---then the physical aspect ratio of that figure is specified with *fig_aspect*, so that *box_aspect* can also be given as a ratio of the absolute dimensions, not the relative dimensions. """ assert box_aspect > 0 and fig_aspect > 0 if container is None: container = self w, h = container.size H = w * box_aspect / fig_aspect if H <= h: W = w else: W = h * fig_aspect / box_aspect H = h return Bbox([self._points[0], self._points[0] + (W, H)]) def splitx(self, *args): """ e.g., ``bbox.splitx(f1, f2, ...)`` Returns a list of new :class:`Bbox` objects formed by splitting the original one with vertical lines at fractional positions *f1*, *f2*, ... """ boxes = [] xf = [0] + list(args) + [1] x0, y0, x1, y1 = self._get_extents() w = x1 - x0 for xf0, xf1 in zip(xf[:-1], xf[1:]): boxes.append(Bbox([[x0 + xf0 * w, y0], [x0 + xf1 * w, y1]])) return boxes def splity(self, *args): """ e.g., ``bbox.splitx(f1, f2, ...)`` Returns a list of new :class:`Bbox` objects formed by splitting the original one with horizontal lines at fractional positions *f1*, *f2*, ... """ boxes = [] yf = [0] + list(args) + [1] x0, y0, x1, y1 = self._get_extents() h = y1 - y0 for yf0, yf1 in zip(yf[:-1], yf[1:]): boxes.append(Bbox([[x0, y0 + yf0 * h], [x1, y0 + yf1 * h]])) return boxes def count_contains(self, vertices): """ Count the number of vertices contained in the :class:`Bbox`. *vertices* is a Nx2 Numpy array. """ if len(vertices) == 0: return 0 vertices = np.asarray(vertices) x0, y0, x1, y1 = self._get_extents() dx0 = np.sign(vertices[:, 0] - x0) dy0 = np.sign(vertices[:, 1] - y0) dx1 = np.sign(vertices[:, 0] - x1) dy1 = np.sign(vertices[:, 1] - y1) inside = (abs(dx0 + dx1) + abs(dy0 + dy1)) <= 2 return np.sum(inside) def count_overlaps(self, bboxes): """ Count the number of bounding boxes that overlap this one. bboxes is a sequence of :class:`BboxBase` objects """ return count_bboxes_overlapping_bbox(self, bboxes) def expanded(self, sw, sh): """ Return a new :class:`Bbox` which is this :class:`Bbox` expanded around its center by the given factors *sw* and *sh*. """ width = self.width height = self.height deltaw = (sw * width - width) / 2.0 deltah = (sh * height - height) / 2.0 a = np.array([[-deltaw, -deltah], [deltaw, deltah]]) return Bbox(self._points + a) def padded(self, p): """ Return a new :class:`Bbox` that is padded on all four sides by the given value. """ points = self.get_points() return Bbox(points + [[-p, -p], [p, p]]) def translated(self, tx, ty): """ Return a copy of the :class:`Bbox`, statically translated by *tx* and *ty*. """ return Bbox(self._points + (tx, ty)) def corners(self): """ Return an array of points which are the four corners of this rectangle. For example, if this :class:`Bbox` is defined by the points (*a*, *b*) and (*c*, *d*), :meth:`corners` returns (*a*, *b*), (*a*, *d*), (*c*, *b*) and (*c*, *d*). """ l, b, r, t = self.get_points().flatten() return np.array([[l, b], [l, t], [r, b], [r, t]]) def rotated(self, radians): """ Return a new bounding box that bounds a rotated version of this bounding box by the given radians. The new bounding box is still aligned with the axes, of course. """ corners = self.corners() corners_rotated = Affine2D().rotate(radians).transform(corners) bbox = Bbox.unit() bbox.update_from_data_xy(corners_rotated, ignore=True) return bbox @staticmethod def union(bboxes): """ Return a :class:`Bbox` that contains all of the given bboxes. 
""" assert(len(bboxes)) if len(bboxes) == 1: return bboxes[0] x0 = np.inf y0 = np.inf x1 = -np.inf y1 = -np.inf for bbox in bboxes: points = bbox.get_points() xs = points[:, 0] ys = points[:, 1] x0 = min(x0, np.min(xs)) y0 = min(y0, np.min(ys)) x1 = max(x1, np.max(xs)) y1 = max(y1, np.max(ys)) return Bbox.from_extents(x0, y0, x1, y1) class Bbox(BboxBase): """ A mutable bounding box. """ def __init__(self, points, **kwargs): """ *points*: a 2x2 numpy array of the form [[x0, y0], [x1, y1]] If you need to create a :class:`Bbox` object from another form of data, consider the static methods :meth:`unit`, :meth:`from_bounds` and :meth:`from_extents`. """ BboxBase.__init__(self, **kwargs) self._points = np.asarray(points, np.float_) self._minpos = np.array([0.0000001, 0.0000001]) self._ignore = True # it is helpful in some contexts to know if the bbox is a # default or has been mutated; we store the orig points to # support the mutated methods self._points_orig = self._points.copy() if DEBUG: ___init__ = __init__ def __init__(self, points, **kwargs): self._check(points) self.___init__(points, **kwargs) def invalidate(self): self._check(self._points) TransformNode.invalidate(self) _unit_values = np.array([[0.0, 0.0], [1.0, 1.0]], np.float_) @staticmethod def unit(): """ (staticmethod) Create a new unit :class:`Bbox` from (0, 0) to (1, 1). """ return Bbox(Bbox._unit_values.copy()) @staticmethod def from_bounds(x0, y0, width, height): """ (staticmethod) Create a new :class:`Bbox` from *x0*, *y0*, *width* and *height*. *width* and *height* may be negative. """ return Bbox.from_extents(x0, y0, x0 + width, y0 + height) @staticmethod def from_extents(*args): """ (staticmethod) Create a new Bbox from *left*, *bottom*, *right* and *top*. The *y*-axis increases upwards. """ points = np.array(args, dtype=np.float_).reshape(2, 2) return Bbox(points) def __repr__(self): return 'Bbox(%r)' % repr(self._points) def ignore(self, value): """ Set whether the existing bounds of the box should be ignored by subsequent calls to :meth:`update_from_data` or :meth:`update_from_data_xy`. *value*: - When True, subsequent calls to :meth:`update_from_data` will ignore the existing bounds of the :class:`Bbox`. - When False, subsequent calls to :meth:`update_from_data` will include the existing bounds of the :class:`Bbox`. """ self._ignore = value def update_from_data(self, x, y, ignore=None): """ Update the bounds of the :class:`Bbox` based on the passed in data. After updating, the bounds will have positive *width* and *height*; *x0* and *y0* will be the minimal values. *x*: a numpy array of *x*-values *y*: a numpy array of *y*-values *ignore*: - when True, ignore the existing bounds of the :class:`Bbox`. - when False, include the existing bounds of the :class:`Bbox`. - when None, use the last value passed to :meth:`ignore`. """ warnings.warn( "update_from_data requires a memory copy -- please replace with " "update_from_data_xy") xy = np.hstack((x.reshape((len(x), 1)), y.reshape((len(y), 1)))) return self.update_from_data_xy(xy, ignore) def update_from_path(self, path, ignore=None, updatex=True, updatey=True): """ Update the bounds of the :class:`Bbox` based on the passed in data. After updating, the bounds will have positive *width* and *height*; *x0* and *y0* will be the minimal values. *path*: a :class:`~matplotlib.path.Path` instance *ignore*: - when True, ignore the existing bounds of the :class:`Bbox`. - when False, include the existing bounds of the :class:`Bbox`. 
- when None, use the last value passed to :meth:`ignore`. *updatex*: when True, update the x values *updatey*: when True, update the y values """ if ignore is None: ignore = self._ignore if path.vertices.size == 0: return points, minpos, changed = update_path_extents( path, None, self._points, self._minpos, ignore) if changed: self.invalidate() if updatex: self._points[:, 0] = points[:, 0] self._minpos[0] = minpos[0] if updatey: self._points[:, 1] = points[:, 1] self._minpos[1] = minpos[1] def update_from_data_xy(self, xy, ignore=None, updatex=True, updatey=True): """ Update the bounds of the :class:`Bbox` based on the passed in data. After updating, the bounds will have positive *width* and *height*; *x0* and *y0* will be the minimal values. *xy*: a numpy array of 2D points *ignore*: - when True, ignore the existing bounds of the :class:`Bbox`. - when False, include the existing bounds of the :class:`Bbox`. - when None, use the last value passed to :meth:`ignore`. *updatex*: when True, update the x values *updatey*: when True, update the y values """ if len(xy) == 0: return path = Path(xy) self.update_from_path(path, ignore=ignore, updatex=updatex, updatey=updatey) def _set_x0(self, val): self._points[0, 0] = val self.invalidate() x0 = property(BboxBase._get_x0, _set_x0) def _set_y0(self, val): self._points[0, 1] = val self.invalidate() y0 = property(BboxBase._get_y0, _set_y0) def _set_x1(self, val): self._points[1, 0] = val self.invalidate() x1 = property(BboxBase._get_x1, _set_x1) def _set_y1(self, val): self._points[1, 1] = val self.invalidate() y1 = property(BboxBase._get_y1, _set_y1) def _set_p0(self, val): self._points[0] = val self.invalidate() p0 = property(BboxBase._get_p0, _set_p0) def _set_p1(self, val): self._points[1] = val self.invalidate() p1 = property(BboxBase._get_p1, _set_p1) def _set_intervalx(self, interval): self._points[:, 0] = interval self.invalidate() intervalx = property(BboxBase._get_intervalx, _set_intervalx) def _set_intervaly(self, interval): self._points[:, 1] = interval self.invalidate() intervaly = property(BboxBase._get_intervaly, _set_intervaly) def _set_bounds(self, bounds): l, b, w, h = bounds points = np.array([[l, b], [l + w, b + h]], np.float_) if np.any(self._points != points): self._points = points self.invalidate() bounds = property(BboxBase._get_bounds, _set_bounds) def _get_minpos(self): return self._minpos minpos = property(_get_minpos) def _get_minposx(self): return self._minpos[0] minposx = property(_get_minposx) def _get_minposy(self): return self._minpos[1] minposy = property(_get_minposy) def get_points(self): """ Get the points of the bounding box directly as a numpy array of the form: [[x0, y0], [x1, y1]]. """ self._invalid = 0 return self._points def set_points(self, points): """ Set the points of the bounding box directly from a numpy array of the form: [[x0, y0], [x1, y1]]. No error checking is performed, as this method is mainly for internal use. """ if np.any(self._points != points): self._points = points self.invalidate() def set(self, other): """ Set this bounding box from the "frozen" bounds of another :class:`Bbox`. 
""" if np.any(self._points != other.get_points()): self._points = other.get_points() self.invalidate() def mutated(self): 'return whether the bbox has changed since init' return self.mutatedx() or self.mutatedy() def mutatedx(self): 'return whether the x-limits have changed since init' return (self._points[0, 0] != self._points_orig[0, 0] or self._points[1, 0] != self._points_orig[1, 0]) def mutatedy(self): 'return whether the y-limits have changed since init' return (self._points[0, 1] != self._points_orig[0, 1] or self._points[1, 1] != self._points_orig[1, 1]) class TransformedBbox(BboxBase): """ A :class:`Bbox` that is automatically transformed by a given transform. When either the child bounding box or transform changes, the bounds of this bbox will update accordingly. """ def __init__(self, bbox, transform, **kwargs): """ *bbox*: a child :class:`Bbox` *transform*: a 2D :class:`Transform` """ assert bbox.is_bbox assert isinstance(transform, Transform) assert transform.input_dims == 2 assert transform.output_dims == 2 BboxBase.__init__(self, **kwargs) self._bbox = bbox self._transform = transform self.set_children(bbox, transform) self._points = None def __repr__(self): return "TransformedBbox(%r, %r)" % (self._bbox, self._transform) def get_points(self): if self._invalid: points = self._transform.transform(self._bbox.get_points()) points = np.ma.filled(points, 0.0) self._points = points self._invalid = 0 return self._points get_points.__doc__ = Bbox.get_points.__doc__ if DEBUG: _get_points = get_points def get_points(self): points = self._get_points() self._check(points) return points class Transform(TransformNode): """ The base class of all :class:`TransformNode` instances that actually perform a transformation. All non-affine transformations should be subclasses of this class. New affine transformations should be subclasses of :class:`Affine2D`. Subclasses of this class should override the following members (at minimum): - :attr:`input_dims` - :attr:`output_dims` - :meth:`transform` - :attr:`is_separable` - :attr:`has_inverse` - :meth:`inverted` (if :attr:`has_inverse` is True) If the transform needs to do something non-standard with :class:`matplotlib.path.Path` objects, such as adding curves where there were once line segments, it should override: - :meth:`transform_path` """ input_dims = None """ The number of input dimensions of this transform. Must be overridden (with integers) in the subclass. """ output_dims = None """ The number of output dimensions of this transform. Must be overridden (with integers) in the subclass. """ has_inverse = False """True if this transform has a corresponding inverse transform.""" is_separable = False """True if this transform is separable in the x- and y- dimensions.""" def __add__(self, other): """ Composes two transforms together such that *self* is followed by *other*. """ if isinstance(other, Transform): return composite_transform_factory(self, other) raise TypeError( "Can not add Transform to object of type '%s'" % type(other)) def __radd__(self, other): """ Composes two transforms together such that *self* is followed by *other*. """ if isinstance(other, Transform): return composite_transform_factory(other, self) raise TypeError( "Can not add Transform to object of type '%s'" % type(other)) def __eq__(self, other): # equality is based on transform object id. Hence: # Transform() != Transform(). # Some classes, such as TransformWrapper & AffineBase, will override. 
return self is other def _iter_break_from_left_to_right(self): """ Returns an iterator breaking down this transform stack from left to right recursively. If self == ((A, N), A) then the result will be an iterator which yields I : ((A, N), A), followed by A : (N, A), followed by (A, N) : (A), but not ((A, N), A) : I. This is equivalent to flattening the stack then yielding ``flat_stack[:i], flat_stack[i:]`` where i=0..(n-1). """ yield IdentityTransform(), self @property def depth(self): """ Returns the number of transforms which have been chained together to form this Transform instance. .. note:: For the special case of a Composite transform, the maximum depth of the two is returned. """ return 1 def contains_branch(self, other): """ Return whether the given transform is a sub-tree of this transform. This routine uses transform equality to identify sub-trees, therefore in many situations it is object id which will be used. For the case where the given transform represents the whole of this transform, returns True. """ if self.depth < other.depth: return False # check that a subtree is equal to other (starting from self) for _, sub_tree in self._iter_break_from_left_to_right(): if sub_tree == other: return True return False def contains_branch_seperately(self, other_transform): """ Returns whether the given branch is a sub-tree of this transform on each seperate dimension. A common use for this method is to identify if a transform is a blended transform containing an axes' data transform. e.g.:: x_isdata, y_isdata = trans.contains_branch_seperately(ax.transData) """ if self.output_dims != 2: raise ValueError('contains_branch_seperately only supports ' 'transforms with 2 output dimensions') # for a non-blended transform each seperate dimension is the same, so # just return the appropriate shape. return [self.contains_branch(other_transform)] * 2 def __sub__(self, other): """ Returns a transform stack which goes all the way down self's transform stack, and then ascends back up other's stack. If it can, this is optimised:: # normally A - B == a + b.inverted() # sometimes, when A contains the tree B there is no need to # descend all the way down to the base of A (via B), instead we # can just stop at B. (A + B) - (B)^-1 == A # similarly, when B contains tree A, we can avoid decending A at # all, basically: A - (A + B) == ((B + A) - A).inverted() or B^-1 For clarity, the result of ``(A + B) - B + B == (A + B)``. """ # we only know how to do this operation if other is a Transform. if not isinstance(other, Transform): return NotImplemented for remainder, sub_tree in self._iter_break_from_left_to_right(): if sub_tree == other: return remainder for remainder, sub_tree in other._iter_break_from_left_to_right(): if sub_tree == self: if not remainder.has_inverse: raise ValueError("The shortcut cannot be computed since " "other's transform includes a non-invertable component.") return remainder.inverted() # if we have got this far, then there was no shortcut possible if other.has_inverse: return self + other.inverted() else: raise ValueError('It is not possible to compute transA - transB ' 'since transB cannot be inverted and there is no ' 'shortcut possible.') def __array__(self, *args, **kwargs): """ Array interface to get at this Transform's affine matrix. """ return self.get_affine().get_matrix() def transform(self, values): """ Performs the transformation on the given array of values. 
Accepts a numpy array of shape (N x :attr:`input_dims`) and returns a numpy array of shape (N x :attr:`output_dims`). """ return self.transform_affine(self.transform_non_affine(values)) def transform_affine(self, values): """ Performs only the affine part of this transformation on the given array of values. ``transform(values)`` is always equivalent to ``transform_affine(transform_non_affine(values))``. In non-affine transformations, this is generally a no-op. In affine transformations, this is equivalent to ``transform(values)``. Accepts a numpy array of shape (N x :attr:`input_dims`) and returns a numpy array of shape (N x :attr:`output_dims`). """ return self.get_affine().transform(values) def transform_non_affine(self, values): """ Performs only the non-affine part of the transformation. ``transform(values)`` is always equivalent to ``transform_affine(transform_non_affine(values))``. In non-affine transformations, this is generally equivalent to ``transform(values)``. In affine transformations, this is always a no-op. Accepts a numpy array of shape (N x :attr:`input_dims`) and returns a numpy array of shape (N x :attr:`output_dims`). """ return values def get_affine(self): """ Get the affine part of this transform. """ return IdentityTransform() def get_matrix(self): """ Get the Affine transformation array for the affine part of this transform. """ return self.get_affine().get_matrix() def transform_point(self, point): """ A convenience function that returns the transformed copy of a single point. The point is given as a sequence of length :attr:`input_dims`. The transformed point is returned as a sequence of length :attr:`output_dims`. """ assert len(point) == self.input_dims return self.transform(np.asarray([point]))[0] def transform_path(self, path): """ Returns a transformed path. *path*: a :class:`~matplotlib.path.Path` instance. In some cases, this transform may insert curves into the path that began as line segments. """ return self.transform_path_affine(self.transform_path_non_affine(path)) def transform_path_affine(self, path): """ Returns a path, transformed only by the affine part of this transform. *path*: a :class:`~matplotlib.path.Path` instance. ``transform_path(path)`` is equivalent to ``transform_path_affine(transform_path_non_affine(values))``. """ return self.get_affine().transform_path_affine(path) def transform_path_non_affine(self, path): """ Returns a path, transformed only by the non-affine part of this transform. *path*: a :class:`~matplotlib.path.Path` instance. ``transform_path(path)`` is equivalent to ``transform_path_affine(transform_path_non_affine(values))``. """ return Path(self.transform_non_affine(path.vertices), path.codes, path._interpolation_steps) def transform_angles(self, angles, pts, radians=False, pushoff=1e-5): """ Performs transformation on a set of angles anchored at specific locations. The *angles* must be a column vector (i.e., numpy array). The *pts* must be a two-column numpy array of x,y positions (angle transforms currently only work in 2D). This array must have the same number of rows as *angles*. *radians* indicates whether or not input angles are given in radians (True) or degrees (False; the default). *pushoff* is the distance to move away from *pts* for determining transformed angles (see discussion of method below). The transformed angles are returned in an array with the same size as *angles*. 
The generic version of this method uses a very generic algorithm that transforms *pts*, as well as locations very close to *pts*, to find the angle in the transformed system. """ # Must be 2D if self.input_dims != 2 or self.output_dims != 2: raise NotImplementedError('Only defined in 2D') # pts must be array with 2 columns for x,y assert pts.shape[1] == 2 # angles must be a column vector and have same number of # rows as pts assert np.prod(angles.shape) == angles.shape[0] == pts.shape[0] # Convert to radians if desired if not radians: angles = angles / 180.0 * np.pi # Move a short distance away pts2 = pts + pushoff * np.c_[np.cos(angles), np.sin(angles)] # Transform both sets of points tpts = self.transform(pts) tpts2 = self.transform(pts2) # Calculate transformed angles d = tpts2 - tpts a = np.arctan2(d[:, 1], d[:, 0]) # Convert back to degrees if desired if not radians: a = a * 180.0 / np.pi return a def inverted(self): """ Return the corresponding inverse transformation. The return value of this method should be treated as temporary. An update to *self* does not cause a corresponding update to its inverted copy. ``x === self.inverted().transform(self.transform(x))`` """ raise NotImplementedError() class TransformWrapper(Transform): """ A helper class that holds a single child transform and acts equivalently to it. This is useful if a node of the transform tree must be replaced at run time with a transform of a different type. This class allows that replacement to correctly trigger invalidation. Note that :class:`TransformWrapper` instances must have the same input and output dimensions during their entire lifetime, so the child transform may only be replaced with another child transform of the same dimensions. """ pass_through = True def __init__(self, child): """ *child*: A class:`Transform` instance. This child may later be replaced with :meth:`set`. """ assert isinstance(child, Transform) Transform.__init__(self) self.input_dims = child.input_dims self.output_dims = child.output_dims self._set(child) self._invalid = 0 def __eq__(self, other): return self._child.__eq__(other) if DEBUG: def __str__(self): return str(self._child) def __getstate__(self): # only store the child return {'child': self._child} def __setstate__(self, state): # re-initialise the TransformWrapper with the state's child self.__init__(state['child']) def __repr__(self): return "TransformWrapper(%r)" % self._child def frozen(self): return self._child.frozen() frozen.__doc__ = Transform.frozen.__doc__ def _set(self, child): self._child = child self.set_children(child) self.transform = child.transform self.transform_affine = child.transform_affine self.transform_non_affine = child.transform_non_affine self.transform_path = child.transform_path self.transform_path_affine = child.transform_path_affine self.transform_path_non_affine = child.transform_path_non_affine self.get_affine = child.get_affine self.inverted = child.inverted self.get_matrix = child.get_matrix # note we do not wrap other properties here since the transform's # child can be changed with WrappedTransform.set and so checking # is_affine and other such properties may be dangerous. def set(self, child): """ Replace the current child of this transform with another one. The new child must have the same number of input and output dimensions as the current child. 
""" assert child.input_dims == self.input_dims assert child.output_dims == self.output_dims self._set(child) self._invalid = 0 self.invalidate() self._invalid = 0 def _get_is_affine(self): return self._child.is_affine is_affine = property(_get_is_affine) def _get_is_separable(self): return self._child.is_separable is_separable = property(_get_is_separable) def _get_has_inverse(self): return self._child.has_inverse has_inverse = property(_get_has_inverse) class AffineBase(Transform): """ The base class of all affine transformations of any number of dimensions. """ is_affine = True def __init__(self, *args, **kwargs): Transform.__init__(self, *args, **kwargs) self._inverted = None def __array__(self, *args, **kwargs): # optimises the access of the transform matrix vs the superclass return self.get_matrix() @staticmethod def _concat(a, b): """ Concatenates two transformation matrices (represented as numpy arrays) together. """ return np.dot(b, a) def __eq__(self, other): if other.is_affine: return np.all(self.get_matrix() == other.get_matrix()) return NotImplemented def transform(self, values): return self.transform_affine(values) transform.__doc__ = Transform.transform.__doc__ def transform_affine(self, values): raise NotImplementedError('Affine subclasses should override this ' 'method.') transform_affine.__doc__ = Transform.transform_affine.__doc__ def transform_non_affine(self, points): return points transform_non_affine.__doc__ = Transform.transform_non_affine.__doc__ def transform_path(self, path): return self.transform_path_affine(path) transform_path.__doc__ = Transform.transform_path.__doc__ def transform_path_affine(self, path): return Path(self.transform_affine(path.vertices), path.codes, path._interpolation_steps) transform_path_affine.__doc__ = Transform.transform_path_affine.__doc__ def transform_path_non_affine(self, path): return path transform_path_non_affine.__doc__ = Transform.transform_path_non_affine.__doc__ def get_affine(self): return self get_affine.__doc__ = Transform.get_affine.__doc__ class Affine2DBase(AffineBase): """ The base class of all 2D affine transformations. 2D affine transformations are performed using a 3x3 numpy array:: a c e b d f 0 0 1 This class provides the read-only interface. For a mutable 2D affine transformation, use :class:`Affine2D`. Subclasses of this class will generally only need to override a constructor and :meth:`get_matrix` that generates a custom 3x3 matrix. 
""" has_inverse = True input_dims = 2 output_dims = 2 def frozen(self): return Affine2D(self.get_matrix().copy()) frozen.__doc__ = AffineBase.frozen.__doc__ def _get_is_separable(self): mtx = self.get_matrix() return mtx[0, 1] == 0.0 and mtx[1, 0] == 0.0 is_separable = property(_get_is_separable) def to_values(self): """ Return the values of the matrix as a sequence (a,b,c,d,e,f) """ mtx = self.get_matrix() return tuple(mtx[:2].swapaxes(0, 1).flatten()) @staticmethod def matrix_from_values(a, b, c, d, e, f): """ (staticmethod) Create a new transformation matrix as a 3x3 numpy array of the form:: a c e b d f 0 0 1 """ return np.array([[a, c, e], [b, d, f], [0.0, 0.0, 1.0]], np.float_) def transform_affine(self, points): mtx = self.get_matrix() if isinstance(points, MaskedArray): tpoints = affine_transform(points.data, mtx) return ma.MaskedArray(tpoints, mask=ma.getmask(points)) return affine_transform(points, mtx) def transform_point(self, point): mtx = self.get_matrix() return affine_transform(point, mtx) transform_point.__doc__ = AffineBase.transform_point.__doc__ if DEBUG: _transform_affine = transform_affine def transform_affine(self, points): # The major speed trap here is just converting to the # points to an array in the first place. If we can use # more arrays upstream, that should help here. if (not ma.isMaskedArray(points) and not isinstance(points, np.ndarray)): warnings.warn( ('A non-numpy array of type %s was passed in for ' + 'transformation. Please correct this.') % type(points)) return self._transform_affine(points) transform_affine.__doc__ = AffineBase.transform_affine.__doc__ def inverted(self): if self._inverted is None or self._invalid: mtx = self.get_matrix() shorthand_name = None if self._shorthand_name: shorthand_name = '(%s)-1' % self._shorthand_name self._inverted = Affine2D(inv(mtx), shorthand_name=shorthand_name) self._invalid = 0 return self._inverted inverted.__doc__ = AffineBase.inverted.__doc__ class Affine2D(Affine2DBase): """ A mutable 2D affine transformation. """ def __init__(self, matrix=None, **kwargs): """ Initialize an Affine transform from a 3x3 numpy float array:: a c e b d f 0 0 1 If *matrix* is None, initialize with the identity transform. """ Affine2DBase.__init__(self, **kwargs) if matrix is None: matrix = np.identity(3) elif DEBUG: matrix = np.asarray(matrix, np.float_) assert matrix.shape == (3, 3) self._mtx = matrix self._invalid = 0 def __repr__(self): return "Affine2D(%s)" % repr(self._mtx) # def __cmp__(self, other): # # XXX redundant. this only tells us eq. # if (isinstance(other, Affine2D) and # (self.get_matrix() == other.get_matrix()).all()): # return 0 # return -1 @staticmethod def from_values(a, b, c, d, e, f): """ (staticmethod) Create a new Affine2D instance from the given values:: a c e b d f 0 0 1 . """ return Affine2D( np.array([a, c, e, b, d, f, 0.0, 0.0, 1.0], np.float_) .reshape((3, 3))) def get_matrix(self): """ Get the underlying transformation matrix as a 3x3 numpy array:: a c e b d f 0 0 1 . """ self._invalid = 0 return self._mtx def set_matrix(self, mtx): """ Set the underlying transformation matrix from a 3x3 numpy array:: a c e b d f 0 0 1 . """ self._mtx = mtx self.invalidate() def set(self, other): """ Set this transformation from the frozen copy of another :class:`Affine2DBase` object. """ assert isinstance(other, Affine2DBase) self._mtx = other.get_matrix() self.invalidate() @staticmethod def identity(): """ (staticmethod) Return a new :class:`Affine2D` object that is the identity transform. 
        Unless this transform will be mutated later on, consider using
        the faster :class:`IdentityTransform` class instead.
        """
        return Affine2D(np.identity(3))

    def clear(self):
        """
        Reset the underlying matrix to the identity transform.
        """
        self._mtx = np.identity(3)
        self.invalidate()
        return self

    def rotate(self, theta):
        """
        Add a rotation (in radians) to this transform in place.

        Returns *self*, so this method can easily be chained with more
        calls to :meth:`rotate`, :meth:`rotate_deg`, :meth:`translate`
        and :meth:`scale`.
        """
        a = np.cos(theta)
        b = np.sin(theta)
        rotate_mtx = np.array(
            [[a, -b, 0.0], [b, a, 0.0], [0.0, 0.0, 1.0]], np.float_)
        self._mtx = np.dot(rotate_mtx, self._mtx)
        self.invalidate()
        return self

    def rotate_deg(self, degrees):
        """
        Add a rotation (in degrees) to this transform in place.

        Returns *self*, so this method can easily be chained with more
        calls to :meth:`rotate`, :meth:`rotate_deg`, :meth:`translate`
        and :meth:`scale`.
        """
        return self.rotate(degrees * np.pi / 180.)

    def rotate_around(self, x, y, theta):
        """
        Add a rotation (in radians) around the point (x, y) in place.

        Returns *self*, so this method can easily be chained with more
        calls to :meth:`rotate`, :meth:`rotate_deg`, :meth:`translate`
        and :meth:`scale`.
        """
        return self.translate(-x, -y).rotate(theta).translate(x, y)

    def rotate_deg_around(self, x, y, degrees):
        """
        Add a rotation (in degrees) around the point (x, y) in place.

        Returns *self*, so this method can easily be chained with more
        calls to :meth:`rotate`, :meth:`rotate_deg`, :meth:`translate`
        and :meth:`scale`.
        """
        return self.translate(-x, -y).rotate_deg(degrees).translate(x, y)

    def translate(self, tx, ty):
        """
        Adds a translation in place.

        Returns *self*, so this method can easily be chained with more
        calls to :meth:`rotate`, :meth:`rotate_deg`, :meth:`translate`
        and :meth:`scale`.
        """
        translate_mtx = np.array(
            [[1.0, 0.0, tx], [0.0, 1.0, ty], [0.0, 0.0, 1.0]], np.float_)
        self._mtx = np.dot(translate_mtx, self._mtx)
        self.invalidate()
        return self

    def scale(self, sx, sy=None):
        """
        Adds a scale in place.

        If *sy* is None, the same scale is applied in both the *x*- and
        *y*-directions.

        Returns *self*, so this method can easily be chained with more
        calls to :meth:`rotate`, :meth:`rotate_deg`, :meth:`translate`
        and :meth:`scale`.
        """
        if sy is None:
            sy = sx
        scale_mtx = np.array(
            [[sx, 0.0, 0.0], [0.0, sy, 0.0], [0.0, 0.0, 1.0]], np.float_)
        self._mtx = np.dot(scale_mtx, self._mtx)
        self.invalidate()
        return self

    def _get_is_separable(self):
        mtx = self.get_matrix()
        return mtx[0, 1] == 0.0 and mtx[1, 0] == 0.0
    is_separable = property(_get_is_separable)


class IdentityTransform(Affine2DBase):
    """
    A special class that does one thing, the identity transform, in a
    fast way.
""" _mtx = np.identity(3) def frozen(self): return self frozen.__doc__ = Affine2DBase.frozen.__doc__ def __repr__(self): return "IdentityTransform()" def get_matrix(self): return self._mtx get_matrix.__doc__ = Affine2DBase.get_matrix.__doc__ def transform(self, points): return points transform.__doc__ = Affine2DBase.transform.__doc__ transform_affine = transform transform_affine.__doc__ = Affine2DBase.transform_affine.__doc__ transform_non_affine = transform transform_non_affine.__doc__ = Affine2DBase.transform_non_affine.__doc__ def transform_path(self, path): return path transform_path.__doc__ = Affine2DBase.transform_path.__doc__ transform_path_affine = transform_path transform_path_affine.__doc__ = Affine2DBase.transform_path_affine.__doc__ transform_path_non_affine = transform_path transform_path_non_affine.__doc__ = Affine2DBase.transform_path_non_affine.__doc__ def get_affine(self): return self get_affine.__doc__ = Affine2DBase.get_affine.__doc__ inverted = get_affine inverted.__doc__ = Affine2DBase.inverted.__doc__ class BlendedGenericTransform(Transform): """ A "blended" transform uses one transform for the *x*-direction, and another transform for the *y*-direction. This "generic" version can handle any given child transform in the *x*- and *y*-directions. """ input_dims = 2 output_dims = 2 is_separable = True pass_through = True def __init__(self, x_transform, y_transform, **kwargs): """ Create a new "blended" transform using *x_transform* to transform the *x*-axis and *y_transform* to transform the *y*-axis. You will generally not call this constructor directly but use the :func:`blended_transform_factory` function instead, which can determine automatically which kind of blended transform to create. """ # Here we ask: "Does it blend?" Transform.__init__(self, **kwargs) self._x = x_transform self._y = y_transform self.set_children(x_transform, y_transform) self._affine = None def __eq__(self, other): # Note, this is an exact copy of BlendedAffine2D.__eq__ if isinstance(other, (BlendedAffine2D, BlendedGenericTransform)): return (self._x == other._x) and (self._y == other._y) elif self._x == self._y: return self._x == other else: return NotImplemented def contains_branch_seperately(self, transform): # Note, this is an exact copy of BlendedAffine2D.contains_branch_seperately return self._x.contains_branch(transform), self._y.contains_branch(transform) @property def depth(self): return max([self._x.depth, self._y.depth]) def contains_branch(self, other): # a blended transform cannot possibly contain a branch from two different transforms. 
return False def _get_is_affine(self): return self._x.is_affine and self._y.is_affine is_affine = property(_get_is_affine) def _get_has_inverse(self): return self._x.has_inverse and self._y.has_inverse has_inverse = property(_get_has_inverse) def frozen(self): return blended_transform_factory(self._x.frozen(), self._y.frozen()) frozen.__doc__ = Transform.frozen.__doc__ def __repr__(self): return "BlendedGenericTransform(%s,%s)" % (self._x, self._y) def transform_non_affine(self, points): if self._x.is_affine and self._y.is_affine: return points x = self._x y = self._y if x == y and x.input_dims == 2: return x.transform_non_affine(points) if x.input_dims == 2: x_points = x.transform_non_affine(points)[:, 0:1] else: x_points = x.transform_non_affine(points[:, 0]) x_points = x_points.reshape((len(x_points), 1)) if y.input_dims == 2: y_points = y.transform_non_affine(points)[:, 1:] else: y_points = y.transform_non_affine(points[:, 1]) y_points = y_points.reshape((len(y_points), 1)) if isinstance(x_points, MaskedArray) or isinstance(y_points, MaskedArray): return ma.concatenate((x_points, y_points), 1) else: return np.concatenate((x_points, y_points), 1) transform_non_affine.__doc__ = Transform.transform_non_affine.__doc__ def inverted(self): return BlendedGenericTransform(self._x.inverted(), self._y.inverted()) inverted.__doc__ = Transform.inverted.__doc__ def get_affine(self): if self._invalid or self._affine is None: if self._x == self._y: self._affine = self._x.get_affine() else: x_mtx = self._x.get_affine().get_matrix() y_mtx = self._y.get_affine().get_matrix() # This works because we already know the transforms are # separable, though normally one would want to set b and # c to zero. mtx = np.vstack((x_mtx[0], y_mtx[1], [0.0, 0.0, 1.0])) self._affine = Affine2D(mtx) self._invalid = 0 return self._affine get_affine.__doc__ = Transform.get_affine.__doc__ class BlendedAffine2D(Affine2DBase): """ A "blended" transform uses one transform for the *x*-direction, and another transform for the *y*-direction. This version is an optimization for the case where both child transforms are of type :class:`Affine2DBase`. """ is_separable = True def __init__(self, x_transform, y_transform, **kwargs): """ Create a new "blended" transform using *x_transform* to transform the *x*-axis and *y_transform* to transform the *y*-axis. Both *x_transform* and *y_transform* must be 2D affine transforms. You will generally not call this constructor directly but use the :func:`blended_transform_factory` function instead, which can determine automatically which kind of blended transform to create. 
""" assert x_transform.is_affine assert y_transform.is_affine assert x_transform.is_separable assert y_transform.is_separable Transform.__init__(self, **kwargs) self._x = x_transform self._y = y_transform self.set_children(x_transform, y_transform) Affine2DBase.__init__(self) self._mtx = None def __eq__(self, other): # Note, this is an exact copy of BlendedGenericTransform.__eq__ if isinstance(other, (BlendedAffine2D, BlendedGenericTransform)): return (self._x == other._x) and (self._y == other._y) elif self._x == self._y: return self._x == other else: return NotImplemented def contains_branch_seperately(self, transform): # Note, this is an exact copy of BlendedTransform.contains_branch_seperately return self._x.contains_branch(transform), self._y.contains_branch(transform) def __repr__(self): return "BlendedAffine2D(%s,%s)" % (self._x, self._y) def get_matrix(self): if self._invalid: if self._x == self._y: self._mtx = self._x.get_matrix() else: x_mtx = self._x.get_matrix() y_mtx = self._y.get_matrix() # This works because we already know the transforms are # separable, though normally one would want to set b and # c to zero. self._mtx = np.vstack((x_mtx[0], y_mtx[1], [0.0, 0.0, 1.0])) self._inverted = None self._invalid = 0 return self._mtx get_matrix.__doc__ = Affine2DBase.get_matrix.__doc__ def blended_transform_factory(x_transform, y_transform): """ Create a new "blended" transform using *x_transform* to transform the *x*-axis and *y_transform* to transform the *y*-axis. A faster version of the blended transform is returned for the case where both child transforms are affine. """ if (isinstance(x_transform, Affine2DBase) and isinstance(y_transform, Affine2DBase)): return BlendedAffine2D(x_transform, y_transform) return BlendedGenericTransform(x_transform, y_transform) class CompositeGenericTransform(Transform): """ A composite transform formed by applying transform *a* then transform *b*. This "generic" version can handle any two arbitrary transformations. """ pass_through = True def __init__(self, a, b, **kwargs): """ Create a new composite transform that is the result of applying transform *a* then transform *b*. You will generally not call this constructor directly but use the :func:`composite_transform_factory` function instead, which can automatically choose the best kind of composite transform instance to create. """ assert a.output_dims == b.input_dims self.input_dims = a.input_dims self.output_dims = b.output_dims Transform.__init__(self, **kwargs) self._a = a self._b = b self.set_children(a, b) is_affine = property(lambda self: self._a.is_affine and self._b.is_affine) def frozen(self): self._invalid = 0 frozen = composite_transform_factory(self._a.frozen(), self._b.frozen()) if not isinstance(frozen, CompositeGenericTransform): return frozen.frozen() return frozen frozen.__doc__ = Transform.frozen.__doc__ def _invalidate_internal(self, value, invalidating_node): # In some cases for a composite transform, an invalidating call to AFFINE_ONLY needs # to be extended to invalidate the NON_AFFINE part too. 
These cases are when the right # hand transform is non-affine and either: # (a) the left hand transform is non affine # (b) it is the left hand node which has triggered the invalidation if value == Transform.INVALID_AFFINE \ and not self._b.is_affine \ and (not self._a.is_affine or invalidating_node is self._a): value = Transform.INVALID Transform._invalidate_internal(self, value=value, invalidating_node=invalidating_node) def __eq__(self, other): if isinstance(other, (CompositeGenericTransform, CompositeAffine2D)): return self is other or (self._a == other._a and self._b == other._b) else: return False def _iter_break_from_left_to_right(self): for lh_compliment, rh_compliment in self._a._iter_break_from_left_to_right(): yield lh_compliment, rh_compliment + self._b for lh_compliment, rh_compliment in self._b._iter_break_from_left_to_right(): yield self._a + lh_compliment, rh_compliment @property def depth(self): return self._a.depth + self._b.depth def _get_is_affine(self): return self._a.is_affine and self._b.is_affine is_affine = property(_get_is_affine) def _get_is_separable(self): return self._a.is_separable and self._b.is_separable is_separable = property(_get_is_separable) if DEBUG: def __str__(self): return '(%s, %s)' % (self._a, self._b) def __repr__(self): return "CompositeGenericTransform(%r, %r)" % (self._a, self._b) def transform_affine(self, points): return self.get_affine().transform(points) transform_affine.__doc__ = Transform.transform_affine.__doc__ def transform_non_affine(self, points): if self._a.is_affine and self._b.is_affine: return points elif not self._a.is_affine and self._b.is_affine: return self._a.transform_non_affine(points) else: return self._b.transform_non_affine( self._a.transform(points)) transform_non_affine.__doc__ = Transform.transform_non_affine.__doc__ def transform_path_non_affine(self, path): if self._a.is_affine and self._b.is_affine: return path elif not self._a.is_affine and self._b.is_affine: return self._a.transform_path_non_affine(path) else: return self._b.transform_path_non_affine( self._a.transform_path(path)) transform_path_non_affine.__doc__ = Transform.transform_path_non_affine.__doc__ def get_affine(self): if not self._b.is_affine: return self._b.get_affine() else: return Affine2D(np.dot(self._b.get_affine().get_matrix(), self._a.get_affine().get_matrix())) get_affine.__doc__ = Transform.get_affine.__doc__ def inverted(self): return CompositeGenericTransform(self._b.inverted(), self._a.inverted()) inverted.__doc__ = Transform.inverted.__doc__ def _get_has_inverse(self): return self._a.has_inverse and self._b.has_inverse has_inverse = property(_get_has_inverse) class CompositeAffine2D(Affine2DBase): """ A composite transform formed by applying transform *a* then transform *b*. This version is an optimization that handles the case where both *a* and *b* are 2D affines. """ def __init__(self, a, b, **kwargs): """ Create a new composite transform that is the result of applying transform *a* then transform *b*. Both *a* and *b* must be instances of :class:`Affine2DBase`. You will generally not call this constructor directly but use the :func:`composite_transform_factory` function instead, which can automatically choose the best kind of composite transform instance to create. 
""" assert a.output_dims == b.input_dims self.input_dims = a.input_dims self.output_dims = b.output_dims assert a.is_affine assert b.is_affine Affine2DBase.__init__(self, **kwargs) self._a = a self._b = b self.set_children(a, b) self._mtx = None if DEBUG: def __str__(self): return '(%s, %s)' % (self._a, self._b) @property def depth(self): return self._a.depth + self._b.depth def _iter_break_from_left_to_right(self): for lh_compliment, rh_compliment in self._a._iter_break_from_left_to_right(): yield lh_compliment, rh_compliment + self._b for lh_compliment, rh_compliment in self._b._iter_break_from_left_to_right(): yield self._a + lh_compliment, rh_compliment def __repr__(self): return "CompositeAffine2D(%r, %r)" % (self._a, self._b) def get_matrix(self): if self._invalid: self._mtx = np.dot( self._b.get_matrix(), self._a.get_matrix()) self._inverted = None self._invalid = 0 return self._mtx get_matrix.__doc__ = Affine2DBase.get_matrix.__doc__ def composite_transform_factory(a, b): """ Create a new composite transform that is the result of applying transform a then transform b. Shortcut versions of the blended transform are provided for the case where both child transforms are affine, or one or the other is the identity transform. Composite transforms may also be created using the '+' operator, e.g.:: c = a + b """ # check to see if any of a or b are IdentityTransforms. We use # isinstance here to guarantee that the transforms will *always* # be IdentityTransforms. Since TransformWrappers are mutable, # use of equality here would be wrong. if isinstance(a, IdentityTransform): return b elif isinstance(b, IdentityTransform): return a elif isinstance(a, Affine2D) and isinstance(b, Affine2D): return CompositeAffine2D(a, b) return CompositeGenericTransform(a, b) class BboxTransform(Affine2DBase): """ :class:`BboxTransform` linearly transforms points from one :class:`Bbox` to another :class:`Bbox`. """ is_separable = True def __init__(self, boxin, boxout, **kwargs): """ Create a new :class:`BboxTransform` that linearly transforms points from *boxin* to *boxout*. """ assert boxin.is_bbox assert boxout.is_bbox Affine2DBase.__init__(self, **kwargs) self._boxin = boxin self._boxout = boxout self.set_children(boxin, boxout) self._mtx = None self._inverted = None def __repr__(self): return "BboxTransform(%r, %r)" % (self._boxin, self._boxout) def get_matrix(self): if self._invalid: inl, inb, inw, inh = self._boxin.bounds outl, outb, outw, outh = self._boxout.bounds x_scale = outw / inw y_scale = outh / inh if DEBUG and (x_scale == 0 or y_scale == 0): raise ValueError("Transforming from or to a singular bounding box.") self._mtx = np.array([[x_scale, 0.0 , (-inl*x_scale+outl)], [0.0 , y_scale, (-inb*y_scale+outb)], [0.0 , 0.0 , 1.0 ]], np.float_) self._inverted = None self._invalid = 0 return self._mtx get_matrix.__doc__ = Affine2DBase.get_matrix.__doc__ class BboxTransformTo(Affine2DBase): """ :class:`BboxTransformTo` is a transformation that linearly transforms points from the unit bounding box to a given :class:`Bbox`. """ is_separable = True def __init__(self, boxout, **kwargs): """ Create a new :class:`BboxTransformTo` that linearly transforms points from the unit bounding box to *boxout*. 
""" assert boxout.is_bbox Affine2DBase.__init__(self, **kwargs) self._boxout = boxout self.set_children(boxout) self._mtx = None self._inverted = None def __repr__(self): return "BboxTransformTo(%r)" % (self._boxout) def get_matrix(self): if self._invalid: outl, outb, outw, outh = self._boxout.bounds if DEBUG and (outw == 0 or outh == 0): raise ValueError("Transforming to a singular bounding box.") self._mtx = np.array([[outw, 0.0, outl], [ 0.0, outh, outb], [ 0.0, 0.0, 1.0]], np.float_) self._inverted = None self._invalid = 0 return self._mtx get_matrix.__doc__ = Affine2DBase.get_matrix.__doc__ class BboxTransformToMaxOnly(BboxTransformTo): """ :class:`BboxTransformTo` is a transformation that linearly transforms points from the unit bounding box to a given :class:`Bbox` with a fixed upper left of (0, 0). """ def __repr__(self): return "BboxTransformToMaxOnly(%r)" % (self._boxout) def get_matrix(self): if self._invalid: xmax, ymax = self._boxout.max if DEBUG and (xmax == 0 or ymax == 0): raise ValueError("Transforming to a singular bounding box.") self._mtx = np.array([[xmax, 0.0, 0.0], [ 0.0, ymax, 0.0], [ 0.0, 0.0, 1.0]], np.float_) self._inverted = None self._invalid = 0 return self._mtx get_matrix.__doc__ = Affine2DBase.get_matrix.__doc__ class BboxTransformFrom(Affine2DBase): """ :class:`BboxTransformFrom` linearly transforms points from a given :class:`Bbox` to the unit bounding box. """ is_separable = True def __init__(self, boxin, **kwargs): assert boxin.is_bbox Affine2DBase.__init__(self, **kwargs) self._boxin = boxin self.set_children(boxin) self._mtx = None self._inverted = None def __repr__(self): return "BboxTransformFrom(%r)" % (self._boxin) def get_matrix(self): if self._invalid: inl, inb, inw, inh = self._boxin.bounds if DEBUG and (inw == 0 or inh == 0): raise ValueError("Transforming from a singular bounding box.") x_scale = 1.0 / inw y_scale = 1.0 / inh self._mtx = np.array([[x_scale, 0.0 , (-inl*x_scale)], [0.0 , y_scale, (-inb*y_scale)], [0.0 , 0.0 , 1.0 ]], np.float_) self._inverted = None self._invalid = 0 return self._mtx get_matrix.__doc__ = Affine2DBase.get_matrix.__doc__ class ScaledTranslation(Affine2DBase): """ A transformation that translates by *xt* and *yt*, after *xt* and *yt* have been transformad by the given transform *scale_trans*. """ def __init__(self, xt, yt, scale_trans, **kwargs): Affine2DBase.__init__(self, **kwargs) self._t = (xt, yt) self._scale_trans = scale_trans self.set_children(scale_trans) self._mtx = None self._inverted = None def __repr__(self): return "ScaledTranslation(%r)" % (self._t,) def get_matrix(self): if self._invalid: xt, yt = self._scale_trans.transform_point(self._t) self._mtx = np.array([[1.0, 0.0, xt], [0.0, 1.0, yt], [0.0, 0.0, 1.0]], np.float_) self._invalid = 0 self._inverted = None return self._mtx get_matrix.__doc__ = Affine2DBase.get_matrix.__doc__ class TransformedPath(TransformNode): """ A :class:`TransformedPath` caches a non-affine transformed copy of the :class:`~matplotlib.path.Path`. This cached copy is automatically updated when the non-affine part of the transform changes. .. note:: Paths are considered immutable by this class. Any update to the path's vertices/codes will not trigger a transform recomputation. """ def __init__(self, path, transform): """ Create a new :class:`TransformedPath` from the given :class:`~matplotlib.path.Path` and :class:`Transform`. 
""" assert isinstance(transform, Transform) TransformNode.__init__(self) self._path = path self._transform = transform self.set_children(transform) self._transformed_path = None self._transformed_points = None def _revalidate(self): # only recompute if the invalidation includes the non_affine part of the transform if ((self._invalid & self.INVALID_NON_AFFINE == self.INVALID_NON_AFFINE) or self._transformed_path is None): self._transformed_path = \ self._transform.transform_path_non_affine(self._path) self._transformed_points = \ Path(self._transform.transform_non_affine(self._path.vertices), None, self._path._interpolation_steps) self._invalid = 0 def get_transformed_points_and_affine(self): """ Return a copy of the child path, with the non-affine part of the transform already applied, along with the affine part of the path necessary to complete the transformation. Unlike :meth:`get_transformed_path_and_affine`, no interpolation will be performed. """ self._revalidate() return self._transformed_points, self.get_affine() def get_transformed_path_and_affine(self): """ Return a copy of the child path, with the non-affine part of the transform already applied, along with the affine part of the path necessary to complete the transformation. """ self._revalidate() return self._transformed_path, self.get_affine() def get_fully_transformed_path(self): """ Return a fully-transformed copy of the child path. """ self._revalidate() return self._transform.transform_path_affine(self._transformed_path) def get_affine(self): return self._transform.get_affine() def nonsingular(vmin, vmax, expander=0.001, tiny=1e-15, increasing=True): ''' Modify the endpoints of a range as needed to avoid singularities. *vmin*, *vmax* the initial endpoints. *tiny* threshold for the ratio of the interval to the maximum absolute value of its endpoints. If the interval is smaller than this, it will be expanded. This value should be around 1e-15 or larger; otherwise the interval will be approaching the double precision resolution limit. *expander* fractional amount by which *vmin* and *vmax* are expanded if the original interval is too small, based on *tiny*. *increasing*: [True | False] If True (default), swap *vmin*, *vmax* if *vmin* > *vmax* Returns *vmin*, *vmax*, expanded and/or swapped if necessary. If either input is inf or NaN, or if both inputs are 0, returns -*expander*, *expander*. ''' if (not np.isfinite(vmin)) or (not np.isfinite(vmax)): return -expander, expander swapped = False if vmax < vmin: vmin, vmax = vmax, vmin swapped = True if vmax - vmin <= max(abs(vmin), abs(vmax)) * tiny: if vmax == 0 and vmin == 0: vmin = -expander vmax = expander else: vmin -= expander*abs(vmin) vmax += expander*abs(vmax) if swapped and not increasing: vmin, vmax = vmax, vmin return vmin, vmax def interval_contains(interval, val): a, b = interval return ( ((a < b) and (a <= val and b >= val)) or (b <= val and a >= val)) def interval_contains_open(interval, val): a, b = interval return ( ((a < b) and (a < val and b > val)) or (b < val and a > val)) def offset_copy(trans, fig=None, x=0.0, y=0.0, units='inches'): ''' Return a new transform with an added offset. 
    args:
      trans is any transform

    kwargs:
      fig is the current figure; it can be None if units are 'dots'

      x, y give the offset

      units is 'inches', 'points' or 'dots'
    '''
    if units == 'dots':
        return trans + Affine2D().translate(x, y)
    if fig is None:
        raise ValueError('For units of inches or points a fig kwarg is needed')
    if units == 'points':
        x /= 72.0
        y /= 72.0
    elif not units == 'inches':
        raise ValueError('units must be dots, points, or inches')
    return trans + ScaledTranslation(x, y, fig.dpi_scale_trans)
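
# ---------------------------------------------------------------------------
# Editor's note: a minimal usage sketch, not part of the original module.
# It exercises only classes defined above (Bbox, Affine2D, BboxTransform);
# the coordinate values are illustrative, and `np` is the module's own
# numpy import.
# ---------------------------------------------------------------------------
if __name__ == '__main__':
    # Build a bbox from (x0, y0, width, height).
    data_box = Bbox.from_bounds(0.0, 0.0, 10.0, 5.0)

    # Affine2D mutators return self, so calls chain: rotate, then translate.
    rot = Affine2D().rotate_deg(90).translate(5.0, 0.0)
    print(rot.transform_point((1.0, 0.0)))    # -> approx [5., 1.]

    # Map data coordinates into a 640x480 "pixel" box.
    pixel_box = Bbox.from_bounds(0.0, 0.0, 640.0, 480.0)
    data_to_pixels = BboxTransform(data_box, pixel_box)
    print(data_to_pixels.transform(np.array([[0.0, 0.0], [10.0, 5.0]])))
    # -> [[0., 0.], [640., 480.]]

    # '+' composes transforms; the left-hand transform is applied first
    # (see Transform.__add__ above).
    composed = data_to_pixels + rot
    print(composed.transform_point((10.0, 5.0)))  # -> approx [-475., 640.]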
"""Contains the drivers and interface code for pinball machines which use the Multimorphic R-ROC hardware controllers. This code can be used with P-ROC driver boards, or with Stern SAM, Stern Whitestar, Williams WPC, or Williams WPC95 driver boards. Much of this code is from the P-ROC drivers section of the pyprocgame project, written by Adam Preble and Gerry Stellenberg. It was originally released under the MIT license and is released here under the MIT License. More info on the P-ROC hardware platform: http://pinballcontrollers.com/ Original code source on which this module was based: https://github.com/preble/pyprocgame If you want to use the Mission Pinball Framework with P-ROC hardware, you also need libpinproc and pypinproc. More info: http://www.pinballcontrollers.com/forum/index.php?board=10.0 """ # p_roc.py # Mission Pinball Framework # Written by Brian Madden & Gabe Knuth # Released under the MIT License. (See license info at the end of this file.) # Documentation and more info at http://missionpinball.com/mpf import logging import re import time import sys from copy import deepcopy try: import pinproc pinproc_imported = True except: pinproc_imported = False from mpf.system.platform import Platform from mpf.system.utility_functions import Util proc_output_module = 3 proc_pdb_bus_addr = 0xC00 # driverboards = ['wpc', 'wpc95', 'sternSAM', 'sternWhitestar'] class HardwarePlatform(Platform): """Platform class for the P-ROC hardware controller. Args: machine: The MachineController instance. Attributes: machine: The MachineController instance. proc: The P-ROC pinproc.PinPROC device. machine_type: Constant of the pinproc.MachineType """ def __init__(self, machine): super(HardwarePlatform, self).__init__(machine) self.log = logging.getLogger('P-ROC') self.log.debug("Configuring P-ROC hardware") if not pinproc_imported: self.log.error('Could not import "pinproc". Most likely you do not ' 'have libpinproc and/or pypinproc installed. You can' ' run MPF in software-only "virtual" mode by using ' 'the -x command like option for now instead.') sys.exit() # ---------------------------------------------------------------------- # Platform-specific hardware features. WARNING: Do not edit these. They # are based on what the P-ROC hardware can and cannot do. self.features['max_pulse'] = 255 self.features['hw_timer'] = False self.features['hw_rule_coil_delay'] = False self.features['variable_recycle_time'] = False self.features['variable_debounce_time'] = False self.features['hw_led_fade'] = True # todo need to add differences between patter and pulsed_patter # Make the platform features available to everyone self.machine.config['platform'] = self.features # ---------------------------------------------------------------------- self.machine_type = pinproc.normalize_machine_type( self.machine.config['hardware']['driverboards']) # Connect to the P-ROC. Keep trying if it doesn't work the first time. self.proc = None self.log.info("Connecting to P-ROC") while not self.proc: try: self.proc = pinproc.PinPROC(self.machine_type) self.proc.reset(1) except IOError: print "Retrying..." self.log.info("Successfully connected to P-ROC") # Clear out the default program for the aux port since we might need it # for a 9th column. 
Details: # http://www.pinballcontrollers.com/forum/index.php?topic=1360 commands = [] commands += [pinproc.aux_command_disable()] for i in range(1, 255): commands += [pinproc.aux_command_jump(0)] self.proc.aux_send_commands(0, commands) # End of the clear out the default program for the aux port. # Because PDBs can be configured in many different ways, we need to # traverse the YAML settings to see how many PDBs are being used. # Then we can configure the P-ROC appropriately to use those PDBs. # Only then can we relate the YAML coil/light #'s to P-ROC numbers for # the collections. if self.machine_type == pinproc.MachineTypePDB: self.log.debug("Configuring P-ROC for PDBs (P-ROC driver boards)") self.pdbconfig = PDBConfig(self.proc, self.machine.config) else: self.log.debug("Configuring P-ROC for OEM driver boards") self.polarity = self.machine_type == pinproc.MachineTypeSternWhitestar\ or self.machine_type == pinproc.MachineTypeSternSAM\ or self.machine_type == pinproc.MachineTypePDB def __repr__(self): return '<Platform.P-ROC>' def configure_driver(self, config, device_type='coil'): """Creates a P-ROC driver. Typically drivers are coils or flashers, but for the P-ROC this is also used for matrix-based lights. Args: config: Dictionary of settings for the driver. device_type: String with value of either 'coil' or 'switch'. Returns: A reference to the PROCDriver object which is the actual object you can use to pulse(), patter(), enable(), etc. """ # todo need to add Aux Bus support # todo need to add virtual driver support for driver counts > 256 # Find the P-ROC number for each driver. For P-ROC driver boards, the # P-ROC number is specified via the Ax-By-C format. For OEM driver # boards configured via driver numbers, libpinproc's decode() method # can provide the number. if self.machine_type == pinproc.MachineTypePDB: proc_num = self.pdbconfig.get_proc_number(device_type, str(config['number'])) if proc_num == -1: self.log.error("Coil cannot be controlled by the P-ROC. " "Ignoring.") return else: proc_num = pinproc.decode(self.machine_type, str(config['number'])) if device_type in ['coil', 'flasher']: proc_driver_object = PROCDriver(proc_num, self.proc, config, self.machine) elif device_type == 'light': proc_driver_object = PROCMatrixLight(proc_num, self.proc) if 'polarity' in config: state = proc_driver_object.proc.driver_get_state(config['number']) state['polarity'] = config['polarity'] proc_driver_object.proc.driver_update_state(state) return proc_driver_object, config['number'] def configure_switch(self, config): """Configures a P-ROC switch. Args: config: Dictionary of settings for the switch. In the case of the P-ROC, it uses the following: number : The number (or number string) for the switch as specified in the machine configuration file. debounce : Boolean which specifies whether the P-ROC should debounce this switch first before sending open and close notifications to the host computer. Returns: switch : A reference to the switch object that was just created. proc_num : Integer of the actual hardware switch number the P-ROC uses to refer to this switch. Typically your machine configuration files would specify a switch number like `SD12` or `7/5`. This `proc_num` is an int between 0 and 255. state : An integer of the current hardware state of the switch, used to set the initial state state in the machine. A value of 0 means the switch is open, and 1 means it's closed. Note this state is the physical state of the switch, so if you configure the switch to be normally-closed (i.e. 
"inverted" then your code will have to invert it too.) MPF handles this automatically if the switch type is 'NC'. """ if self.machine_type == pinproc.MachineTypePDB: proc_num = self.pdbconfig.get_proc_number('switch', str(config['number'])) if config['number'] == -1: self.log.error("Switch cannot be controlled by the P-ROC. " "Ignoring.") return else: proc_num = pinproc.decode(self.machine_type, str(config['number'])) switch = PROCSwitch(proc_num) # The P-ROC needs to be configured to notify the host computers of # switch events. (That notification can be for open or closed, # debounced or nondebounced.) self.log.debug("Configuring switch's host notification settings. P-ROC" "number: %s, debounce: %s", proc_num, config['debounce']) if config['debounce'] is False or \ proc_num >= pinproc.SwitchNeverDebounceFirst: self.proc.switch_update_rule(proc_num, 'closed_nondebounced', {'notifyHost': True, 'reloadActive': False}, [], False) self.proc.switch_update_rule(proc_num, 'open_nondebounced', {'notifyHost': True, 'reloadActive': False}, [], False) else: self.proc.switch_update_rule(proc_num, 'closed_debounced', {'notifyHost': True, 'reloadActive': False}, [], False) self.proc.switch_update_rule(proc_num, 'open_debounced', {'notifyHost': True, 'reloadActive': False}, [], False) return switch, proc_num def get_hw_switch_states(self): # Read in and set the initial switch state # The P-ROC uses the following values for hw switch states: # 1 - closed (debounced) # 2 - open (debounced) # 3 - closed (not debounced) # 4 - open (not debounced) states = self.proc.switch_get_states() for switch, state in enumerate(states): if state == 3 or state == 1: states[switch] = 1 else: states[switch] = 0 return states def configure_led(self, config): """ Configures a P-ROC RGB LED controlled via a PD-LED.""" # todo add polarity # split the number (which comes in as a string like w-x-y-z) into parts config['number'] = config['number_str'].split('-') if 'polarity' in config: invert = not config['polarity'] else: invert = False return PDBLED(board=int(config['number'][0]), address=[int(config['number'][1]), int(config['number'][2]), int(config['number'][3])], proc_driver=self.proc, invert=invert) def configure_matrixlight(self, config): """Configures a P-ROC matrix light.""" # On the P-ROC, matrix lights are drivers return self.configure_driver(config, 'light') def configure_gi(self, config): """Configures a P-ROC GI string light.""" # On the P-ROC, GI strings are drivers return self.configure_driver(config, 'light') def configure_dmd(self): """Configures a hardware DMD connected to a classic P-ROC.""" return PROCDMD(self.proc, self.machine) def tick(self): """Checks the P-ROC for any events (switch state changes or notification that a DMD frame was updated). Also tickles the watchdog and flushes any queued commands to the P-ROC. """ # Get P-ROC events (switches & DMD frames displayed) for event in self.proc.get_events(): event_type = event['type'] event_value = event['value'] if event_type == 99: # CTRL-C to quit todo does this go here? 
self.machine.quit() elif event_type == pinproc.EventTypeDMDFrameDisplayed: pass elif event_type == pinproc.EventTypeSwitchClosedDebounced: self.machine.switch_controller.process_switch(state=1, num=event_value) elif event_type == pinproc.EventTypeSwitchOpenDebounced: self.machine.switch_controller.process_switch(state=0, num=event_value) elif event_type == pinproc.EventTypeSwitchClosedNondebounced: self.machine.switch_controller.process_switch(state=1, num=event_value, debounced=False) elif event_type == pinproc.EventTypeSwitchOpenNondebounced: self.machine.switch_controller.process_switch(state=0, num=event_value, debounced=False) else: self.log.warning("Received unrecognized event from the P-ROC. " "Type: %s, Value: %s", event_type, event_value) self.proc.watchdog_tickle() self.proc.flush() def write_hw_rule(self, switch_obj, sw_activity, driver_obj, driver_action, disable_on_release, drive_now, **driver_settings_overrides): driver_settings = deepcopy(driver_obj.hw_driver.driver_settings) driver_settings.update(driver_obj.hw_driver.merge_driver_settings( **driver_settings_overrides)) self.log.debug("Setting HW Rule. Switch: %s, Switch_action: %s, Driver:" " %s, Driver action: %s. Driver settings: %s", switch_obj.name, sw_activity, driver_obj.name, driver_action, driver_settings) if 'debounced' in driver_settings_overrides: if driver_settings_overrides['debounced']: debounced = True else: debounced = False elif switch_obj.config['debounce']: debounced = True else: debounced = False # Note the P-ROC uses a 125ms non-configurable recycle time. So any # non-zero value passed here will enable the 125ms recycle. # PinPROC calls this "reload active" (it's an "active reload timer") reload_active = False if driver_settings['recycle_ms']: reload_active = True # We only want to notify_host for debounced switch events. We use non- # debounced for hw_rules since they're faster, but we don't want to # notify the host on them since the host would then get two events # one for the nondebounced followed by one for the debounced. notify_host = False if debounced: notify_host = True rule = {'notifyHost': notify_host, 'reloadActive': reload_active} # Now let's figure out what type of P-ROC action we need to take. invert_switch_for_disable = False proc_actions = set() if driver_action == 'pulse': if (driver_settings['pwm_on_ms'] and driver_settings['pwm_off_ms']): proc_actions.add('pulsed_patter') pulse_ms = driver_settings['pulse_ms'] pwm_on = driver_settings['pwm_on_ms'] pwm_off = driver_settings['pwm_off_ms'] else: proc_actions.add('pulse') pulse_ms = driver_settings['pulse_ms'] if disable_on_release: proc_actions.add('disable') invert_switch_for_disable = True elif driver_action == 'hold': if (driver_settings['pwm_on_ms'] and driver_settings['pwm_off_ms']): proc_actions.add('patter') pulse_ms = driver_settings['pulse_ms'] pwm_on = driver_settings['pwm_on_ms'] pwm_off = driver_settings['pwm_off_ms'] else: proc_actions.add('enable') if disable_on_release: proc_actions.add('disable') invert_switch_for_disable = True elif driver_action == 'disable': proc_actions.add('disable') for proc_action in proc_actions: this_driver = list() this_sw_activity = sw_activity # The P-ROC ties hardware rules to switches, with a list of linked # drivers that should change state based on a switch activity. # Since MPF applies the rules one-at-a-time, we have to read the # existing linked drivers from the hardware for that switch, add # our new driver to the list, then re-update the rule on the hw. 
            if proc_action == 'pulse':
                this_driver = [pinproc.driver_state_pulse(
                    driver_obj.hw_driver.state(), pulse_ms)]

            elif proc_action == 'patter':
                this_driver = [pinproc.driver_state_patter(
                    driver_obj.hw_driver.state(), pwm_on, pwm_off, pulse_ms,
                    True)]
                # todo above param True should not be there. Change to now?

            elif proc_action == 'enable':
                this_driver = [pinproc.driver_state_pulse(
                    driver_obj.hw_driver.state(), 0)]

            elif proc_action == 'disable':
                if invert_switch_for_disable:
                    this_sw_activity ^= 1

                this_driver = [pinproc.driver_state_disable(
                    driver_obj.hw_driver.state())]

            elif proc_action == 'pulsed_patter':
                this_driver = [pinproc.driver_state_pulsed_patter(
                    driver_obj.hw_driver.state(), pwm_on, pwm_off,
                    pulse_ms)]

            if this_sw_activity == 0 and debounced:
                event_type = "open_debounced"
            elif this_sw_activity == 0 and not debounced:
                event_type = "open_nondebounced"
            elif this_sw_activity == 1 and debounced:
                event_type = "closed_debounced"
            else:  # if sw_activity == 1 and not debounced:
                event_type = "closed_nondebounced"

            # merge in any previously-configured driver rules for this switch
            final_driver = list(this_driver)  # need to make an actual copy
            sw_rule_string = str(switch_obj.name)+str(event_type)
            if sw_rule_string in self.hw_switch_rules:
                for driver in self.hw_switch_rules[sw_rule_string]:
                    final_driver.append(driver)
                self.hw_switch_rules[sw_rule_string].extend(this_driver)
            else:
                self.hw_switch_rules[sw_rule_string] = this_driver

            self.log.debug("Writing HW rule for switch: %s, driver: %s, "
                           "event_type: %s, rule: %s, final_driver: %s, "
                           "drive now: %s", switch_obj.name, driver_obj.name,
                           event_type, rule, final_driver, drive_now)
            self.proc.switch_update_rule(switch_obj.number, event_type, rule,
                                         final_driver, drive_now)

    def clear_hw_rule(self, sw_name):
        """Clears a hardware rule.

        This is used if you want to remove the linkage between a switch and
        some driver activity. For example, if you wanted to disable your
        flippers (so that a player pushing the flipper buttons wouldn't cause
        the flippers to flip), you'd call this method with your flipper button
        as the *sw_name*.

        Args:
            sw_name : String name of the switch whose rule you want to clear.
        """
        sw_num = self.machine.switches[sw_name].number

        self.log.debug("Clearing HW rule for switch: %s", sw_num)

        self.proc.switch_update_rule(sw_num, 'open_nondebounced',
                                     {'notifyHost': False,
                                      'reloadActive': False}, [])
        self.proc.switch_update_rule(sw_num, 'closed_nondebounced',
                                     {'notifyHost': False,
                                      'reloadActive': False}, [])
        self.proc.switch_update_rule(sw_num, 'open_debounced',
                                     {'notifyHost': True,
                                      'reloadActive': False}, [])
        self.proc.switch_update_rule(sw_num, 'closed_debounced',
                                     {'notifyHost': True,
                                      'reloadActive': False}, [])

        # list() so we can safely delete entries while iterating
        for entry in list(self.hw_switch_rules.keys()):
            if entry.startswith(self.machine.switches.number(sw_num).name):

                # disable any drivers from this rule which are active now
                # todo make this an option?
                for driver_dict in self.hw_switch_rules[entry]:
                    self.proc.driver_disable(driver_dict['driverNum'])

                # Remove this rule from our list
                del self.hw_switch_rules[entry]

        # todo need to read in the notifyHost settings and reapply those
        # appropriately.
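
# --- Illustrative sketch (not part of the original module) ---
# The rule bookkeeping in write_hw_rule() above boils down to accumulating
# driver-state dicts per (switch name + event type) key, then re-writing the
# *whole* accumulated list to the hardware each time. In isolation:
#
#   rules = {}
#   key = 'flipper_left' + 'closed_nondebounced'
#   rules.setdefault(key, []).append({'driverNum': 4})   # first rule
#   rules.setdefault(key, []).append({'driverNum': 5})   # merged second rule
#   # switch_update_rule() must now receive both driver states:
#   assert [d['driverNum'] for d in rules[key]] == [4, 5]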
class PDBLED(object):
    """Represents an RGB LED connected to a PD-LED board."""

    def __init__(self, board, address, proc_driver, invert=False):
        self.log = logging.getLogger('PDBLED')
        self.board = board
        self.address = address
        self.proc = proc_driver
        self.invert = invert

        # todo make sure self.address is a 3-element list

        self.log.debug("Creating PD-LED item: board: %s, "
                       "RGB outputs: %s", self.board,
                       self.address)

    def color(self, color):
        """Instantly sets this LED to the color passed.

        Args:
            color: a 3-item list of integers representing R, G, and B values,
            0-255 each.
        """

        #self.log.debug("Setting Color. Board: %s, Address: %s, Color: %s",
        #               self.board, self.address, color)

        self.proc.led_color(self.board, self.address[0],
                            self.normalize_color(color[0]))
        self.proc.led_color(self.board, self.address[1],
                            self.normalize_color(color[1]))
        self.proc.led_color(self.board, self.address[2],
                            self.normalize_color(color[2]))

    def fade(self, color, fade_ms):
        # todo
        # not implemented. For now we'll just immediately set the color.
        # (color() takes only the color; passing fade_ms to it was a bug)
        self.color(color)

    def disable(self):
        """Disables (turns off) this LED instantly. For multi-color LEDs it
        turns all elements off.
        """

        self.proc.led_color(self.board, self.address[0],
                            self.normalize_color(0))
        self.proc.led_color(self.board, self.address[1],
                            self.normalize_color(0))
        self.proc.led_color(self.board, self.address[2],
                            self.normalize_color(0))

    def enable(self):
        """Enables (turns on) this LED instantly. For multi-color LEDs it
        turns all elements on.
        """
        # color() normalizes each channel itself, so pass the raw values
        self.color([255, 255, 255])

    def normalize_color(self, color):
        if self.invert:
            return 255-color
        else:
            return color


class PDBSwitch(object):
    """Base class for switches connected to a P-ROC."""
    def __init__(self, pdb, number_str):
        upper_str = number_str.upper()
        if upper_str.startswith('SD'):
            self.sw_type = 'dedicated'
            self.sw_number = int(upper_str[2:])
        elif '/' in upper_str:
            self.sw_type = 'matrix'
            self.sw_number = self.parse_matrix_num(upper_str)
        else:
            self.sw_type = 'proc'
            self.sw_number = int(number_str)

    def proc_num(self):
        return self.sw_number

    def parse_matrix_num(self, num_str):
        cr_list = num_str.split('/')
        return 32 + int(cr_list[0])*16 + int(cr_list[1])


class PDBCoil(object):
    """Base class for coils connected to a P-ROC that are controlled via
    P-ROC driver boards (i.e. the PD-16 board).
    """
    def __init__(self, pdb, number_str):
        self.pdb = pdb
        upper_str = number_str.upper()
        if self.is_direct_coil(upper_str):
            self.coil_type = 'dedicated'
            # integer division so banknum stays an int on Python 3 too
            self.banknum = (int(number_str[1:]) - 1) // 8
            self.outputnum = int(number_str[1:])
        elif self.is_pdb_coil(number_str):
            self.coil_type = 'pdb'
            (self.boardnum, self.banknum, self.outputnum) = decode_pdb_address(
                number_str, self.pdb.aliases)
        else:
            self.coil_type = 'unknown'

    def bank(self):
        if self.coil_type == 'dedicated':
            return self.banknum
        elif self.coil_type == 'pdb':
            return self.boardnum * 2 + self.banknum
        else:
            return -1

    def output(self):
        return self.outputnum

    def is_direct_coil(self, string):
        if len(string) < 2 or len(string) > 3:
            return False
        if string[0] != 'C':
            return False
        if not string[1:].isdigit():
            return False
        return True

    def is_pdb_coil(self, string):
        return is_pdb_address(string, self.pdb.aliases)


class PDBLight(object):
    """Base class for lights connected to a PD-8x8 driver board."""
    def __init__(self, pdb, number_str):
        self.pdb = pdb
        upper_str = number_str.upper()
        if self.is_direct_lamp(upper_str):
            self.lamp_type = 'dedicated'
            self.output = int(number_str[1:])
        elif self.is_pdb_lamp(number_str):
            # C-Ax-By-z:R-Ax-By-z or C-x/y/z:R-x/y/z
            self.lamp_type = 'pdb'
            source_addr, sink_addr = self.split_matrix_addr_parts(number_str)
            (self.source_boardnum, self.source_banknum, self.source_outputnum)\
                = decode_pdb_address(source_addr, self.pdb.aliases)
            (self.sink_boardnum, self.sink_banknum, self.sink_outputnum)\
                = decode_pdb_address(sink_addr, self.pdb.aliases)
        else:
            self.lamp_type = 'unknown'

    def source_board(self):
        return self.source_boardnum

    def sink_board(self):
        return self.sink_boardnum

    def source_bank(self):
        return self.source_boardnum * 2 + self.source_banknum

    def sink_bank(self):
        return self.sink_boardnum * 2 + self.sink_banknum

    def source_output(self):
        return self.source_outputnum

    def sink_output(self):
        return self.sink_outputnum

    def dedicated_bank(self):
        return self.banknum

    def dedicated_output(self):
        return self.output

    def is_direct_lamp(self, string):
        if len(string) < 2 or len(string) > 3:
            return False
        if string[0] != 'L':
            return False
        if not string[1:].isdigit():
            return False
        return True

    def split_matrix_addr_parts(self, string):
        """ Input is of form C-Ax-By-z:R-Ax-By-z or C-x/y/z:R-x/y/z or
        aliasX:aliasY. We want to return only the address part: Ax-By-z,
        x/y/z, or aliasX. That is, remove the two character prefix if present.
        """
        addrs = string.rsplit(':')
        if len(addrs) != 2:
            return []
        addrs_out = []
        for addr in addrs:
            bits = addr.split('-')
            if len(bits) == 1:
                addrs_out.append(addr)  # Append unchanged.
            else:  # Generally this will be len(bits) 2 or 4.
                # Remove the first bit and rejoin.
                addrs_out.append('-'.join(bits[1:]))
        return addrs_out

    def is_pdb_lamp(self, string):
        params = self.split_matrix_addr_parts(string)
        if len(params) != 2:
            return False
        for addr in params:
            if not is_pdb_address(addr, self.pdb.aliases):
                return False
        return True


class PROCSwitch(object):
    def __init__(self, number):
        self.log = logging.getLogger('PROCSwitch')
        self.number = number


class PROCDriver(object):
    """ Base class for drivers connected to a P-ROC. This class is used for
    all drivers, regardless of whether they're connected to a P-ROC driver
    board (such as the PD-16 or PD-8x8) or an OEM driver board.
""" def __init__(self, number, proc_driver, config, machine): self.log = logging.getLogger('PROCDriver') self.number = number self.proc = proc_driver self.driver_settings = self.create_driver_settings(machine, **config) self.driver_settings['number'] = number self.driver_settings.update(self.merge_driver_settings(**config)) self.log.debug("Driver Settings for %s: %s", self.number, self.driver_settings) def create_driver_settings(self, machine, pulse_ms=None, **kwargs): return_dict = dict() if pulse_ms is None: pulse_ms = machine.config['mpf']['default_pulse_ms'] try: return_dict['allow_enable'] = kwargs['allow_enable'] except KeyError: return_dict['allow_enable'] = False return_dict['pulse_ms'] = int(pulse_ms) return_dict['recycle_ms'] = 0 return_dict['pwm_on_ms'] = 0 return_dict['pwm_off_ms'] = 0 return return_dict def merge_driver_settings(self, pulse_ms=None, pwm_on_ms=None, pwm_off_ms=None, pulse_power=None, hold_power=None, pulse_power32=None, hold_power32=None, pulse_pwm_mask=None, hold_pwm_mask=None, recycle_ms=None, **kwargs ): if pulse_power: raise NotImplementedError('"pulse_power" has not been ' 'implemented yet') if pulse_power32: raise NotImplementedError('"pulse_power32" has not been ' 'implemented yet') if hold_power32: raise NotImplementedError('"hold_power32" has not been ' 'implemented yet') if pulse_pwm_mask: raise NotImplementedError('"pulse_pwm_mask" has not been ' 'implemented yet') if hold_pwm_mask: raise NotImplementedError('"hold_pwm_mask" has not been ' 'implemented yet') return_dict = dict() # figure out what kind of enable we need: if hold_power: return_dict['pwm_on_ms'], return_dict['pwm_off_ms'] = ( Util.pwm8_to_on_off(hold_power)) elif pwm_off_ms and pwm_on_ms: return_dict['pwm_on_ms'] = int(pwm_on_ms) return_dict['pwm_off_ms'] = int(pwm_off_ms) if pulse_ms is not None: return_dict['pulse_ms'] = int(pulse_ms) elif 'pwm_on_ms' in return_dict: return_dict['pulse_ms'] = 0 if recycle_ms and int(recycle_ms) == 125: return_dict['recycle_ms'] = 125 elif recycle_ms and recycle_ms is not None: raise ValueError('P-ROC requires recycle_ms of 0 or 125') found_pwm_on = False found_pwm_off = False if 'pwm_on_ms' in return_dict and return_dict['pwm_on_ms']: found_pwm_on = True if 'pwm_off_ms' in return_dict and return_dict['pwm_off_ms']: found_pwm_off = True if (found_pwm_off and not found_pwm_on) or ( found_pwm_on and not found_pwm_off): raise ValueError("Error: Using pwm requires both pwm_on and " "pwm_off values.") return return_dict def disable(self): """Disables (turns off) this driver.""" self.log.debug('Disabling Driver') self.proc.driver_disable(self.number) def enable(self): """Enables (turns on) this driver.""" if (self.driver_settings['pwm_on_ms'] and self.driver_settings['pwm_off_ms']): self.log.debug('Enabling. 
Initial pulse_ms: %s, pwm_on_ms: %s, '
                           'pwm_off_ms: %s',
                           self.driver_settings['pulse_ms'],
                           self.driver_settings['pwm_on_ms'],
                           self.driver_settings['pwm_off_ms'])

            self.proc.driver_patter(self.number,
                                    self.driver_settings['pwm_on_ms'],
                                    self.driver_settings['pwm_off_ms'],
                                    self.driver_settings['pulse_ms'], True)
        else:
            self.log.debug('Enabling at 100%')

            if not ('allow_enable' in self.driver_settings and
                    self.driver_settings['allow_enable']):
                self.log.warning("Received a command to enable this coil "
                                 "without pwm, but 'allow_enable' has not "
                                 "been set to True in this coil's "
                                 "configuration.")
                return

            self.proc.driver_schedule(number=self.number, schedule=0xffffffff,
                                      cycle_seconds=0, now=True)

    def pulse(self, milliseconds=None):
        """Enables this driver for `milliseconds`.

        ``ValueError`` will be raised if `milliseconds` is outside of the
        range 0-255.
        """

        if not milliseconds:
            milliseconds = self.driver_settings['pulse_ms']

        self.log.debug('Pulsing for %sms', milliseconds)
        self.proc.driver_pulse(self.number, milliseconds)

        return milliseconds

    def get_pulse_ms(self):
        return self.driver_settings['pulse_ms']

    def state(self):
        """Returns a dictionary representing this driver's current
        configuration state.
        """
        return self.proc.driver_get_state(self.number)

    def tick(self):
        pass


class PROCMatrixLight(object):

    def __init__(self, number, proc_driver):
        self.log = logging.getLogger('PROCMatrixLight')
        self.number = number
        self.proc = proc_driver

    def off(self):
        """Disables (turns off) this driver."""
        self.proc.driver_disable(self.number)
        self.last_time_changed = time.time()

    def on(self, brightness=255, fade_ms=0, start=0):
        """Enables (turns on) this driver."""
        if brightness >= 255:
            self.proc.driver_schedule(number=self.number, schedule=0xffffffff,
                                      cycle_seconds=0, now=True)
        elif brightness == 0:
            self.off()
        else:
            pass
            # patter rates of 10/1 through 2/9

        self.last_time_changed = time.time()

    '''
    Koen's fade code he posted to pinballcontrollers:
    def mode_tick(self):
        if self.fade_counter % 10 == 0:
            for lamp in self.game.lamps:
                if lamp.name.find("gi0") == -1:
                    var = 4.0*math.sin(0.02*float(self.fade_counter)) + 5.0
                    on_time = 11-round(var)
                    off_time = round(var)
                    lamp.patter(on_time, off_time)
            self.fade_counter += 1
    '''


class PDBConfig(object):
    """ This class is only used when the P-ROC is configured to use P-ROC
    driver boards such as the PD-16 or PD-8x8. i.e. not when it's operating
    in WPC or Stern mode.
""" indexes = [] proc = None aliases = None # set in __init__ def __init__(self, proc, config): self.log = logging.getLogger('PDBConfig') self.log.debug("Processing P-ROC Driver Board configuration") self.proc = proc # Set config defaults if 'P_ROC' in config and 'lamp_matrix_strobe_time' \ in config['P_ROC']: self.lamp_matrix_strobe_time = int(config['P_ROC'] ['lamp_matrix_strobe_time']) else: self.lamp_matrix_strobe_time = 100 if 'P_ROC' in config and 'watchdog_time' \ in config['P_ROC']: self.watchdog_time = int(config['P_ROC'] ['watchdog_time']) else: self.watchdog_time = 1000 if 'P_ROC' in config and 'use_watchdog' \ in config['P_ROC']: self.use_watchdog = config['P_ROC']['use_watchdog'] else: self.use_watchdog = True # Initialize some lists for data collecting coil_bank_list = [] lamp_source_bank_list = [] lamp_list = [] lamp_list_for_index = [] self.aliases = [] if 'PRDriverAliases' in config: for alias_dict in config['PRDriverAliases']: alias = DriverAlias(alias_dict['expr'], alias_dict['repl']) self.aliases.append(alias) # Make a list of unique coil banks if 'coils' in config: for name in config['coils']: item_dict = config['coils'][name] coil = PDBCoil(self, str(item_dict['number'])) if coil.bank() not in coil_bank_list: coil_bank_list.append(coil.bank()) # Make a list of unique lamp source banks. The P-ROC only supports 2. # TODO: What should be done if 2 is exceeded? if 'matrix_lights' in config: for name in config['matrix_lights']: item_dict = config['matrix_lights'][name] lamp = PDBLight(self, str(item_dict['number'])) # Catalog PDB banks # Dedicated lamps don't use PDB banks. They use P-ROC direct # driver pins. if lamp.lamp_type == 'dedicated': pass elif lamp.lamp_type == 'pdb': if lamp.source_bank() not in lamp_source_bank_list: lamp_source_bank_list.append(lamp.source_bank()) # Create dicts of unique sink banks. The source index is # needed when setting up the driver groups. lamp_dict = {'source_index': lamp_source_bank_list.index( lamp.source_bank()), 'sink_bank': lamp.sink_bank(), 'source_output': lamp.source_output()} # lamp_dict_for_index. This will be used later when the # p-roc numbers are requested. The requestor won't know # the source_index, but it will know the source board. # This is why two separate lists are needed. lamp_dict_for_index = {'source_board': lamp.source_board(), 'sink_bank': lamp.sink_bank(), 'source_output': lamp.source_output()} if lamp_dict not in lamp_list: lamp_list.append(lamp_dict) lamp_list_for_index.append(lamp_dict_for_index) # Create a list of indexes. The PDB banks will be mapped into this # list. The index of the bank is used to calculate the P-ROC driver # number for each driver. num_proc_banks = pinproc.DriverCount/8 self.indexes = [99] * num_proc_banks self.initialize_drivers(proc) # Set up dedicated driver groups (groups 0-3). for group_ctr in range(0, 4): # TODO: Fix this. PDB Banks 0-3 are also interpreted as dedicated # bank here. enable = group_ctr in coil_bank_list self.log.debug("Driver group %02d (dedicated): Enable=%s", group_ctr, enable) proc.driver_update_group_config(group_ctr, 0, group_ctr, 0, 0, False, True, enable, True) group_ctr += 1 # Process lamps first. The P-ROC can only control so many drivers # directly. Since software won't have the speed to control lamp # matrixes, map the lamps first. If there aren't enough P-ROC driver # groups for coils, the overflow coils can be controlled by software # via VirtualDrivers (which should get set up automatically by this # code.) 
        for i, lamp_dict in enumerate(lamp_list):
            # If the bank is 16 or higher, the P-ROC can't control it
            # directly. Software can't really control lamp matrixes either
            # (need microsecond resolution). Instead of doing crazy logic here
            # for a case that probably won't happen, just ignore these banks.
            if group_ctr >= num_proc_banks or lamp_dict['sink_bank'] >= 16:
                self.log.error("Lamp matrix banks can't be mapped to index "
                               "%d because that's outside of the banks the "
                               "P-ROC can control.", lamp_dict['sink_bank'])
            else:
                self.log.debug("Driver group %02d (lamp sink): slow_time=%d "
                               "enable_index=%d row_activate_index=%d "
                               "row_enable_index=%d matrix=%s", group_ctr,
                               self.lamp_matrix_strobe_time,
                               lamp_dict['sink_bank'],
                               lamp_dict['source_output'],
                               lamp_dict['source_index'], True)
                self.indexes[group_ctr] = lamp_list_for_index[i]
                proc.driver_update_group_config(group_ctr,
                                                self.lamp_matrix_strobe_time,
                                                lamp_dict['sink_bank'],
                                                lamp_dict['source_output'],
                                                lamp_dict['source_index'],
                                                True,
                                                True,
                                                True,
                                                True)
                group_ctr += 1

        for coil_bank in coil_bank_list:
            # If the bank is 16 or higher, the P-ROC can't control it directly.
            # Software will have to do the driver logic and write any changes
            # to the PDB bus. Therefore, map these banks to indexes above the
            # P-ROC's driver count, which will force the drivers to be created
            # as VirtualDrivers. Appending the bank avoids conflicts when
            # group_ctr gets too high.
            if group_ctr >= num_proc_banks or coil_bank >= 16:
                self.log.warning("Driver group %d mapped to driver index "
                                 "outside of P-ROC control. These Drivers "
                                 "will become VirtualDrivers. Note, the "
                                 "index will not match the board/bank "
                                 "number; so software will need to request "
                                 "those values before updating the "
                                 "drivers.", coil_bank)
                self.indexes.append(coil_bank)
            else:
                self.log.debug("Driver group %02d: slow_time=%d Enable "
                               "Index=%d", group_ctr, 0, coil_bank)
                self.indexes[group_ctr] = coil_bank
                proc.driver_update_group_config(group_ctr,
                                                0,
                                                coil_bank,
                                                0,
                                                0,
                                                False,
                                                True,
                                                True,
                                                True)
                group_ctr += 1

        for i in range(group_ctr, 26):
            self.log.debug("Driver group %02d: disabled", i)
            proc.driver_update_group_config(i,
                                            self.lamp_matrix_strobe_time,
                                            0,
                                            0,
                                            0,
                                            False,
                                            True,
                                            False,
                                            True)

        # Make sure there are two indexes. If not, fill them in.
        while len(lamp_source_bank_list) < 2:
            lamp_source_bank_list.append(0)

        # Now set up globals. First disable them to allow the P-ROC to set up
        # the polarities on the Drivers. Then enable them.
        self.configure_globals(proc, lamp_source_bank_list, False)
        self.configure_globals(proc, lamp_source_bank_list, True)

    def initialize_drivers(self, proc):
        # Loop through all of the drivers, initializing them with the polarity.
        for i in range(0, 208):
            state = {'driverNum': i,
                     'outputDriveTime': 0,
                     'polarity': True,
                     'state': False,
                     'waitForFirstTimeSlot': False,
                     'timeslots': 0,
                     'patterOnTime': 0,
                     'patterOffTime': 0,
                     'patterEnable': False,
                     'futureEnable': False}

            proc.driver_update_state(state)

    def configure_globals(self, proc, lamp_source_bank_list, enable=True):

        if enable:
            self.log.debug("Configuring PDB Driver Globals: polarity = %s "
                           "matrix column index 0 = %d matrix column index "
                           "1 = %d", True, lamp_source_bank_list[0],
                           lamp_source_bank_list[1])
        proc.driver_update_global_config(enable,  # Enable outputs?
                                         True,  # Polarity
                                         False,  # N/A
                                         False,  # N/A
                                         1,  # N/A
                                         lamp_source_bank_list[0],
                                         lamp_source_bank_list[1],
                                         False,  # Active low rows? No
                                         False,  # N/A
                                         False,  # Stern? No
                                         False,  # Reset watchdog trigger
                                         self.use_watchdog,  # Enable watchdog
                                         self.watchdog_time)

        # Now set up globals
        proc.driver_update_global_config(True,  # Enable outputs now
                                         True,  # Polarity
                                         False,  # N/A
                                         False,  # N/A
                                         1,  # N/A
                                         lamp_source_bank_list[0],
                                         lamp_source_bank_list[1],
                                         False,  # Active low rows? No
                                         False,  # N/A
                                         False,  # Stern? No
                                         False,  # Reset watchdog trigger
                                         self.use_watchdog,  # Enable watchdog
                                         self.watchdog_time)

    def get_proc_number(self, device_type, number_str):
        """Returns the P-ROC number for the requested driver string.

        This method uses the driver string to look in the indexes list that
        was set up when the PDBs were configured. The resulting P-ROC index
        * 8 is the first driver number in the group, and the driver's output
        number is added to that.
        """
        if device_type == 'coil':
            coil = PDBCoil(self, number_str)
            bank = coil.bank()
            if bank == -1:
                return -1
            index = self.indexes.index(coil.bank())
            num = index * 8 + coil.output()
            return num

        if device_type == 'light':
            lamp = PDBLight(self, number_str)
            if lamp.lamp_type == 'unknown':
                return -1
            elif lamp.lamp_type == 'dedicated':
                return lamp.dedicated_output()

            lamp_dict_for_index = {'source_board': lamp.source_board(),
                                   'sink_bank': lamp.sink_bank(),
                                   'source_output': lamp.source_output()}
            if lamp_dict_for_index not in self.indexes:
                return -1
            index = self.indexes.index(lamp_dict_for_index)
            num = index * 8 + lamp.sink_output()
            return num

        if device_type == 'switch':
            switch = PDBSwitch(self, number_str)
            num = switch.proc_num()
            return num


class DriverAlias(object):
    def __init__(self, key, value):
        self.expr = re.compile(key)
        self.repl = value

    def matches(self, addr):
        return self.expr.match(addr)

    def decode(self, addr):
        return self.expr.sub(repl=self.repl, string=addr)


def is_pdb_address(addr, aliases=[]):
    """Return True if the given address is a valid PDB address."""
    try:
        decode_pdb_address(addr=addr, aliases=aliases)
        return True
    except:
        return False


def decode_pdb_address(addr, aliases=[]):
    """Decodes Ax-By-z or x/y/z into PDB address, bank number, and output
    number.

    Raises a ValueError exception if it is not a PDB address, otherwise
    returns a tuple of (addr, bank, number).
    """
    for alias in aliases:
        if alias.matches(addr):
            addr = alias.decode(addr)
            break

    if '-' in addr:  # Ax-By-z form
        params = addr.rsplit('-')
        if len(params) != 3:
            raise ValueError('pdb address must have 3 components')
        board = int(params[0][1:])
        bank = int(params[1][1:])
        output = int(params[2][0:])
        return board, bank, output

    elif '/' in addr:  # x/y/z form
        params = addr.rsplit('/')
        if len(params) != 3:
            raise ValueError('pdb address must have 3 components')
        board = int(params[0])
        bank = int(params[1])
        output = int(params[2])
        return board, bank, output

    else:
        raise ValueError('PDB address delimiter (- or /) not found.')


class PROCDMD(object):
    """Parent class for a physical DMD attached to a P-ROC.

    Args:
        proc: Reference to the MachineController's proc attribute.
        machine: Reference to the MachineController

    Attributes:
        dmd: Reference to the P-ROC's DMD buffer.
""" def __init__(self, proc, machine): self.proc = proc self.machine = machine self.dmd = pinproc.DMDBuffer(128, 32) # size is hardcoded here since 128x32 is all the P-ROC hw supports # dmd_timing defaults should be 250, 400, 180, 800 if 'P_ROC' in self.machine.config and ( 'dmd_timing_cycles' in self.machine.config['P_ROC']): dmd_timing = Util.string_to_list( self.machine.config['P_ROC']['dmd_timing_cycles']) dmd_timing = [int(i) for i in dmd_timing] self.proc.dmd_update_config(high_cycles=dmd_timing) self.machine.events.add_handler('timer_tick', self.tick) def update(self, data): """Updates the DMD with a new frame. Args: data: A 4096-byte raw string. """ if len(data) == 4096: self.dmd.set_data(data) else: self.machine.log.warning("Received a DMD frame of length %s instead" "of 4096. Discarding...", len(data)) def tick(self): """Updates the physical DMD with the latest frame data. Meant to be called once per machine tick. """ self.proc.dmd_draw(self.dmd) # The MIT License (MIT) # Oringal code on which this module was based: # Copyright (c) 2009-2011 Adam Preble and Gerry Stellenberg # Copyright (c) 2013-2015 Brian Madden and Gabe Knuth # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE.
qcapen/mpf
mpf/platform/p_roc.py
Python
mit
55,296
#!/usr/bin/python

# Try to acquire the lock that indicates which host is the master.
# If we successfully acquire the lock, we retain it forever. (If we leave
# the cluster, another host can get the lock.)
#
# Usage:
#   acquire_master_lock.py <path-to-sr>

import fcntl
import sys
import time
import subprocess
import os
import errno
import json

def logger(msg):
    subprocess.call(["/usr/bin/logger", msg])

def mkdir(d):
    try:
        os.makedirs(d)
    except OSError as exc:
        if exc.errno == errno.EEXIST:
            pass
        else:
            raise

if len(sys.argv) < 2:
    print "Usage: %s <path-to-sr>" % (sys.argv[0])
    sys.exit(1)

sr_path = sys.argv[1]

lock_dir = "%s/.ha/master" % (sr_path)
mkdir(lock_dir)
lock_path = "%s/lock" % (lock_dir)

p = subprocess.Popen(["/usr/bin/hostname"], stdout=subprocess.PIPE)
hostname = p.stdout.readline().rstrip()

logger("Trying to acquire lock %s for host '%s'..." % (lock_path, hostname))

# Try to acquire the lock
fd = open(lock_path, 'w+')
fcntl.flock(fd, fcntl.LOCK_EX)

logger("Acquired master lock!")

# We've got the lock; write to the state file (only the host owning the lock
# is permitted to do this)
state_file = "%s/state.json.tmp" % (lock_dir)
logger("Writing to %s ..." % (state_file))
o = {"master": hostname}
with open(state_file, 'w') as fd_s:
    json.dump(o, fd_s)

# Atomically replace the live state.json file
os.rename("%s/state.json.tmp" % (lock_dir), "%s/state.json" % (lock_dir))

logger("Holding master lock forever")

# Hold the lock forever
while True:
    time.sleep(3600)
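
# --- Illustrative sketch (not part of the original script) ---
# The flock() call above blocks until this host becomes the master. To merely
# probe the lock without blocking, LOCK_NB can be OR'd in; flock() then
# raises IOError/OSError with EAGAIN or EACCES if another host holds it:
#
#   import errno, fcntl
#
#   def try_acquire(path):
#       fd = open(path, 'w+')
#       try:
#           fcntl.flock(fd, fcntl.LOCK_EX | fcntl.LOCK_NB)
#           return fd            # keep fd open to retain the lock
#       except IOError as exc:
#           if exc.errno in (errno.EACCES, errno.EAGAIN):
#               fd.close()
#               return None      # somebody else is master
#           raise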
stefanopanella/xapi-storage-plugins
overlay/usr/libexec/xapi/cluster-stack/corosync/acquire_master_lock.py
Python
lgpl-2.1
1,526
# Importing the libraries import numpy as np import matplotlib.pyplot as plt import pandas as pd from sklearn.linear_model import LinearRegression from sklearn.preprocessing import PolynomialFeatures # Importing the dataset datas = pd.read_csv('data.csv') datas X = datas.iloc[:, 0:1].values y = datas.iloc[:, 1].values poly = PolynomialFeatures(degree = 1) X_poly = poly.fit_transform(X) poly.fit(X_poly, y) lin2 = LinearRegression() lin2.fit(X_poly, y) lin = LinearRegression() lin.fit(X, y) plt.scatter(X, y, color = 'blue') plt.plot(X, lin.predict(X), color = 'red') plt.title('Linear Regression') plt.xlabel('SF') plt.ylabel('numPersons') plt.show() plt.scatter(X, y, color = 'blue') plt.plot(X, lin2.predict(poly.fit_transform(X)), color = 'red') plt.title('Polynomial Regression') plt.xlabel('SF') plt.ylabel('numPersons') plt.show() data2 = pd.read_csv('data2.csv') newX = data2.iloc[:, 0:1].values newX lin.predict(newX) lin2.predict(poly.fit_transform(newX))
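
# --- Illustrative sketch (not part of the original script) ---
# Note that PolynomialFeatures(degree=1) only adds a bias column, so the
# "Polynomial Regression" plot above is effectively a second straight-line
# fit. A genuinely curved fit only requires a higher degree, e.g.:
#
#   poly3 = PolynomialFeatures(degree=3)
#   lin3 = LinearRegression().fit(poly3.fit_transform(X), y)
#   preds = lin3.predict(poly3.fit_transform(newX))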
ldbc/ldbc_snb_datagen
tools/sfs/predict.py
Python
gpl-3.0
980
""" Representation and parsing of HTTP-style status + headers """ from six.moves import range from six import iteritems from warcio.utils import to_native_str, headers_to_str_headers import uuid from six.moves.urllib.parse import quote import re #================================================================= class StatusAndHeaders(object): ENCODE_HEADER_RX = re.compile(r'[=]["\']?([^;"]+)["\']?(?=[;]?)') """ Representation of parsed http-style status line and headers Status Line if first line of request/response Headers is a list of (name, value) tuples An optional protocol which appears on first line may be specified If is_http_request is true, split http verb (instead of protocol) from start of statusline """ def __init__(self, statusline, headers, protocol='', total_len=0, is_http_request=False): if is_http_request: protocol, statusline = statusline.split(' ', 1) self.statusline = statusline self.headers = headers_to_str_headers(headers) self.protocol = protocol self.total_len = total_len self.headers_buff = None def get_header(self, name, default_value=None): """ return header (name, value) if found """ name_lower = name.lower() for value in self.headers: if value[0].lower() == name_lower: return value[1] return default_value def add_header(self, name, value): self.headers.append((name, value)) def replace_header(self, name, value): """ replace header with new value or add new header return old header value, if any """ name_lower = name.lower() for index in range(len(self.headers) - 1, -1, -1): curr_name, curr_value = self.headers[index] if curr_name.lower() == name_lower: self.headers[index] = (curr_name, value) return curr_value self.headers.append((name, value)) return None def remove_header(self, name): """ Remove header (case-insensitive) return True if header removed, False otherwise """ name_lower = name.lower() for index in range(len(self.headers) - 1, -1, -1): if self.headers[index][0].lower() == name_lower: del self.headers[index] return True return False def get_statuscode(self): """ Return the statuscode part of the status response line (Assumes no protocol in the statusline) """ code = self.statusline.split(' ', 1)[0] return code def validate_statusline(self, valid_statusline): """ Check that the statusline is valid, eg. starts with a numeric code. 
If not, replace with passed in valid_statusline """ code = self.get_statuscode() try: code = int(code) assert(code > 0) return True except(ValueError, AssertionError): self.statusline = valid_statusline return False def add_range(self, start, part_len, total_len): """ Add range headers indicating that this a partial response """ content_range = 'bytes {0}-{1}/{2}'.format(start, start + part_len - 1, total_len) self.statusline = '206 Partial Content' self.replace_header('Content-Range', content_range) self.replace_header('Content-Length', str(part_len)) self.replace_header('Accept-Ranges', 'bytes') return self def compute_headers_buffer(self, header_filter=None): """ Set buffer representing headers """ # HTTP headers %-encoded as ascii (see to_ascii_bytes for more info) self.headers_buff = self.to_ascii_bytes(header_filter) def __repr__(self): return "StatusAndHeaders(protocol = '{0}', statusline = '{1}', \ headers = {2})".format(self.protocol, self.statusline, self.headers) def __ne__(self, other): return not (self == other) def __eq__(self, other): if not other: return False return (self.statusline == other.statusline and self.headers == other.headers and self.protocol == other.protocol) def __str__(self, exclude_list=None): return self.to_str(exclude_list) def __bool__(self): return bool(self.statusline or self.headers) __nonzero__ = __bool__ def to_str(self, filter_func=None): string = self.protocol if string and self.statusline: string += ' ' if self.statusline: string += self.statusline if string: string += '\r\n' for h in self.headers: if filter_func: h = filter_func(h) if not h: continue string += ': '.join(h) + '\r\n' return string def to_bytes(self, filter_func=None, encoding='utf-8'): return self.to_str(filter_func).encode(encoding) + b'\r\n' def to_ascii_bytes(self, filter_func=None): """ Attempt to encode the headers block as ascii If encoding fails, call percent_encode_non_ascii_headers() to encode any headers per RFCs """ try: string = self.to_str(filter_func) string = string.encode('ascii') except (UnicodeEncodeError, UnicodeDecodeError): self.percent_encode_non_ascii_headers() string = self.to_str(filter_func) string = string.encode('ascii') return string + b'\r\n' def percent_encode_non_ascii_headers(self, encoding='UTF-8'): """ Encode any headers that are not plain ascii as UTF-8 as per: https://tools.ietf.org/html/rfc8187#section-3.2.3 https://tools.ietf.org/html/rfc5987#section-3.2.2 """ def do_encode(m): return "*={0}''".format(encoding) + quote(to_native_str(m.group(1))) for index in range(len(self.headers) - 1, -1, -1): curr_name, curr_value = self.headers[index] try: # test if header is ascii encodable, no action needed curr_value.encode('ascii') except: # if single value header, (eg. 
no ';'), %-encode entire header if ';' not in curr_value: new_value = quote(curr_value) else: # %-encode value in ; name="value" new_value = self.ENCODE_HEADER_RX.sub(do_encode, curr_value) if new_value == curr_value: new_value = quote(curr_value) self.headers[index] = (curr_name, new_value) # act like a (case-insensitive) dictionary of headers, much like other # python http headers apis including http.client.HTTPMessage # and requests.structures.CaseInsensitiveDict get = get_header __getitem__ = get_header __setitem__ = replace_header __delitem__ = remove_header def __contains__(self, key): return bool(self[key]) #================================================================= def _strip_count(string, total_read): length = len(string) return string.rstrip(), total_read + length #================================================================= class StatusAndHeadersParser(object): """ Parser which consumes a stream support readline() to read status and headers and return a StatusAndHeaders object """ def __init__(self, statuslist, verify=True): self.statuslist = statuslist self.verify = verify def parse(self, stream, full_statusline=None): """ parse stream for status line and headers return a StatusAndHeaders object support continuation headers starting with space or tab """ # status line w newlines intact if full_statusline is None: full_statusline = stream.readline() full_statusline = self.decode_header(full_statusline) statusline, total_read = _strip_count(full_statusline, 0) headers = [] # at end of stream if total_read == 0: raise EOFError() elif not statusline: return StatusAndHeaders(statusline=statusline, headers=headers, protocol='', total_len=total_read) # validate only if verify is set if self.verify: protocol_status = self.split_prefix(statusline, self.statuslist) if not protocol_status: msg = 'Expected Status Line starting with {0} - Found: {1}' msg = msg.format(self.statuslist, statusline) raise StatusAndHeadersParserException(msg, full_statusline) else: protocol_status = statusline.split(' ', 1) line, total_read = _strip_count(self.decode_header(stream.readline()), total_read) while line: result = line.split(':', 1) if len(result) == 2: name = result[0].rstrip(' \t') value = result[1].lstrip() else: name = result[0] value = None next_line, total_read = _strip_count(self.decode_header(stream.readline()), total_read) # append continuation lines, if any while next_line and next_line.startswith((' ', '\t')): if value is not None: value += next_line next_line, total_read = _strip_count(self.decode_header(stream.readline()), total_read) if value is not None: header = (name, value) headers.append(header) line = next_line if len(protocol_status) > 1: statusline = protocol_status[1].strip() else: statusline = '' return StatusAndHeaders(statusline=statusline, headers=headers, protocol=protocol_status[0], total_len=total_read) @staticmethod def split_prefix(key, prefixs): """ split key string into prefix and remainder for first matching prefix from a list """ key_upper = key.upper() for prefix in prefixs: if key_upper.startswith(prefix): plen = len(prefix) return (key_upper[:plen], key[plen:]) @staticmethod def make_warc_id(id_=None): if not id_: id_ = uuid.uuid4() return '<urn:uuid:{0}>'.format(id_) @staticmethod def decode_header(line): try: # attempt to decode as utf-8 first return to_native_str(line, 'utf-8') except: # if fails, default to ISO-8859-1 return to_native_str(line, 'iso-8859-1') #================================================================= class 
StatusAndHeadersParserException(Exception): """ status + headers parsing exception """ def __init__(self, msg, statusline): super(StatusAndHeadersParserException, self).__init__(msg) self.statusline = statusline
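
# --- Illustrative sketch (not part of the original module) ---
# Minimal usage of StatusAndHeaders as defined above: build a response
# header block, manipulate it case-insensitively, then serialize it.
if __name__ == '__main__':
    status_headers = StatusAndHeaders('200 OK',
                                      [('Content-Type', 'text/html')],
                                      protocol='HTTP/1.0')
    status_headers.replace_header('content-type', 'application/json')
    status_headers.add_header('Content-Length', '2')
    assert status_headers.get_header('CONTENT-LENGTH') == '2'
    # to_str() renders "HTTP/1.0 200 OK" followed by one "Name: value"
    # line per header, each terminated with CRLF
    print(status_headers.to_str())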
webrecorder/warcio
warcio/statusandheaders.py
Python
apache-2.0
11,625
import sublime import sublime_plugin import os.path from .notifier import log_fail, log_info class DiscontinuationCommand(sublime_plugin.ApplicationCommand): @property def command_name(self): return "Discontinuation" def run(self): msg = "The Telerik Platform product is retired as of May 10, 2018. For more information about the discontinuation and how you can recover your apps or data, please see the full announcement here: https://www.telerik.com/platform-next-level" additionalMsg = "Telerik recommends NativeScript Sidekick (https://www.nativescript.org/nativescript-sidekick) for developing modern, cross-platform mobile apps with web technologies like JavaScript, Angular, or Vue.js, and Kinvey (https://www.kinvey.com/) for hosting critical business back-end in the cloud." log_fail(msg) log_info(additionalMsg) def is_enabled(self): return True
Icenium/appbuilder-sublime-package
app_builder/discontinuation_command.py
Python
apache-2.0
922
# _*_ coding:utf-8 _*_
from __future__ import unicode_literals
from django.core.urlresolvers import reverse
from django.db import models
from django.contrib.auth.models import User

# Create your models here.
'''Posts (Post), categories (Category), and tags (Tag)'''


class Category(models.Model):
    """
    Django requires models to inherit from the models.Model class.
    Category only needs a simple name field.
    CharField specifies the data type of name; CharField is a character field.
    Its max_length parameter sets the maximum length; category names longer
    than that cannot be stored in the database.
    Django also provides many other data types, such as DateTimeField for
    datetimes and IntegerField for integers.
    """
    name = models.CharField(max_length=100, verbose_name=u'分类')

    class Meta:
        verbose_name = u"分类"
        verbose_name_plural = verbose_name

    def __unicode__(self):
        return self.name


class Tag(models.Model):
    """
    Tag is just as simple as Category.
    Once again: be sure to inherit from models.Model!!!
    """
    name = models.CharField(max_length=100, verbose_name=u'标签')

    class Meta:
        verbose_name = u"标签"
        verbose_name_plural = verbose_name

    def __unicode__(self):
        return self.name


class Post(models.Model):
    """
    The table for posts is a bit more complex, mainly because it involves
    more fields.
    """

    # Title
    title = models.CharField(max_length=50, verbose_name=u'标题')

    # Post body: we use TextField here.
    # CharField is fine for fairly short strings, but a post body can be a
    # large block of text, so TextField is used to store long text.
    body = models.TextField()

    # These two columns store the post's creation time and last modification
    # time; fields that store times use the DateTimeField type.
    created_time = models.DateTimeField()
    modified_time = models.DateTimeField()

    # Post excerpt. A post may have no excerpt, but by default CharField
    # requires a value, otherwise an error is raised.
    # Passing blank=True to CharField allows empty values.
    excerpt = models.CharField(max_length=200, verbose_name=u'摘要', blank=True)

    # Category and tags; their models are defined above.
    # Here we link the post table to the category and tag tables, but the two
    # relations differ slightly.
    # A post can have only one category, while one category can contain many
    # posts, so we use ForeignKey, a one-to-many relation.
    # A post can have several tags and one tag can apply to several posts, so
    # we use ManyToManyField, a many-to-many relation.
    # We also allow posts without tags, hence blank=True on tags.
    # If ForeignKey / ManyToManyField are unfamiliar, see the tutorial's
    # explanation, or refer to the official documentation:
    # https://docs.djangoproject.com/en/1.10/topics/db/models/#relationships
    category = models.ForeignKey(Category)
    tags = models.ManyToManyField(Tag, blank=True)

    # Post author. User here is imported from django.contrib.auth.models.
    # django.contrib.auth is Django's built-in app for handling user
    # registration, login and so on; User is the user model Django ships.
    # We link posts and User via ForeignKey.
    # Since a post has exactly one author and an author may write many posts,
    # this is a one-to-many relation, just like Category.
    author = models.ForeignKey(User, verbose_name=u'作者')

    class Meta:
        verbose_name = u"正文"
        verbose_name_plural = verbose_name

    def __unicode__(self):
        return self.title

    # Custom get_absolute_url method.
    # Remember to import the reverse function (from django.core.urlresolvers
    # here; newer Django versions import it from django.urls).
    def get_absolute_url(self):
        return reverse('blog:detail', kwargs={'pk': self.pk})

    """
    author: the post author. User here is imported from
    django.contrib.auth.models.
    django.contrib.auth is Django's built-in app dedicated to handling user
    registration, login and related flows.
    User is the user model Django has already written for us, just like the
    Category class and the others we defined ourselves.
    Here we link posts to User via ForeignKey: since a post can have only one
    author while an author may write many posts, this is a one-to-many
    relation, similar to Category.
    """
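
# --- Illustrative sketch (not part of the original module) ---
# Assuming migrations have been applied, the models above can be exercised
# from a Django shell (python manage.py shell) roughly like this:
#
#   from django.contrib.auth.models import User
#   from django.utils import timezone
#   from blog.models import Category, Tag, Post
#
#   cat = Category.objects.create(name='Django')
#   tag = Tag.objects.create(name='tutorial')
#   post = Post.objects.create(
#       title='Hello', body='...', excerpt='',
#       created_time=timezone.now(), modified_time=timezone.now(),
#       category=cat, author=User.objects.first())
#   post.tags.add(tag)           # many-to-many links are added after save
#   post.get_absolute_url()      # reverses the 'blog:detail' URL pattern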
TwocatWhelp/lizhen
blog/models.py
Python
gpl-3.0
4,691
import sys import json has_answers = set() for line in open(sys.argv[1]): answers = json.loads(line.strip().split("\t")[2]) if answers != []: has_answers.add(line.split("\t")[0]) for line in sys.stdin: if line.startswith("#") or line.strip() == "": continue sent = json.loads(line) if sent['sentence'] in has_answers: sys.stdout.write(line)
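
# --- Illustrative sketch (not part of the original script) ---
# The filter reads an answer file named on the command line and the sentence
# stream on stdin; the file names here are hypothetical:
#
#   python select_only_answerable_questions.py answers.tsv \
#       < questions.jsonl > answerable.jsonl
#
# Column 0 of each answers.tsv row must match the 'sentence' field of the
# stdin JSON objects, and column 2 must hold a JSON list of answers (rows
# with an empty list are dropped).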
sivareddyg/UDepLambda
scripts/select_only_answerable_questions.py
Python
apache-2.0
387
import os from setuptools import setup, find_packages here = os.path.abspath(os.path.dirname(__file__)) with open(os.path.join(here, 'README.txt')) as f: README = f.read() with open(os.path.join(here, 'CHANGES.txt')) as f: CHANGES = f.read() requires = [ 'pyramid', 'pyramid_chameleon', 'pyramid_debugtoolbar', 'pyramid_tm', 'SQLAlchemy', 'transaction', 'zope.sqlalchemy', 'waitress', 'pyramid_layout' ] setup(name='MyShop', version='0.0', description='MyShop', long_description=README + '\n\n' + CHANGES, classifiers=[ "Programming Language :: Python", "Framework :: Pyramid", "Topic :: Internet :: WWW/HTTP", "Topic :: Internet :: WWW/HTTP :: WSGI :: Application", ], author='', author_email='', url='', keywords='web wsgi bfg pylons pyramid', packages=find_packages(), include_package_data=True, zip_safe=False, test_suite='myshop', install_requires=requires, entry_points="""\ [paste.app_factory] main = myshop:main [console_scripts] initialize_MyShop_db = myshop.scripts.initializedb:main """, )
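
# --- Illustrative sketch (not part of the original file) ---
# With this setup.py, a development install exposes the console script
# declared under [console_scripts]; the .ini file name below is an
# assumption, not something defined in this file:
#
#   pip install -e .
#   initialize_MyShop_db development.ini
#   pserve development.ini    # pserve ships with the pyramid dependency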
Akagi201/learning-python
pyramid/MyShop/setup.py
Python
mit
1,213
from django import forms from django.forms.formsets import formset_factory from formsetfield.fields import FormSetField class AdultForm(forms.Form): fullname = forms.CharField() passport = forms.CharField() class ChildForm(forms.Form): fullname = forms.CharField() birth_certificate = forms.CharField() class PassengersForm(forms.Form): adults = FormSetField(formset_factory(AdultForm, extra=2)) children = FormSetField(formset_factory(ChildForm, extra=2))
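
# --- Illustrative sketch (not part of the original module) ---
# Unbound usage of the composite form above; how bound POST data must be
# keyed depends on FormSetField's internal formset prefixes, so only the
# unbound case is shown:
#
#   form = PassengersForm()
#   list(form.fields)   # -> ['adults', 'children'], each wrapping a formset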
yumike/django-formsetfield
example/orders/forms.py
Python
isc
491
# Copyright (c) 2013 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. import logging import os import unittest from telemetry.core import browser_finder from telemetry.unittest import simple_mock from telemetry.unittest import options_for_unittests from telemetry.unittest import DisabledTest _ = simple_mock.DONT_CARE def _GetCredentialsPath(): # TODO: This shouldn't depend on tools/perf. credentials_path = os.path.join( os.path.dirname(__file__), '..', '..', '..', '..', 'perf', 'data', 'credentials.json') if not os.path.exists(credentials_path): return None return credentials_path class FormBasedCredentialsBackendUnitTestBase(unittest.TestCase): def setUp(self): self._credentials_type = None @DisabledTest def testRealLoginIfPossible(self): credentials_path = _GetCredentialsPath() if not credentials_path: logging.warning('Credentials file not found, skipping test.') return options = options_for_unittests.GetCopy() with browser_finder.FindBrowser(options).Create() as b: b.Start() b.credentials.credentials_path = credentials_path if not b.credentials.CanLogin(self._credentials_type): return ret = b.credentials.LoginNeeded(b.tabs[0], self._credentials_type) self.assertTrue(ret) @DisabledTest def testRealLoginWithDontOverrideProfileIfPossible(self): credentials_path = _GetCredentialsPath() if not credentials_path: logging.warning('Credentials file not found, skipping test.') return options = options_for_unittests.GetCopy() # Login once to make sure our default profile is logged in. with browser_finder.FindBrowser(options).Create() as b: b.Start() b.credentials.credentials_path = credentials_path if not b.credentials.CanLogin(self._credentials_type): return tab = b.tabs[0] # Should not be logged in, since this is a fresh credentials # instance. self.assertFalse(b.credentials.IsLoggedIn(self._credentials_type)) # Log in. ret = b.credentials.LoginNeeded(tab, self._credentials_type) # Make sure login was successful. self.assertTrue(ret) self.assertTrue(b.credentials.IsLoggedIn(self._credentials_type)) # Reset state. Now the backend thinks we're logged out, even # though we are logged in in our current browser session. This # simulates the effects of running with --dont-override-profile. b.credentials._ResetLoggedInState() # pylint: disable=W0212 # Make sure the backend thinks we're logged out. self.assertFalse(b.credentials.IsLoggedIn(self._credentials_type)) self.assertTrue(b.credentials.CanLogin(self._credentials_type)) # Attempt to login again. This should detect that we've hit # the 'logged in' page instead of the login form, and succeed # instead of timing out. ret = b.credentials.LoginNeeded(tab, self._credentials_type) # Make sure our login attempt did in fact succeed and set the # backend's internal state to 'logged in'. 
self.assertTrue(ret) self.assertTrue(b.credentials.IsLoggedIn(self._credentials_type)) def testLoginUsingMock(self): raise NotImplementedError() def _LoginUsingMock(self, backend, login_page_url, email_element_id, password_element_id): # pylint: disable=R0201 tab = simple_mock.MockObject() config = {'username': 'blah', 'password': 'blargh'} tab.ExpectCall('Navigate', login_page_url) tab.ExpectCall('EvaluateJavaScript', _).WillReturn(False) tab.ExpectCall('EvaluateJavaScript', _).WillReturn(True) tab.ExpectCall('EvaluateJavaScript', _).WillReturn(False) tab.ExpectCall('WaitForDocumentReadyStateToBeInteractiveOrBetter') def VerifyEmail(js): assert email_element_id in js assert 'blah' in js tab.ExpectCall('ExecuteJavaScript', _).WhenCalled(VerifyEmail) def VerifyPw(js): assert password_element_id in js assert 'largh' in js tab.ExpectCall('ExecuteJavaScript', _).WhenCalled(VerifyPw) def VerifySubmit(js): assert '.submit' in js tab.ExpectCall('ExecuteJavaScript', _).WhenCalled(VerifySubmit) # Checking for form still up. tab.ExpectCall('EvaluateJavaScript', _).WillReturn(False) backend.LoginNeeded(tab, config)
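
# --- Illustrative sketch (not part of the original test) ---
# The simple_mock pattern used above: declare the calls you expect, in
# order, and attach canned return values or verification hooks.
#
#   tab = simple_mock.MockObject()
#   tab.ExpectCall('Navigate', 'http://example.com/login')  # URL is made up
#   tab.ExpectCall('EvaluateJavaScript', _).WillReturn(True)
#   tab.ExpectCall('ExecuteJavaScript', _).WhenCalled(
#       lambda js: None)               # inspect the JS string here
#   backend.LoginNeeded(tab, config)   # drives the expected call sequence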
aospx-kitkat/platform_external_chromium_org
tools/telemetry/telemetry/core/chrome/form_based_credentials_backend_unittest_base.py
Python
bsd-3-clause
4,461
import timeit import unittest class TestBasicFunctions(unittest.TestCase): def test_add_feature_performance(self): print timeit.timeit( "rollout.add_feature(Feature('feature_for_all', groups=['ALL']))", setup='from pyrollout import Rollout; from pyrollout.feature import Feature; rollout = Rollout()', number=100000) def test_can_none_performance(self): print timeit.timeit( "rollout.can({'id':1}, 'feature_for_none')", setup='from pyrollout import Rollout; from pyrollout.feature import Feature; rollout = Rollout();' "rollout.add_feature(Feature('feature_for_none', groups=['NONE']))", number=100000) def test_can_all_performance(self): print timeit.timeit( "rollout.can({'id':1}, 'feature_for_all')", setup='from pyrollout import Rollout; from pyrollout.feature import Feature; rollout = Rollout();' "rollout.add_feature(Feature('feature_for_all', groups=['ALL']))", number=100000) def test_can_group_performance(self): print timeit.timeit( "rollout.can({'id':1, 'groups': ['foo', 'bar']}, 'feature_for_group')", setup='from pyrollout import Rollout; from pyrollout.feature import Feature; rollout = Rollout();' "rollout.add_feature(Feature('feature_for_group', groups=['foo']))", number=100000) def test_can_user_performance(self): print timeit.timeit( "rollout.can({'id':1}, 'feature_for_me')", setup='from pyrollout import Rollout; from pyrollout.feature import Feature; rollout = Rollout();' "rollout.add_feature(Feature('feature_for_me', users=[{'id': 1}]))", number=100000) def test_can_pct_performance(self): print timeit.timeit( "rollout.can({'id':1}, 'feature_for_pct')", setup='from pyrollout import Rollout; from pyrollout.feature import Feature; rollout = Rollout();' "rollout.add_feature(Feature('feature_for_pct', percentage=100))", number=100000)
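
# --- Illustrative sketch (not part of the original tests) ---
# The same measurement with the setup string factored out once instead of
# being repeated per test (number=100000 matches the tests above):
#
#   import timeit
#
#   SETUP = ("from pyrollout import Rollout; "
#            "from pyrollout.feature import Feature; "
#            "rollout = Rollout(); "
#            "rollout.add_feature(Feature('feature_for_all', groups=['ALL']))")
#
#   print timeit.timeit("rollout.can({'id': 1}, 'feature_for_all')",
#                       setup=SETUP, number=100000)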
brechin/pyrollout
tests/test_performance.py
Python
mit
2,150
# vim: tabstop=4 shiftwidth=4 softtabstop=4 # Copyright 2012 United States Government as represented by the # Administrator of the National Aeronautics and Space Administration. # All Rights Reserved. # # Copyright 2012 Nebula, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from django.conf.urls.defaults import * from .views import GlobalOverview urlpatterns = patterns('', url(r'^$', GlobalOverview.as_view(), name='index'), )
tylertian/Openstack
openstack F/horizon/horizon/dashboards/syspanel/overview/urls.py
Python
apache-2.0
971
def foo(a, b, *c): pass x = (5,6) foo(<arg1>1, <arg2>2, <arg3>4, <arg4>*x)
asedunov/intellij-community
python/testData/paramInfo/StarredParamAndArg.py
Python
apache-2.0
80
'''
Plot learning curve.

From: examples/model_selection/plot_learning_curve.py in the sklearn repo
'''

import numpy as np
import matplotlib.pyplot as plt
from sklearn.learning_curve import learning_curve


def plot_learning_curve(estimator, title, X, y, ylim=None, cv=None,
                        n_jobs=1, train_sizes=np.linspace(.1, 1.0, 10)):
    """
    Generate a simple plot of the test and training learning curves.

    Parameters
    ----------
    estimator : object type that implements the "fit" and "predict" methods
        An object of that type which is cloned for each validation.

    title : string
        Title for the chart.

    X : array-like, shape (n_samples, n_features)
        Training vector, where n_samples is the number of samples and
        n_features is the number of features.

    y : array-like, shape (n_samples) or (n_samples, n_features), optional
        Target relative to X for classification or regression;
        None for unsupervised learning.

    ylim : tuple, shape (ymin, ymax), optional
        Defines minimum and maximum y-values plotted.

    cv : integer, cross-validation generator, optional
        If an integer is passed, it is the number of folds (defaults to 3).
        Specific cross-validation objects can be passed, see
        sklearn.cross_validation module for the list of possible objects

    n_jobs : integer, optional
        Number of jobs to run in parallel (default 1).
    """
    plt.figure()
    plt.title(title)
    if ylim is not None:
        plt.ylim(*ylim)
    plt.xlabel("Training examples")
    plt.ylabel("Score")
    train_sizes, train_scores, test_scores = learning_curve(
        estimator, X, y, cv=cv, n_jobs=n_jobs, train_sizes=train_sizes)
    train_scores_mean = np.mean(train_scores, axis=1)
    train_scores_std = np.std(train_scores, axis=1)
    test_scores_mean = np.mean(test_scores, axis=1)
    test_scores_std = np.std(test_scores, axis=1)
    plt.grid()

    plt.fill_between(train_sizes, train_scores_mean - train_scores_std,
                     train_scores_mean + train_scores_std, alpha=0.1,
                     color="r")
    plt.fill_between(train_sizes, test_scores_mean - test_scores_std,
                     test_scores_mean + test_scores_std, alpha=0.1, color="g")
    plt.plot(train_sizes, train_scores_mean, 'o-', color="r",
             label="Training score")
    plt.plot(train_sizes, test_scores_mean, 'o-', color="g",
             label="Cross-validation score")

    plt.legend(loc="best")
    return plt
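
# --- Illustrative sketch (not part of the original module) ---
# Typical call, mirroring the sklearn example this module was taken from;
# the dataset and estimator choices here are illustrative:
if __name__ == '__main__':
    from sklearn.datasets import load_digits
    from sklearn.naive_bayes import GaussianNB

    digits = load_digits()
    plot_learning_curve(GaussianNB(), "Learning Curves (Naive Bayes)",
                        digits.data, digits.target, ylim=(0.7, 1.01), cv=10)
    plt.show()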
e-koch/Phys-595
project_code/Machine Learning/plot_learning_curve.py
Python
mit
2,524
# Copyright (c) 2008-2010, 2013 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
# Copyright (c) 2015-2016 Claudiu Popa <pcmanticore@gmail.com>

# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
# For details: https://github.com/PyCQA/pylint/blob/master/COPYING

"""unit test for the extensions.diadefslib modules"""

import six
import pytest

import astroid

from pylint.pyreverse.inspector import Linker
from pylint.pyreverse.diadefslib import *

from unittest_pyreverse_writer import Config, get_project


def _process_classes(classes):
    """extract class names of a list"""
    return sorted([(isinstance(c.node, astroid.ClassDef), c.title)
                   for c in classes])


def _process_relations(relations):
    """extract relation indices from a relation list"""
    result = []
    for rel_type, rels in six.iteritems(relations):
        for rel in rels:
            result.append((rel_type, rel.from_object.title,
                           rel.to_object.title))
    result.sort()
    return result


@pytest.fixture
def HANDLER():
    return DiadefsHandler(Config())


@pytest.fixture(scope='module')
def PROJECT():
    return get_project('data')


def test_option_values(HANDLER, PROJECT):
    """test for ancestor, associated and module options"""
    df_h = DiaDefGenerator(Linker(PROJECT), HANDLER)
    cl_config = Config()
    cl_config.classes = ['Specialization']
    cl_h = DiaDefGenerator(Linker(PROJECT), DiadefsHandler(cl_config))
    assert (0, 0) == df_h._get_levels()
    assert False == df_h.module_names
    assert (-1, -1) == cl_h._get_levels()
    assert True == cl_h.module_names
    for hndl in [df_h, cl_h]:
        hndl.config.all_ancestors = True
        hndl.config.all_associated = True
        hndl.config.module_names = True
        hndl._set_default_options()
        assert (-1, -1) == hndl._get_levels()
        assert True == hndl.module_names
    handler = DiadefsHandler(Config())
    df_h = DiaDefGenerator(Linker(PROJECT), handler)
    cl_config = Config()
    cl_config.classes = ['Specialization']
    cl_h = DiaDefGenerator(Linker(PROJECT), DiadefsHandler(cl_config))
    for hndl in [df_h, cl_h]:
        hndl.config.show_ancestors = 2
        hndl.config.show_associated = 1
        hndl.config.module_names = False
        hndl._set_default_options()
        assert (2, 1) == hndl._get_levels()
        assert False == hndl.module_names

#def test_default_values():
#    """test default values for package or class diagrams"""
#    # TODO : should test difference between default values for package
#    # or class diagrams


class TestDefaultDiadefGenerator(object):
    def test_known_values1(self, HANDLER, PROJECT):
        dd = DefaultDiadefGenerator(Linker(PROJECT), HANDLER).visit(PROJECT)
        assert len(dd) == 2
        keys = [d.TYPE for d in dd]
        assert keys == ['package', 'class']
        pd = dd[0]
        assert pd.title == 'packages No Name'
        modules = sorted([(isinstance(m.node, astroid.Module), m.title)
                          for m in pd.objects])
        assert modules == [(True, 'data'),
                           (True, 'data.clientmodule_test'),
                           (True, 'data.suppliermodule_test')]
        cd = dd[1]
        assert cd.title == 'classes No Name'
        classes = _process_classes(cd.objects)
        assert classes == [(True, 'Ancestor'),
                           (True, 'DoNothing'),
                           (True, 'Interface'),
                           (True, 'Specialization')]

    _should_rels = [('association', 'DoNothing', 'Ancestor'),
                    ('association', 'DoNothing', 'Specialization'),
                    ('implements', 'Ancestor', 'Interface'),
                    ('specialization', 'Specialization', 'Ancestor')]

    def test_extract_relations(self, HANDLER, PROJECT):
        """test extract_relations between classes"""
        cd = DefaultDiadefGenerator(Linker(PROJECT), HANDLER).visit(PROJECT)[1]
        cd.extract_relationships()
        relations = _process_relations(cd.relationships)
        assert relations == self._should_rels

    def test_functional_relation_extraction(self):
        """functional test of relations extraction;
        different classes possibly in different modules"""
        # XXX should be catching pyreverse environment problem but doesn't
        # pyreverse doesn't extract the relations, but this test is ok
        project = get_project('data')
        handler = DiadefsHandler(Config())
        diadefs = handler.get_diadefs(project, Linker(project, tag=True))
        cd = diadefs[1]
        relations = _process_relations(cd.relationships)
        assert relations == self._should_rels

    def test_known_values2(self, HANDLER):
        project = get_project('data.clientmodule_test')
        dd = DefaultDiadefGenerator(Linker(project), HANDLER).visit(project)
        assert len(dd) == 1
        keys = [d.TYPE for d in dd]
        assert keys == ['class']
        cd = dd[0]
        assert cd.title == 'classes No Name'
        classes = _process_classes(cd.objects)
        assert classes == [(True, 'Ancestor'), (True, 'Specialization')]


def test_known_values1(HANDLER, PROJECT):
    HANDLER.config.classes = ['Specialization']
    cdg = ClassDiadefGenerator(Linker(PROJECT), HANDLER)
    special = 'data.clientmodule_test.Specialization'
    cd = cdg.class_diagram(PROJECT, special)
    assert cd.title == special
    classes = _process_classes(cd.objects)
    assert classes == [(True, 'data.clientmodule_test.Ancestor'),
                       (True, special),
                       (True, 'data.suppliermodule_test.DoNothing')]


def test_known_values2(HANDLER, PROJECT):
    HANDLER.config.classes = ['Specialization']
    HANDLER.config.module_names = False
    cd = ClassDiadefGenerator(Linker(PROJECT), HANDLER).class_diagram(
        PROJECT, 'data.clientmodule_test.Specialization')
    assert cd.title == 'data.clientmodule_test.Specialization'
    classes = _process_classes(cd.objects)
    assert classes == [(True, 'Ancestor'), (True, 'DoNothing'),
                       (True, 'Specialization')]
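
# A minimal, self-contained sketch (not part of the upstream test file)
# illustrating the data shape the _process_relations helper above expects;
# the Obj/Rel namedtuples are hypothetical stand-ins for pyreverse's
# relationship objects.
from collections import namedtuple

_Obj = namedtuple('Obj', 'title')
_Rel = namedtuple('Rel', 'from_object to_object')

_sample = {'specialization': [_Rel(_Obj('Specialization'), _Obj('Ancestor'))],
           'association': [_Rel(_Obj('DoNothing'), _Obj('Ancestor'))]}

assert _process_relations(_sample) == [
    ('association', 'DoNothing', 'Ancestor'),
    ('specialization', 'Specialization', 'Ancestor'),
]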
arju88nair/projectCulminate
venv/lib/python3.5/site-packages/pylint/test/unittest_pyreverse_diadefs.py
Python
apache-2.0
6,195
""" Title: Using pre-trained word embeddings Author: [fchollet](https://twitter.com/fchollet) Date created: 2020/05/05 Last modified: 2020/05/05 Description: Text classification on the Newsgroup20 dataset using pre-trained GloVe word embeddings. """ """ ## Setup """ import numpy as np import tensorflow as tf from tensorflow import keras """ ## Introduction In this example, we show how to train a text classification model that uses pre-trained word embeddings. We'll work with the Newsgroup20 dataset, a set of 20,000 message board messages belonging to 20 different topic categories. For the pre-trained word embeddings, we'll use [GloVe embeddings](http://nlp.stanford.edu/projects/glove/). """ """ ## Download the Newsgroup20 data """ data_path = keras.utils.get_file( "news20.tar.gz", "http://www.cs.cmu.edu/afs/cs.cmu.edu/project/theo-20/www/data/news20.tar.gz", untar=True, ) """ ## Let's take a look at the data """ import os import pathlib data_dir = pathlib.Path(data_path).parent / "20_newsgroup" dirnames = os.listdir(data_dir) print("Number of directories:", len(dirnames)) print("Directory names:", dirnames) fnames = os.listdir(data_dir / "comp.graphics") print("Number of files in comp.graphics:", len(fnames)) print("Some example filenames:", fnames[:5]) """ Here's a example of what one file contains: """ print(open(data_dir / "comp.graphics" / "38987").read()) """ As you can see, there are header lines that are leaking the file's category, either explicitly (the first line is literally the category name), or implicitly, e.g. via the `Organization` filed. Let's get rid of the headers: """ samples = [] labels = [] class_names = [] class_index = 0 for dirname in sorted(os.listdir(data_dir)): class_names.append(dirname) dirpath = data_dir / dirname fnames = os.listdir(dirpath) print("Processing %s, %d files found" % (dirname, len(fnames))) for fname in fnames: fpath = dirpath / fname f = open(fpath, encoding="latin-1") content = f.read() lines = content.split("\n") lines = lines[10:] content = "\n".join(lines) samples.append(content) labels.append(class_index) class_index += 1 print("Classes:", class_names) print("Number of samples:", len(samples)) """ There's actually one category that doesn't have the expected number of files, but the difference is small enough that the problem remains a balanced classification problem. """ """ ## Shuffle and split the data into training & validation sets """ # Shuffle the data seed = 1337 rng = np.random.RandomState(seed) rng.shuffle(samples) rng = np.random.RandomState(seed) rng.shuffle(labels) # Extract a training & validation split validation_split = 0.2 num_validation_samples = int(validation_split * len(samples)) train_samples = samples[:-num_validation_samples] val_samples = samples[-num_validation_samples:] train_labels = labels[:-num_validation_samples] val_labels = labels[-num_validation_samples:] """ ## Create a vocabulary index Let's use the `TextVectorization` to index the vocabulary found in the dataset. Later, we'll use the same layer instance to vectorize the samples. Our layer will only consider the top 20,000 words, and will truncate or pad sequences to be actually 200 tokens long. """ from tensorflow.keras.layers import TextVectorization vectorizer = TextVectorization(max_tokens=20000, output_sequence_length=200) text_ds = tf.data.Dataset.from_tensor_slices(train_samples).batch(128) vectorizer.adapt(text_ds) """ You can retrieve the computed vocabulary used via `vectorizer.get_vocabulary()`. 
Let's print the top 5 words: """ vectorizer.get_vocabulary()[:5] """ Let's vectorize a test sentence: """ output = vectorizer([["the cat sat on the mat"]]) output.numpy()[0, :6] """ As you can see, "the" gets represented as "2". Why not 0, given that "the" was the first word in the vocabulary? That's because index 0 is reserved for padding and index 1 is reserved for "out of vocabulary" tokens. Here's a dict mapping words to their indices: """ voc = vectorizer.get_vocabulary() word_index = dict(zip(voc, range(len(voc)))) """ As you can see, we obtain the same encoding as above for our test sentence: """ test = ["the", "cat", "sat", "on", "the", "mat"] [word_index[w] for w in test] """ ## Load pre-trained word embeddings """ """ Let's download pre-trained GloVe embeddings (a 822M zip file). You'll need to run the following commands: ``` !wget http://nlp.stanford.edu/data/glove.6B.zip !unzip -q glove.6B.zip ``` """ """ The archive contains text-encoded vectors of various sizes: 50-dimensional, 100-dimensional, 200-dimensional, 300-dimensional. We'll use the 100D ones. Let's make a dict mapping words (strings) to their NumPy vector representation: """ path_to_glove_file = os.path.join( os.path.expanduser("~"), ".keras/datasets/glove.6B.100d.txt" ) embeddings_index = {} with open(path_to_glove_file) as f: for line in f: word, coefs = line.split(maxsplit=1) coefs = np.fromstring(coefs, "f", sep=" ") embeddings_index[word] = coefs print("Found %s word vectors." % len(embeddings_index)) """ Now, let's prepare a corresponding embedding matrix that we can use in a Keras `Embedding` layer. It's a simple NumPy matrix where entry at index `i` is the pre-trained vector for the word of index `i` in our `vectorizer`'s vocabulary. """ num_tokens = len(voc) + 2 embedding_dim = 100 hits = 0 misses = 0 # Prepare embedding matrix embedding_matrix = np.zeros((num_tokens, embedding_dim)) for word, i in word_index.items(): embedding_vector = embeddings_index.get(word) if embedding_vector is not None: # Words not found in embedding index will be all-zeros. # This includes the representation for "padding" and "OOV" embedding_matrix[i] = embedding_vector hits += 1 else: misses += 1 print("Converted %d words (%d misses)" % (hits, misses)) """ Next, we load the pre-trained word embeddings matrix into an `Embedding` layer. Note that we set `trainable=False` so as to keep the embeddings fixed (we don't want to update them during training). """ from tensorflow.keras.layers import Embedding embedding_layer = Embedding( num_tokens, embedding_dim, embeddings_initializer=keras.initializers.Constant(embedding_matrix), trainable=False, ) """ ## Build the model A simple 1D convnet with global max pooling and a classifier at the end. """ from tensorflow.keras import layers int_sequences_input = keras.Input(shape=(None,), dtype="int64") embedded_sequences = embedding_layer(int_sequences_input) x = layers.Conv1D(128, 5, activation="relu")(embedded_sequences) x = layers.MaxPooling1D(5)(x) x = layers.Conv1D(128, 5, activation="relu")(x) x = layers.MaxPooling1D(5)(x) x = layers.Conv1D(128, 5, activation="relu")(x) x = layers.GlobalMaxPooling1D()(x) x = layers.Dense(128, activation="relu")(x) x = layers.Dropout(0.5)(x) preds = layers.Dense(len(class_names), activation="softmax")(x) model = keras.Model(int_sequences_input, preds) model.summary() """ ## Train the model First, convert our list-of-strings data to NumPy arrays of integer indices. The arrays are right-padded. 
""" x_train = vectorizer(np.array([[s] for s in train_samples])).numpy() x_val = vectorizer(np.array([[s] for s in val_samples])).numpy() y_train = np.array(train_labels) y_val = np.array(val_labels) """ We use categorical crossentropy as our loss since we're doing softmax classification. Moreover, we use `sparse_categorical_crossentropy` since our labels are integers. """ model.compile( loss="sparse_categorical_crossentropy", optimizer="rmsprop", metrics=["acc"] ) model.fit(x_train, y_train, batch_size=128, epochs=20, validation_data=(x_val, y_val)) """ ## Export an end-to-end model Now, we may want to export a `Model` object that takes as input a string of arbitrary length, rather than a sequence of indices. It would make the model much more portable, since you wouldn't have to worry about the input preprocessing pipeline. Our `vectorizer` is actually a Keras layer, so it's simple: """ string_input = keras.Input(shape=(1,), dtype="string") x = vectorizer(string_input) preds = model(x) end_to_end_model = keras.Model(string_input, preds) probabilities = end_to_end_model.predict( [["this message is about computer graphics and 3D modeling"]] ) class_names[np.argmax(probabilities[0])]
keras-team/keras-io
examples/nlp/pretrained_word_embeddings.py
Python
apache-2.0
8,473
import sympy import tempfile import os from sympy import symbols, Eq, Mod from sympy.external import import_module from sympy.tensor import IndexedBase, Idx from sympy.utilities.autowrap import autowrap, ufuncify, CodeWrapError from sympy.utilities.pytest import skip numpy = import_module('numpy', min_module_version='1.6.1') Cython = import_module('Cython', min_module_version='0.15.1') f2py = import_module('numpy.f2py', __import__kwargs={'fromlist': ['f2py']}) f2pyworks = False if f2py: try: autowrap(symbols('x'), 'f95', 'f2py') except (CodeWrapError, ImportError, OSError): f2pyworks = False else: f2pyworks = True a, b, c = symbols('a b c') n, m, d = symbols('n m d', integer=True) A, B, C = symbols('A B C', cls=IndexedBase) i = Idx('i', m) j = Idx('j', n) k = Idx('k', d) def has_module(module): """ Return True if module exists, otherwise run skip(). module should be a string. """ # To give a string of the module name to skip(), this function takes a # string. So we don't waste time running import_module() more than once, # just map the three modules tested here in this dict. modnames = {'numpy': numpy, 'Cython': Cython, 'f2py': f2py} if modnames[module]: if module == 'f2py' and not f2pyworks: skip("Couldn't run f2py.") return True skip("Couldn't import %s." % module) # # test runners used by several language-backend combinations # def runtest_autowrap_twice(language, backend): f = autowrap((((a + b)/c)**5).expand(), language, backend) g = autowrap((((a + b)/c)**4).expand(), language, backend) # check that autowrap updates the module name. Else, g gives the same as f assert f(1, -2, 1) == -1.0 assert g(1, -2, 1) == 1.0 def runtest_autowrap_trace(language, backend): has_module('numpy') trace = autowrap(A[i, i], language, backend) assert trace(numpy.eye(100)) == 100 def runtest_autowrap_matrix_vector(language, backend): has_module('numpy') x, y = symbols('x y', cls=IndexedBase) expr = Eq(y[i], A[i, j]*x[j]) mv = autowrap(expr, language, backend) # compare with numpy's dot product M = numpy.random.rand(10, 20) x = numpy.random.rand(20) y = numpy.dot(M, x) assert numpy.sum(numpy.abs(y - mv(M, x))) < 1e-13 def runtest_autowrap_matrix_matrix(language, backend): has_module('numpy') expr = Eq(C[i, j], A[i, k]*B[k, j]) matmat = autowrap(expr, language, backend) # compare with numpy's dot product M1 = numpy.random.rand(10, 20) M2 = numpy.random.rand(20, 15) M3 = numpy.dot(M1, M2) assert numpy.sum(numpy.abs(M3 - matmat(M1, M2))) < 1e-13 def runtest_ufuncify(language, backend): has_module('numpy') a, b, c = symbols('a b c') fabc = ufuncify([a, b, c], a*b + c, backend=backend) facb = ufuncify([a, c, b], a*b + c, backend=backend) grid = numpy.linspace(-2, 2, 50) b = numpy.linspace(-5, 4, 50) c = numpy.linspace(-1, 1, 50) expected = grid*b + c numpy.testing.assert_allclose(fabc(grid, b, c), expected) numpy.testing.assert_allclose(facb(grid, c, b), expected) def runtest_issue_10274(language, backend): expr = (a - b + c)**(13) tmp = tempfile.mkdtemp() f = autowrap(expr, language, backend, tempdir=tmp, helpers=('helper', a - b + c, (a, b, c))) assert f(1, 1, 1) == 1 for file in os.listdir(tmp): if file.startswith("wrapped_code_") and file.endswith(".c"): fil = open(tmp + '/' + file) lines = fil.readlines() assert lines[0] == "/******************************************************************************\n" assert "Code generated with sympy " + sympy.__version__ in lines[1] assert lines[2:] == [ " * *\n", " * See http://www.sympy.org/ for more information. 
*\n", " * *\n", " * This file is part of 'autowrap' *\n", " ******************************************************************************/\n", "#include " + '"' + file[:-1]+ 'h"' + "\n", "#include <math.h>\n", "\n", "double helper(double a, double b, double c) {\n", "\n", " double helper_result;\n", " helper_result = a - b + c;\n", " return helper_result;\n", "\n", "}\n", "\n", "double autofunc(double a, double b, double c) {\n", "\n", " double autofunc_result;\n", " autofunc_result = pow(helper(a, b, c), 13);\n", " return autofunc_result;\n", "\n", "}\n", ] def runtest_issue_15337(language, backend): has_module('numpy') # NOTE : autowrap was originally designed to only accept an iterable for # the kwarg "helpers", but in issue 10274 the user mistakenly thought that # if there was only a single helper it did not need to be passed via an # iterable that wrapped the helper tuple. There were no tests for this # behavior so when the code was changed to accept a single tuple it broke # the original behavior. These tests below ensure that both now work. a, b, c, d, e = symbols('a, b, c, d, e') expr = (a - b + c - d + e)**13 exp_res = (1. - 2. + 3. - 4. + 5.)**13 f = autowrap(expr, language, backend, args=(a, b, c, d, e), helpers=('f1', a - b + c, (a, b, c))) numpy.testing.assert_allclose(f(1, 2, 3, 4, 5), exp_res) f = autowrap(expr, language, backend, args=(a, b, c, d, e), helpers=(('f1', a - b, (a, b)), ('f2', c - d, (c, d)))) numpy.testing.assert_allclose(f(1, 2, 3, 4, 5), exp_res) def test_issue_15230(): has_module('f2py') x, y = symbols('x, y') expr = Mod(x, 3.0) - Mod(y, -2.0) f = autowrap(expr, args=[x, y], language='F95') exp_res = float(expr.xreplace({x: 3.5, y: 2.7}).evalf()) assert abs(f(3.5, 2.7) - exp_res) < 1e-14 x, y = symbols('x, y', integer=True) expr = Mod(x, 3) - Mod(y, -2) f = autowrap(expr, args=[x, y], language='F95') assert f(3, 2) == expr.xreplace({x: 3, y: 2}) # # tests of language-backend combinations # # f2py def test_wrap_twice_f95_f2py(): has_module('f2py') runtest_autowrap_twice('f95', 'f2py') def test_autowrap_trace_f95_f2py(): has_module('f2py') runtest_autowrap_trace('f95', 'f2py') def test_autowrap_matrix_vector_f95_f2py(): has_module('f2py') runtest_autowrap_matrix_vector('f95', 'f2py') def test_autowrap_matrix_matrix_f95_f2py(): has_module('f2py') runtest_autowrap_matrix_matrix('f95', 'f2py') def test_ufuncify_f95_f2py(): has_module('f2py') runtest_ufuncify('f95', 'f2py') def test_issue_15337_f95_f2py(): has_module('f2py') runtest_issue_15337('f95', 'f2py') # Cython def test_wrap_twice_c_cython(): has_module('Cython') runtest_autowrap_twice('C', 'cython') def test_autowrap_trace_C_Cython(): has_module('Cython') runtest_autowrap_trace('C99', 'cython') def test_autowrap_matrix_vector_C_cython(): has_module('Cython') runtest_autowrap_matrix_vector('C99', 'cython') def test_autowrap_matrix_matrix_C_cython(): has_module('Cython') runtest_autowrap_matrix_matrix('C99', 'cython') def test_ufuncify_C_Cython(): has_module('Cython') runtest_ufuncify('C99', 'cython') def test_issue_10274_C_cython(): has_module('Cython') runtest_issue_10274('C89', 'cython') def test_issue_15337_C_cython(): has_module('Cython') runtest_issue_15337('C89', 'cython') def test_autowrap_custom_printer(): has_module('Cython') from sympy import pi from sympy.utilities.codegen import C99CodeGen from sympy.printing.ccode import C99CodePrinter from sympy.functions.elementary.exponential import exp class PiPrinter(C99CodePrinter): def _print_Pi(self, expr): return "S_PI" printer = PiPrinter() gen = 
C99CodeGen(printer=printer) gen.preprocessor_statements.append('#include "shortpi.h"') expr = pi * a expected = ( '#include "%s"\n' '#include <math.h>\n' '#include "shortpi.h"\n' '\n' 'double autofunc(double a) {\n' '\n' ' double autofunc_result;\n' ' autofunc_result = S_PI*a;\n' ' return autofunc_result;\n' '\n' '}\n' ) tmpdir = tempfile.mkdtemp() # write a trivial header file to use in the generated code open(os.path.join(tmpdir, 'shortpi.h'), 'w').write('#define S_PI 3.14') func = autowrap(expr, backend='cython', tempdir=tmpdir, code_gen=gen) assert func(4.2) == 3.14 * 4.2 # check that the generated code is correct for filename in os.listdir(tmpdir): if filename.startswith('wrapped_code') and filename.endswith('.c'): with open(os.path.join(tmpdir, filename)) as f: lines = f.readlines() expected = expected % filename.replace('.c', '.h') assert ''.join(lines[7:]) == expected # Numpy def test_ufuncify_numpy(): # This test doesn't use Cython, but if Cython works, then there is a valid # C compiler, which is needed. has_module('Cython') runtest_ufuncify('C99', 'numpy')
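
# A minimal direct-usage sketch (not part of the upstream test suite) of the
# autowrap API that the runners above exercise. It assumes a working Cython
# toolchain, which is exactly what has_module('Cython') guards for.
def test_autowrap_basic_C_cython():
    has_module('Cython')
    x, y = symbols('x y')
    f = autowrap((x + y)**2, backend='cython')
    assert f(1.0, 2.0) == 9.0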
kaushik94/sympy
sympy/external/tests/test_autowrap.py
Python
bsd-3-clause
9,687
# (C) British Crown Copyright 2011 - 2017, Met Office
#
# This file is part of cartopy.
#
# cartopy is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the
# Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# cartopy is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with cartopy. If not, see <https://www.gnu.org/licenses/>.

from __future__ import (absolute_import, division, print_function)

import base64
import contextlib
import distutils
import os
import glob
import shutil
import warnings

import matplotlib as mpl
import matplotlib.pyplot as plt
import matplotlib.patches as mpatches
import matplotlib.testing.compare as mcompare
import matplotlib._pylab_helpers as pyplot_helpers

try:
    from matplotlib.testing import setup as mpl_setup
except ImportError:
    from matplotlib.tests import setup as mpl_setup


MPL_VERSION = distutils.version.LooseVersion(mpl.__version__)


class ImageTesting(object):
    """
    Provides a convenient class for running visual Matplotlib tests.

    In general, this class should be used as a decorator to a test function
    which generates one (or more) figures.

    ::

        @ImageTesting(['simple_test'])
        def test_simple():
            import matplotlib.pyplot as plt
            plt.plot(range(10))

    To find out where the result and expected images reside, one can create
    an empty ImageTesting class instance and get the paths from the
    :meth:`expected_path` and :meth:`result_path` methods::

        >>> import os
        >>> import cartopy.tests.mpl
        >>> img_testing = cartopy.tests.mpl.ImageTesting([])
        >>> exp_fname = img_testing.expected_path('<TESTNAME>', '<IMGNAME>')
        >>> result_fname = img_testing.result_path('<TESTNAME>', '<IMGNAME>')
        >>> img_test_mod_dir = os.path.dirname(cartopy.__file__)

        >>> print('Result:', os.path.relpath(result_fname, img_test_mod_dir))
        ... # doctest: +ELLIPSIS
        Result: ...output/<TESTNAME>/result-<IMGNAME>.png

        >>> print('Expected:', os.path.relpath(exp_fname, img_test_mod_dir))
        Expected: tests/mpl/baseline_images/mpl/<TESTNAME>/<IMGNAME>.png

    .. note::

        Subclasses of the ImageTesting class may decide to change the
        location of the expected and result images. However, the same
        technique for finding the locations of the images should hold true.

    """

    #: The path where the standard ``baseline_images`` exist.
    root_image_results = os.path.dirname(__file__)

    #: The path where the images generated by the tests should go.
    image_output_directory = os.path.join(root_image_results, 'output')

    if not os.access(image_output_directory, os.W_OK):
        if not os.access(os.getcwd(), os.W_OK):
            raise IOError('Write access to a local disk is required to run '
                          'image tests. Run the tests from a current working '
                          'directory you have write access to in order to '
                          'avoid this issue.')
        else:
            image_output_directory = os.path.join(os.getcwd(),
                                                  'cartopy_test_output')

    def __init__(self, img_names,
                 tolerance=(0.1 if MPL_VERSION < '1.4' else 0.5)):
        # With matplotlib v1.3 the tolerance keyword is an RMS of the pixel
        # differences, as computed by matplotlib.testing.compare.calculate_rms
        self.img_names = img_names
        self.tolerance = tolerance

    def expected_path(self, test_name, img_name, ext='.png'):
        """
        Return the full path (minus extension) of where the expected image
        should be found, given the name of the image being tested and the
        name of the test being run.
        """
        expected_fname = os.path.join(self.root_image_results,
                                      'baseline_images', 'mpl', test_name,
                                      img_name)
        return expected_fname + ext

    def result_path(self, test_name, img_name, ext='.png'):
        """
        Return the full path (minus extension) of where the result image
        should be found, given the name of the image being tested and the
        name of the test being run.
        """
        result_fname = os.path.join(self.image_output_directory, test_name,
                                    'result-' + img_name)
        return result_fname + ext

    def run_figure_comparisons(self, figures, test_name):
        """
        Run the figure comparisons against the ``img_names``.

        The number of figures passed must be equal to the number of
        image names in ``self.img_names``.

        .. note::

            The figures are not closed by this method. If using the decorator
            version of ImageTesting, they will be closed for you.

        """
        n_figures_msg = ('Expected %s figures (based on the number of '
                         'image result filenames), but there are %s figures '
                         'available. The most likely reason for this is that '
                         'this test is producing too many figures, '
                         '(alternatively if not using ImageTesting as a '
                         'decorator, it is possible that a test run prior to '
                         'this one has not closed its figures).'
                         '' % (len(self.img_names), len(figures)))
        assert len(figures) == len(self.img_names), n_figures_msg

        for img_name, figure in zip(self.img_names, figures):
            expected_path = self.expected_path(test_name, img_name, '.png')
            result_path = self.result_path(test_name, img_name, '.png')

            if not os.path.isdir(os.path.dirname(expected_path)):
                os.makedirs(os.path.dirname(expected_path))

            if not os.path.isdir(os.path.dirname(result_path)):
                os.makedirs(os.path.dirname(result_path))

            self.save_figure(figure, result_path)
            self.do_compare(result_path, expected_path, self.tolerance)

    def save_figure(self, figure, result_fname):
        """
        The actual call which saves the figure.

        Returns nothing.

        May be overridden to do figure based pre-processing (such
        as removing text objects etc.)
        """
        figure.savefig(result_fname)

    def do_compare(self, result_fname, expected_fname, tol):
        """
        Runs the comparison of the result file with the expected file.

        If an RMS difference greater than ``tol`` is found an assertion
        error is raised with an appropriate message with the paths to the
        files concerned.

        """
        if not os.path.exists(expected_fname):
            warnings.warn('Created image in %s' % expected_fname)
            shutil.copy2(result_fname, expected_fname)

        err = mcompare.compare_images(expected_fname, result_fname,
                                      tol=tol, in_decorator=True)

        if err:
            msg = ('Images were different (RMS: %s).\n%s %s %s\nConsider '
                   'running idiff to inspect these differences.'
'' % (err['rms'], err['actual'], err['expected'], err['diff'])) assert False, msg def __call__(self, test_func): """Called when the decorator is applied to a function.""" test_name = test_func.__name__ mod_name = test_func.__module__ if mod_name == '__main__': import sys fname = sys.modules[mod_name].__file__ mod_name = os.path.basename(os.path.splitext(fname)[0]) mod_name = mod_name.rsplit('.', 1)[-1] def wrapped(*args, **kwargs): orig_backend = plt.get_backend() plt.switch_backend('agg') mpl_setup() if pyplot_helpers.Gcf.figs: warnings.warn('Figures existed before running the %s %s test.' ' All figures should be closed after they run. ' 'They will be closed automatically now.' % (mod_name, test_name)) pyplot_helpers.Gcf.destroy_all() if MPL_VERSION >= '2': style_context = mpl.style.context else: @contextlib.contextmanager def style_context(style, after_reset=False): yield with style_context('classic'): r = test_func(*args, **kwargs) fig_managers = pyplot_helpers.Gcf._activeQue figures = [manager.canvas.figure for manager in fig_managers] try: self.run_figure_comparisons(figures, test_name=mod_name) finally: for figure in figures: pyplot_helpers.Gcf.destroy_fig(figure) plt.switch_backend(orig_backend) return r # nose needs the function's name to be in the form "test_*" to # pick it up wrapped.__name__ = test_name return wrapped def failed_images_iter(): """ Return a generator of [expected, actual, diff] filenames for all failed image tests since the test output directory was created. """ baseline_img_dir = os.path.join(ImageTesting.root_image_results, 'baseline_images', 'mpl') diff_dir = os.path.join(ImageTesting.image_output_directory) baselines = sorted(glob.glob(os.path.join(baseline_img_dir, '*', '*.png'))) for expected_fname in baselines: # Get the relative path of the expected image 2 folders up. expected_rel = os.path.relpath( expected_fname, os.path.dirname(os.path.dirname(expected_fname))) result_fname = os.path.join( diff_dir, os.path.dirname(expected_rel), 'result-' + os.path.basename(expected_rel)) diff_fname = result_fname[:-4] + '-failed-diff.png' if os.path.exists(diff_fname): yield expected_fname, result_fname, diff_fname def failed_images_html(): """ Generates HTML which shows the image failures side-by-side when viewed in a web browser. 
""" data_uri_template = '<img alt="{alt}" src="data:image/png;base64,{img}">' def image_as_base64(fname): with open(fname, "rb") as fh: return base64.b64encode(fh.read()).decode("ascii") html = ['<!DOCTYPE html>', '<html>', '<body>'] for expected, actual, diff in failed_images_iter(): expected_html = data_uri_template.format( alt='expected', img=image_as_base64(expected)) actual_html = data_uri_template.format( alt='actual', img=image_as_base64(actual)) diff_html = data_uri_template.format( alt='diff', img=image_as_base64(diff)) html.extend([expected, '<br>', expected_html, actual_html, diff_html, '<br><hr>']) html.extend(['</body>', '</html>']) return '\n'.join(html) def show(projection, geometry): orig_backend = mpl.get_backend() plt.switch_backend('tkagg') if geometry.type == 'MultiPolygon' and 1: multi_polygon = geometry for polygon in multi_polygon: import cartopy.mpl.patch as patch paths = patch.geos_to_path(polygon) for pth in paths: patch = mpatches.PathPatch(pth, edgecolor='none', lw=0, alpha=0.2) plt.gca().add_patch(patch) line_string = polygon.exterior plt.plot(*zip(*line_string.coords), marker='+', linestyle='-') elif geometry.type == 'MultiPolygon': multi_polygon = geometry for polygon in multi_polygon: line_string = polygon.exterior plt.plot(*zip(*line_string.coords), marker='+', linestyle='-') elif geometry.type == 'MultiLineString': multi_line_string = geometry for line_string in multi_line_string: plt.plot(*zip(*line_string.coords), marker='+', linestyle='-') elif geometry.type == 'LinearRing': plt.plot(*zip(*geometry.coords), marker='+', linestyle='-') if 1: # Whole map domain plt.autoscale() elif 0: # The left-hand triangle plt.xlim(-1.65e7, -1.2e7) plt.ylim(0.3e7, 0.65e7) elif 0: # The tip of the left-hand triangle plt.xlim(-1.65e7, -1.55e7) plt.ylim(0.3e7, 0.4e7) elif 1: # The very tip of the left-hand triangle plt.xlim(-1.632e7, -1.622e7) plt.ylim(0.327e7, 0.337e7) elif 1: # The tip of the right-hand triangle plt.xlim(1.55e7, 1.65e7) plt.ylim(0.3e7, 0.4e7) plt.plot(*zip(*projection.boundary.coords), marker='o', scalex=False, scaley=False, zorder=-1) plt.show() plt.switch_backend(orig_backend)
decvalts/cartopy
lib/cartopy/tests/mpl/__init__.py
Python
gpl-3.0
13,516
from django.conf.urls import patterns, url from links import views urlpatterns = patterns('links.views', url(r'^link/settings/$', views.settings, name = 'settings'), url(r'^link/donate/(?P<url>[\d\w.]+)$', views.kintera_redirect, name = 'donate'), url(r'^link/rider/(?P<url>[\d\w.]+)$', views.t4k_redirect, name = 'profile'), )
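
# Hedged sketch (not part of this file): the views referenced above are
# expected to accept the captured `url` group as a keyword argument and
# return an HTTP redirect. The destination template below is invented for
# illustration only.
from django.shortcuts import redirect

def _example_redirect_view(request, url):
    return redirect('https://example.org/donate/{0}'.format(url))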
ethanperez/t4k-rms
links/urls.py
Python
mit
340
from django.shortcuts import render
from django.contrib.auth.models import User
from django.shortcuts import redirect, get_object_or_404
from django.core.urlresolvers import reverse
from .forms import RegistroUserForm, EditarUserForm
from .models import UserProfile
from django.contrib.auth import authenticate, login, logout
from django.contrib.auth.decorators import login_required
from django.template import RequestContext
from django.contrib import messages

# Create your views here.


@login_required
def index_view(request):
    return render(request, 'coworkersimpaqto/index.html')


def login_view(request):
    if request.user.is_authenticated():
        return render(request, 'coworkersimpaqto/index.html')
    mensaje = ''
    if request.method == 'POST':
        username = request.POST.get('username')
        password = request.POST.get('password')
        user = authenticate(username=username, password=password)
        if user is not None:
            if user.is_active:
                login(request, user)
                return render(request, 'coworkersimpaqto/index.html', {'nombre': username})
            else:
                mensaje = 'This user account is inactive.'
                return render(request, 'accounts/login.html', {'mensaje': mensaje})
    mensaje = 'Invalid username or password.'
    return render(request, 'accounts/login.html', {'mensaje': mensaje})


def logout_view(request):
    logout(request)
    messages.success(request, 'You have logged out successfully.')
    return redirect(reverse('accounts.login'))


@login_required
def registro_usuario_view(request):
    if request.method == 'POST':
        form = RegistroUserForm(request.POST, request.FILES)
        if form.is_valid():
            cleaned_data = form.cleaned_data
            username = cleaned_data.get('username')
            password = cleaned_data.get('password')
            email = cleaned_data.get('email')
            photo = cleaned_data.get('photo')
            user_model = User.objects.create_user(username=username, email=email, password=password)
            user_model.save()
            user_profile = UserProfile()
            user_profile.user = user_model
            user_profile.photo = photo
            user_profile.save()
            #return redirect(reverse('index'),kwargs={'nombre':username})
            return redirect(reverse('accounts.listado.userprofile'))
    else:
        form = RegistroUserForm()
    context = {
        'form': form
    }
    return render(request, 'accounts/registrousuario.html', context)


@login_required
def list_usuarios_view(request):
    userprofiles = UserProfile.objects.all()
    context = {'userprofiles': userprofiles}
    return render(request, 'accounts/listUser.html', context)


@login_required
def editar_accounts(request, pk=None):
    userProfile = get_object_or_404(UserProfile, id=pk)
    request.userProfile = userProfile
    if request.method == 'POST':
        form = EditarUserForm(request.POST)
        if form.is_valid():
            userProfile = get_object_or_404(UserProfile, id=pk)
            cleaned_data = form.cleaned_data
            password = form.cleaned_data['password']
            if password:
                user = userProfile.user
                user.set_password(password)
                user.save()
            #photo = form.cleaned_data['photo']
            #if photo:
            #    userProfile.photo=photo
            #userdos= userProfile.user
            #userdos.username = 'luis'
            #userdos.save()
            userProfile.save()
            return redirect(reverse('accounts.listado.userprofile'))
    else:
        userProfile = get_object_or_404(UserProfile, id=pk)
        request.userProfile = userProfile
        if userProfile:
            form = EditarUserForm(request.POST or None,
                                  initial={'username': request.userProfile.user.username,
                                           'email': request.userProfile.user.email,
                                           'password': request.userProfile.user.password,
                                           'password2': request.userProfile.user.password,
                                           'photo': request.userProfile.photo})
        else:
            form = EditarUserForm(request.POST or None)
    return render(request, 'accounts/edicion.html',
                  {'form': form, 'userProfile': userProfile,
                   'titulo': 'Edit User',
                   'retorno': 'accounts.listado.userprofile', })
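
# Hedged sketch (not part of this module): the authenticate()/login() pattern
# used by login_view above, reduced to its core; the credentials are
# placeholders.
def _example_login(request):
    user = authenticate(username='demo', password='demo-pass')
    if user is not None and user.is_active:
        login(request, user)
        return True
    return False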
vimeworks/ImpaQto
accounts/views.py
Python
mit
4,370
# Run these tests with 'nosetests': # install the 'python-nose' package (Fedora/CentOS or Ubuntu) # run 'nosetests' in the root of the repository import unittest import pkg class RpmTests(unittest.TestCase): def setUp(self): # 'setUp' breaks Pylint's naming rules # pylint: disable=C0103 self.spec = pkg.Spec("tests/data/ocaml-cohttp.spec", dist=".el6") def test_good_filename_preprocessor(self): pkg.Spec("tests/data/ocaml-cohttp.spec.in") def test_bad_filename(self): self.assertRaises(pkg.SpecNameMismatch, pkg.Spec, "tests/data/bad-name.spec") def test_bad_filename_preprocessor(self): self.assertRaises(pkg.SpecNameMismatch, pkg.Spec, "tests/data/bad-name.spec.in") def test_name(self): self.assertEqual(self.spec.name(), "ocaml-cohttp") def test_specpath(self): self.assertEqual(self.spec.specpath(), "./SPECS/ocaml-cohttp.spec") def test_version(self): self.assertEqual(self.spec.version(), "0.9.8") def test_provides(self): self.assertEqual( self.spec.provides(), set(["ocaml-cohttp", "ocaml-cohttp-devel"])) def test_source_urls(self): self.assertEqual( self.spec.source_urls(), ["ocaml-cohttp-init", "file:///code/ocaml-cohttp-extra#ocaml-cohttp-extra-0.9.8.tar.gz", "https://github.com/mirage/ocaml-cohttp/archive/" "ocaml-cohttp-0.9.8/ocaml-cohttp-0.9.8.tar.gz"]) def test_source_paths(self): self.assertEqual( self.spec.source_paths(), ["./SOURCES/ocaml-cohttp-init", "./SOURCES/ocaml-cohttp-extra-0.9.8.tar.gz", "./SOURCES/ocaml-cohttp-0.9.8.tar.gz"]) def test_buildrequires(self): self.assertEqual( self.spec.buildrequires(), set(["ocaml", "ocaml-findlib", "ocaml-re-devel", "ocaml-uri-devel", "ocaml-cstruct-devel", "ocaml-lwt-devel", "ocaml-ounit-devel", "ocaml-ocamldoc", "ocaml-camlp4-devel", "openssl", "openssl-devel"])) def test_source_package_path(self): self.assertEqual( self.spec.source_package_path(), "./SRPMS/ocaml-cohttp-0.9.8-1.el6.src.rpm") def test_binary_package_paths(self): self.assertEqual( sorted(self.spec.binary_package_paths()), sorted(["./RPMS/x86_64/ocaml-cohttp-0.9.8-1.el6.x86_64.rpm", "./RPMS/x86_64/" + "ocaml-cohttp-devel-0.9.8-1.el6.x86_64.rpm"])) class DebTests(unittest.TestCase): def setUp(self): # 'setUp' breaks Pylint's naming rules # pylint: disable=C0103 def map_rpm_to_deb(name): mapping = {"ocaml-cohttp": ["libcohttp-ocaml"], "ocaml-cohttp-devel": ["libcohttp-ocaml-dev"], "ocaml": ["ocaml-nox", "ocaml-native-compilers"], "ocaml-findlib": ["ocaml-findlib"], "ocaml-re-devel": ["libre-ocaml-dev"], "ocaml-uri-devel": ["liburi-ocaml-dev"], "ocaml-cstruct-devel": ["libcstruct-ocaml-dev"], "ocaml-lwt-devel": ["liblwt-ocaml-dev"], "ocaml-ounit-devel": ["libounit-ocaml-dev"], "ocaml-ocamldoc": ["ocaml-nox"], "ocaml-camlp4-devel": ["camlp4", "camlp4-extra"], "openssl": ["libssl1.0.0"], "openssl-devel": ["libssl-dev"]} return mapping[name] self.spec = pkg.Spec("./tests/data/ocaml-cohttp.spec", target="deb", map_name=map_rpm_to_deb) def test_name(self): self.assertEqual(self.spec.name(), "ocaml-cohttp") def test_specpath(self): self.assertEqual(self.spec.specpath(), "./SPECS/ocaml-cohttp.spec") def test_version(self): self.assertEqual(self.spec.version(), "0.9.8") def test_provides(self): self.assertEqual( self.spec.provides(), set(["libcohttp-ocaml", "libcohttp-ocaml-dev"])) def test_source_urls(self): self.assertEqual( self.spec.source_urls(), ["ocaml-cohttp-init", "file:///code/ocaml-cohttp-extra#ocaml-cohttp-extra-0.9.8.tar.gz", "https://github.com/mirage/ocaml-cohttp/archive/" + "ocaml-cohttp-0.9.8/ocaml-cohttp-0.9.8.tar.gz"]) def test_source_paths(self): self.assertEqual( 
self.spec.source_paths(), ["./SOURCES/ocaml-cohttp-init", "./SOURCES/ocaml-cohttp-extra-0.9.8.tar.gz", "./SOURCES/ocaml-cohttp-0.9.8.tar.gz"]) def test_buildrequires(self): self.assertEqual( self.spec.buildrequires(), set(["ocaml-nox", "ocaml-native-compilers", "ocaml-findlib", "libre-ocaml-dev", "liburi-ocaml-dev", "libcstruct-ocaml-dev", "liblwt-ocaml-dev", "libounit-ocaml-dev", "camlp4", "camlp4-extra", "libssl1.0.0", "libssl-dev"])) def test_source_package_path(self): self.assertEqual( self.spec.source_package_path(), "./SRPMS/libcohttp-ocaml_0.9.8-1.dsc") def test_binary_package_paths(self): self.assertEqual( sorted(self.spec.binary_package_paths()), sorted(["./RPMS/libcohttp-ocaml_0.9.8-1_amd64.deb", "./RPMS/libcohttp-ocaml-dev_0.9.8-1_amd64.deb"]))
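
# A minimal direct-usage sketch (not part of the upstream tests) of the
# pkg.Spec API exercised above, using the same fixture spec file as
# RpmTests.setUp.
def _example_spec_usage():
    spec = pkg.Spec("tests/data/ocaml-cohttp.spec", dist=".el6")
    return spec.name(), spec.version(), sorted(spec.provides())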
simonjbeaumont/planex
tests/test_pkg.py
Python
lgpl-2.1
5,676
from ffthompy.general.base import * __author__ = "Jaroslav Vondrejc" __copyright__ = """Copyright 2016, Jaroslav Vondrejc""" __email__ = "vondrejc@gmail.com"
vondrejc/FFTHomPy
ffthompy/__init__.py
Python
mit
159
''' URL Configuration The `urlpatterns` list routes URLs to views. For more information please see: https://docs.djangoproject.com/en/1.8/topics/http/urls/ Examples: Function views 1. Add an import: from my_app import views 2. Add a URL to urlpatterns: url(r'^$', views.home, name='home') Class-based views 1. Add an import: from other_app.views import Home 2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home') Including another URLconf 1. Add an import: from blog import urls as blog_urls 2. Add a URL to urlpatterns: url(r'^blog/', include(blog_urls)) ''' from django.conf import settings from coco.admin.admin import admin_site from django.conf.urls import include, url urlpatterns = [ url(r'^{}admin/doc/'.format(settings.SUBDIRECTORY), include('django.contrib.admindocs.urls')), url(r'^{}admin/'.format(settings.SUBDIRECTORY), include(admin_site.urls)), url(r'^{}api/'.format(settings.SUBDIRECTORY), include('coco.api.urls')), url(r'^{}'.format(settings.SUBDIRECTORY), include('coco.web.urls')) ] ''' Registering custom error handlers. ''' handler404 = 'coco.web.views.system.error_404' handler500 = 'coco.web.views.system.error_500'
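
# Hedged illustration (not part of this file) of how the SUBDIRECTORY prefix
# composes into the regexes above; the 'coco/' value is invented.
def _example_prefixed_pattern(subdirectory='coco/'):
    return r'^{}api/'.format(subdirectory)  # -> '^coco/api/'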
coco-project/coco
coco/urls.py
Python
bsd-3-clause
1,211
from __future__ import unicode_literals from django.contrib.auth.models import User from django.db import models # Create your models here. class Noticia(models.Model): Publicado = 'Publicado' Borrador = 'Borrador' Titulo = models.CharField(max_length=30) Subtitulo = models.CharField(max_length=50) Imagen = models.FileField(blank=True, upload_to='media/fotos/noticias') SubtituloImag = models.CharField(max_length=30) Cuerpo = models.TextField(max_length=500) Timestamp = models.DateTimeField(auto_now_add = True, auto_now = False) Actualizado = models.DateTimeField(auto_now_add = False, auto_now = True) CHOICES=[(Publicado, 'Publicado'),(Borrador, 'Borrador')] Estado = models.CharField(max_length=9,choices=CHOICES, default=Borrador) IncluirVideo = models.BooleanField() CodVideo = models.CharField(max_length=200) Tags = models.CharField(max_length=30) usuario = models.ForeignKey(User) def __str__(self): return self.Titulo + ' - ' + self.Subtitulo class Evento(models.Model): Titulo = models.CharField(max_length=30) Subtitulo = models.CharField(max_length=50) Imagen = models.FileField(blank=True, upload_to='media/fotos/noticias') SubtituloImag = models.CharField(max_length=30) Cuerpo = models.CharField(max_length=500) Timestamp = models.DateTimeField(auto_now_add = True, auto_now = False) Actualizado = models.DateTimeField(auto_now_add = False, auto_now = True) Lugar = models.CharField(max_length=50) Fecha = models.DateTimeField(auto_now_add = False) Organizadores = models.CharField(max_length=30) Ponente = models.CharField(max_length=30) Tags = models.CharField(max_length=30) def __str__(self): return self.Titulo + ' - ' + self.Subtitulo
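
# Hedged usage sketch (not part of this module): creating a draft Noticia via
# the ORM; all field values below are placeholders.
def _example_create_noticia(user):
    return Noticia.objects.create(
        Titulo='Sample title', Subtitulo='Sample subtitle',
        SubtituloImag='Caption', Cuerpo='Body text',
        Estado=Noticia.Borrador, IncluirVideo=False, CodVideo='',
        Tags='demo', usuario=user)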
magvugr/AT
AppAdiccionTic/models.py
Python
gpl-3.0
1,704
"""File system wrapper. """ import os def read_file(path): """Return decoded file content for specified path """ fptr = open(path, 'r') content = fptr.read() fptr.close() return content.decode('latin1') def dir_contents(path, sort=True): """Return list of all entries in a directory for specified path. """ contents = [os.path.join(path, f) for f in os.listdir(path)] return sorted(contents) if sort else contents def dir_walk(path, sort=True): """Return list of all entries in a directory tree for specified path. """ contents = [] for root, _, files in os.walk(path): contents += [os.path.join(root, f) for f in files] return sorted(contents) if sort else contents
mharrys/spam-filter
fs.py
Python
lgpl-2.1
741
import os from collections import namedtuple from enum import Enum import logging log = logging.getLogger(__name__) class LoadState(Enum): INITIAL = 0 LOADING = 1 DONE = 2 FAILED = 3 BookData = namedtuple('BookData', ['filename', 'width', 'height', 'page_number', 'bookmarks', 'indexed', 'pages', 'load_state', 'num_pages']) BookData.__new__.__defaults__ = (None, None, None, 0, tuple([0]), None, tuple(), LoadState.INITIAL, 0) class BookFile(BookData): @property def ext(self): return os.path.splitext(self.filename)[-1].lower() @property def title(self): basename = os.path.basename(self.filename) title = os.path.splitext(basename)[0].replace('_', ' ') return title def get_num_pages(self): if self.load_state != LoadState.DONE: log.warning('get pages on not-yet-loaded book') if self.indexed: return self.num_pages else: return len(self.pages) def set_page(self, page): if page < 0: page = 0 elif page >= self.get_num_pages(): page = self.get_num_pages() - 1 return self._replace(page_number=page)
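
# Hedged usage sketch (not part of this module); the filename is a
# placeholder. BookFile is an immutable namedtuple subclass, so set_page
# returns a new instance rather than mutating in place.
def _example_paging():
    book = BookFile(filename='/tmp/example.brf', width=40, height=25,
                    pages=((0,), (1,), (2,)), load_state=LoadState.DONE)
    return book.set_page(99).page_number  # clamped to the last page -> 2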
Bristol-Braille/canute-ui
ui/book/book_file.py
Python
gpl-3.0
1,397
###############################################################################
##
## Copyright (C) 2006-2011, University of Utah.
## All rights reserved.
## Contact: contact@vistrails.org
##
## This file is part of VisTrails.
##
## "Redistribution and use in source and binary forms, with or without
## modification, are permitted provided that the following conditions are met:
##
##  - Redistributions of source code must retain the above copyright notice,
##    this list of conditions and the following disclaimer.
##  - Redistributions in binary form must reproduce the above copyright
##    notice, this list of conditions and the following disclaimer in the
##    documentation and/or other materials provided with the distribution.
##  - Neither the name of the University of Utah nor the names of its
##    contributors may be used to endorse or promote products derived from
##    this software without specific prior written permission.
##
## THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
## AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
## THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
## PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
## CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
## EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
## PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
## OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
## WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
## OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
## ADVISED OF THE POSSIBILITY OF SUCH DAMAGE."
##
###############################################################################
""" Common widgets used in the VisTrails interface. These are simple
widgets in terms of code and features, and they should have no
interaction with the VisTrails core. """

from PyQt4 import QtCore, QtGui
from PyQt4.QtCore import pyqtSlot, pyqtSignal
from gui.theme import CurrentTheme
from gui.modules.constant_configuration import StandardConstantWidget
from core.system import systemType

################################################################################

class QToolWindow(QtGui.QDockWidget):
    """
    QToolWindow is a floating-dockable widget. It also keeps track of
    its widget window title to update the tool window accordingly

    """
    def __init__(self, widget=None, parent=None):
        """ QToolWindow(parent: QWidget) -> QToolWindow
        Construct a floating, dockable widget

        """
        QtGui.QDockWidget.__init__(self, parent)
        self.setFeatures(QtGui.QDockWidget.AllDockWidgetFeatures)
        self.mwindow = QtGui.QMainWindow(self)
        self.centralwidget = widget
        self.mwindow.setWindowFlags(QtCore.Qt.Widget)
        self.mwindow.setCentralWidget(widget)
        self.setWidget(self.mwindow)
        self.createToolBar()
        if widget:
            self.setWindowTitle(widget.windowTitle())
        self.pinStatus = False
        self.monitorWindowTitle(widget)
        self.connect(self, QtCore.SIGNAL("topLevelChanged(bool)"),
                     self.setDefaultPinStatus)

    def createToolBar(self):
        self.toolbar = QtGui.QToolBar(self.mwindow)
        self.pinButton = QtGui.QAction(CurrentTheme.UNPINNED_PALETTE_ICON,
                                       "", self.toolbar, checkable=True,
                                       checked=False,
                                       toggled=self.pinStatusChanged)
        self.pinButton.setToolTip("Pin this on the Tab Bar")
        spacer = QtGui.QWidget()
        spacer.setSizePolicy(QtGui.QSizePolicy.MinimumExpanding,
                             QtGui.QSizePolicy.Preferred)
        self.toolbar.addWidget(spacer)
        self.toolbar.addAction(self.pinButton)
        self.pinAction = self.pinButton
        self.toolbar.setFloatable(False)
        self.toolbar.setMovable(False)
        self.toolbar.setIconSize(QtCore.QSize(16, 16))
        self.mwindow.addToolBar(self.toolbar)

    def setDefaultPinStatus(self, topLevel):
        if topLevel:
            self.setPinStatus(False)
            self.pinButton.setEnabled(False)
        else:
            self.pinButton.setEnabled(True)

    def pinStatusChanged(self, pinStatus):
        self.pinStatus = pinStatus
        self.updateButtonIcon(pinStatus)

    def updateButtonIcon(self, on):
        if on:
            self.pinButton.setIcon(CurrentTheme.PINNED_PALETTE_ICON)
            self.pinButton.setToolTip("Unpin this from the Tab Bar")
        else:
            self.pinButton.setIcon(CurrentTheme.UNPINNED_PALETTE_ICON)
            self.pinButton.setToolTip("Pin this on the Tab Bar")

    def setPinStatus(self, pinStatus):
        self.pinStatus = pinStatus
        self.pinButton.setChecked(pinStatus)
        self.updateButtonIcon(pinStatus)

    def monitorWindowTitle(self, widget):
        """ monitorWindowTitle(widget: QWidget) -> None
        Watch for window title changes on the widget and use them as the
        window title of this tool window

        """
        if widget:
            widget.installEventFilter(self)

    def eventFilter(self, object, event):
        """ eventFilter(object: QObject, event: QEvent) -> bool
        Filter window title change events to update the tool window title

        """
        if event.type() == QtCore.QEvent.WindowTitleChange:
            self.setWindowTitle(object.windowTitle())
        elif event.type() == QtCore.QEvent.Close:
            object.removeEventFilter(self)
        return QtGui.QDockWidget.eventFilter(self, object, event)
        # return super(QToolWindow, self).eventFilter(object, event)

class QToolWindowInterface(object):
    """
    QToolWindowInterface can be co-inherited by any class to allow the
    inheriting class to be contained in a tool window

    """
    def toolWindow(self):
        """ toolWindow() -> None
        Return the tool window and set its parent to self.parent()
        while having self as its contained widget

        """
        if not hasattr(self, '_toolWindow'):
            self._toolWindow = QToolWindow(self, self.parent())
        elif self._toolWindow.centralwidget != self:
            self._toolWindow.window.setCentralWidget(self)
        return self._toolWindow

    def changeEvent(self, event):
        """ changeEvent(event: QEvent) -> None
        Make sure to update the tool window's parent to match the
        widget's real parent

        """
        if (event.type() == QtCore.QEvent.ParentChange and
                hasattr(self, '_toolWindow')):
            if self.parent() != self._toolWindow:
                self._toolWindow.setParent(self.parent())

    def setToolWindowAcceptDrops(self, value):
self.toolWindow().setAcceptDrops(value) ############################################################################### class QDockContainer(QtGui.QMainWindow): """ QDockContainer is a window that can contain dock widgets while still be contained in a tool window. It is just a straight inheritance from QMainWindow """ def __init__(self, parent=None): """ QMainWindow(parent: QWidget) -> QMainWindow Setup window to have its widget dockable everywhere """ QtGui.QMainWindow.__init__(self, parent) self.setDockNestingEnabled(True) ############################################################################### class QSearchTreeWidget(QtGui.QTreeWidget): """ QSearchTreeWidget is just a QTreeWidget with a support function to refine itself when searching for some text """ def __init__(self, parent=None): """ QSearchTreeWidget(parent: QWidget) -> QSearchTreeWidget Set up size policy and header """ QtGui.QTreeWidget.__init__(self, parent) self.setSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Expanding) self.setRootIsDecorated(True) self.setDragEnabled(True) self.flags = QtCore.Qt.ItemIsDragEnabled def searchItemName(self, name): """ searchItemName(name: QString) -> None Search and refine the module tree widget to contain only items whose name is 'name' """ matchedItems = [] def recursiveSetVisible(item, testFunction): """ recursiveSetVisible Pass through all items of a item """ enabled = testFunction(item) visible = enabled child = item.child for childIndex in xrange(item.childCount()): visible |= recursiveSetVisible(child(childIndex), testFunction) # if item is hidden or has changed visibility if not visible or (item.isHidden() != (not visible)): item.setHidden(not visible) if visible: f = item.flags() b = f & self.flags if enabled: if not b: item.setFlags(f | self.flags) elif b: item.setFlags(f & ~self.flags) return visible if str(name)=='': testFunction = lambda x: True else: matchedItems = set(self.findItems(name, QtCore.Qt.MatchContains | QtCore.Qt.MatchWrap | QtCore.Qt.MatchRecursive)) testFunction = matchedItems.__contains__ for itemIndex in xrange(self.topLevelItemCount()): recursiveSetVisible(self.topLevelItem(itemIndex), testFunction) def mimeData(self, itemList): """ mimeData(itemList) -> None Setup the mime data to contain itemList because Qt 4.2.2 implementation doesn't instantiate QTreeWidgetMimeData anywhere as it's supposed to. It must have been a bug... """ data = QtGui.QTreeWidget.mimeData(self, itemList) data.items = itemList return data def setMatchedFlags(self, flags): """ setMatchedFlags(flags: QItemFlags) -> None Set the flags for matched item in the search tree. Parents of matched node will be visible with these flags off. """ self.flags = flags class QSearchTreeWindow(QtGui.QWidget): """ QSearchTreeWindow contains a search box on top of a tree widget for easy search and refine. 
    The subclass must implement the createTreeWidget() method to return
    a tree widget that also exposes a searchItemName method

    """
    def __init__(self, parent=None):
        """ QSearchTreeWindow(parent: QWidget) -> QSearchTreeWindow
        Initialize all GUI components

        """
        QtGui.QWidget.__init__(self, parent)
        self.setWindowTitle('Search Tree')

        vLayout = QtGui.QVBoxLayout(self)
        vLayout.setMargin(0)
        vLayout.setSpacing(0)
        self.setLayout(vLayout)

        self.searchBox = QSearchBox(False, True, self)
        vLayout.addWidget(self.searchBox)

        self.treeWidget = self.createTreeWidget()
        vLayout.addWidget(self.treeWidget)

        self.connect(self.searchBox,
                     QtCore.SIGNAL('executeIncrementalSearch(QString)'),
                     self.treeWidget.searchItemName)
        self.connect(self.searchBox,
                     QtCore.SIGNAL('executeSearch(QString)'),
                     self.treeWidget.searchItemName)
        self.connect(self.searchBox,
                     QtCore.SIGNAL('resetSearch()'),
                     self.clearTreeWidget)

    def clearTreeWidget(self):
        """ clearTreeWidget():
        Return the default search tree

        """
        self.treeWidget.searchItemName(QtCore.QString(''))

    def createTreeWidget(self):
        """ createTreeWidget() -> QSearchTreeWidget
        Return a default searchable tree widget

        """
        return QSearchTreeWidget(self)

class QPromptWidget(QtGui.QLabel):
    """
    QPromptWidget is a widget that will display a prompt text when it
    doesn't have any child visible, or else, it will disappear. This is
    useful for drag-and-drop prompts. Subclasses should call
    setPromptText and showPrompt at the appropriate time to show/hide
    the prompt text

    """
    def __init__(self, parent=None):
        """ QPromptWidget(parent: QWidget) -> QPromptWidget
        Set up the font and alignment for the prompt

        """
        QtGui.QLabel.__init__(self, parent)
        self.setAlignment(QtCore.Qt.AlignHCenter | QtCore.Qt.AlignVCenter)
        self.setWordWrap(True)
        self.regularFont = self.font()
        self.promptFont = QtGui.QFont(self.font())
        self.promptFont.setItalic(True)
        self.promptText = ''
        self.promptVisible = False

    def setPromptText(self, text):
        """ setPromptText(text: str) -> None
        Set the prompt text string

        """
        self.promptText = text

    def showPrompt(self, show=True):
        """ showPrompt(show: boolean) -> None
        Show/Hide the prompt

        """
        if show != self.promptVisible:
            self.promptVisible = show
            self.repaint(self.rect())

    def showPromptByChildren(self):
        """ showPromptByChildren()
        Show/Hide the prompt based on the current state of children

        """
        if self.promptText == '':
            self.showPrompt(False)
        else:
            self.showPrompt(self.layout() == None or
                            self.layout().count() == 0)

    def paintEvent(self, event):
        """ paintEvent(event: QPaintEvent) -> None
        Paint the prompt in the center if necessary

        """
        if self.promptVisible:
            painter = QtGui.QPainter(self)
            painter.setFont(self.promptFont)
            painter.drawText(self.rect(),
                             QtCore.Qt.AlignCenter | QtCore.Qt.TextWordWrap,
                             self.promptText)
            painter.end()
        QtGui.QLabel.paintEvent(self, event)
        # super(QPromptWidget, self).paintEvent(event)

class QStringEdit(QtGui.QFrame):
    """
    QStringEdit is a line edit that has an extra button to allow the
    user to use a file as the value

    """
    def __init__(self, parent=None):
        """ QStringEdit(parent: QWidget) -> QStringEdit
        Create a hbox layout to contain a line edit and a button

        """
        QtGui.QFrame.__init__(self, parent)
        hLayout = QtGui.QHBoxLayout(self)
        hLayout.setMargin(0)
        hLayout.setSpacing(0)
        self.setLayout(hLayout)

        self.lineEdit = QtGui.QLineEdit()
        self.lineEdit.setFrame(False)
        self.lineEdit.setSizePolicy(QtGui.QSizePolicy.Expanding,
                                    QtGui.QSizePolicy.Expanding)
        hLayout.addWidget(self.lineEdit)
        self.setFocusProxy(self.lineEdit)

        self.fileButton = QtGui.QToolButton()
        self.fileButton.setText('...')
self.fileButton.setSizePolicy(QtGui.QSizePolicy.Maximum, QtGui.QSizePolicy.Expanding) self.fileButton.setFocusPolicy(QtCore.Qt.NoFocus) self.fileButton.setAutoFillBackground(True) self.connect(self.fileButton, QtCore.SIGNAL('clicked()'), self.insertFileNameDialog) hLayout.addWidget(self.fileButton) def setText(self, text): """ setText(text: QString) -> None Overloaded function for setting the line edit text """ self.lineEdit.setText(text) def text(self): """ text() -> QString Overloaded function for getting the line edit text """ return self.lineEdit.text() def selectAll(self): """ selectAll() -> None Overloaded function for selecting all the text """ self.lineEdit.selectAll() def setFrame(self, frame): """ setFrame(frame: bool) -> None Show/Hide the frame of this widget """ if frame: self.setFrameStyle(QtGui.QFrame.StyledPanel | QtGui.QFrame.Plain) else: self.setFrameStyle(QtGui.QFrame.NoFrame) def insertFileNameDialog(self): """ insertFileNameDialog() -> None Allow user to insert a file name as a value to the string """ fileName = QtGui.QFileDialog.getOpenFileName(self, 'Use Filename ' 'as Value...', self.text(), 'All files ' '(*.*)') if not fileName.isEmpty(): self.setText(fileName) ############################################################################### class MultiLineWidget(StandardConstantWidget): def __init__(self, contents, contentType, parent=None): """__init__(contents: str, contentType: str, parent: QWidget) -> StandardConstantWidget Initialize the line edit with its contents. Content type is limited to 'int', 'float', and 'string' """ StandardConstantWidget.__init__(self, parent) def update_parent(self): pass def keyPressEvent(self, event): """ keyPressEvent(event) -> None If this is a string line edit, we can use Ctrl+Enter to enter the file name """ k = event.key() s = event.modifiers() if ((k == QtCore.Qt.Key_Enter or k == QtCore.Qt.Key_Return) and s & QtCore.Qt.ShiftModifier): event.accept() if self.contentIsString and self.multiLines: fileNames = QtGui.QFileDialog.getOpenFileNames(self, 'Use Filename ' 'as Value...', self.text(), 'All files ' '(*.*)') fileName = fileNames.join(',') if not fileName.isEmpty(): self.setText(fileName) return QtGui.QLineEdit.keyPressEvent(self,event) ############################################################################### class QSearchEditBox(QtGui.QComboBox): def __init__(self, incremental=True, parent=None): QtGui.QComboBox.__init__(self, parent) self.setEditable(True) self.setInsertPolicy(QtGui.QComboBox.InsertAtTop) self.setSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Fixed) regexp = QtCore.QRegExp("\S.*") self.setDuplicatesEnabled(False) validator = QtGui.QRegExpValidator(regexp, self) self.setValidator(validator) self.addItem('Clear Recent Searches') item = self.model().item(0, 0) font = QtGui.QFont(item.font()) font.setItalic(True) item.setFont(font) self.is_incremental = incremental def keyPressEvent(self, e): if e.key() in (QtCore.Qt.Key_Return,QtCore.Qt.Key_Enter): if self.currentText(): if not self.is_incremental: self.emit(QtCore.SIGNAL('executeSearch(QString)'), self.currentText()) self.insertItem(0, self.currentText()) else: self.emit(QtCore.SIGNAL('resetText()')) return QtGui.QComboBox.keyPressEvent(self, e) ############################################################################### class QSearchBox(QtGui.QWidget): """ QSearchBox contains a search combo box with a clear button and a search icon. 
""" def __init__(self, refine=True, incremental=True, parent=None): """ QSearchBox(parent: QWidget) -> QSearchBox Intialize all GUI components """ QtGui.QWidget.__init__(self, parent) self.setWindowTitle('Search') hLayout = QtGui.QHBoxLayout(self) hLayout.setMargin(0) hLayout.setSpacing(2) self.setLayout(hLayout) self.searchEdit = QSearchEditBox(incremental, self) #TODO: Add separator! self.searchEdit.clearEditText() if refine: self.actionGroup = QtGui.QActionGroup(self) self.searchAction = QtGui.QAction('Search', self) self.searchAction.setCheckable(True) self.actionGroup.addAction(self.searchAction) self.refineAction = QtGui.QAction('Refine', self) self.refineAction.setCheckable(True) self.actionGroup.addAction(self.refineAction) self.searchAction.setChecked(True) self.searchMenu = QtGui.QMenu() self.searchMenu.addAction(self.searchAction) self.searchMenu.addAction(self.refineAction) self.searchButton = QtGui.QToolButton(self) self.searchButton.setIcon(CurrentTheme.QUERY_ARROW_ICON) self.searchButton.setAutoRaise(True) self.searchButton.setPopupMode(QtGui.QToolButton.InstantPopup) self.searchButton.setMenu(self.searchMenu) hLayout.addWidget(self.searchButton) self.connect(self.searchAction, QtCore.SIGNAL('triggered()'), self.searchMode) self.connect(self.refineAction, QtCore.SIGNAL('triggered()'), self.refineMode) else: self.searchLabel = QtGui.QLabel(self) pix = CurrentTheme.QUERY_VIEW_ICON.pixmap(QtCore.QSize(16,16)) self.searchLabel.setPixmap(pix) self.searchLabel.setAlignment(QtCore.Qt.AlignCenter) self.searchLabel.setMargin(4) hLayout.addWidget(self.searchLabel) hLayout.addWidget(self.searchEdit) self.resetButton = QtGui.QToolButton(self) self.resetButton.setIcon(QtGui.QIcon( self.style().standardPixmap(QtGui.QStyle.SP_DialogCloseButton))) self.resetButton.setIconSize(QtCore.QSize(12,12)) self.resetButton.setAutoRaise(True) self.resetButton.setEnabled(False) hLayout.addWidget(self.resetButton) self.manualResetEnabled = False self.connect(self.resetButton, QtCore.SIGNAL('clicked()'), self.resetSearch) self.connect(self.searchEdit, QtCore.SIGNAL('activated(int)'), self.executeSearch) self.connect(self.searchEdit, QtCore.SIGNAL('resetText'), self.resetSearch) self.connect(self.searchEdit, QtCore.SIGNAL('executeSearch(QString)'), self.executeTextSearch) if incremental: self.connect(self.searchEdit, QtCore.SIGNAL('editTextChanged(QString)'), self.executeIncrementalSearch) else: self.connect(self.searchEdit, QtCore.SIGNAL('editTextChanged(QString)'), self.resetToggle) def resetSearch(self): """ resetSearch() -> None Emit a signal to clear the search. """ self.searchEdit.clearEditText() self.resetButton.setEnabled(False) self.manualResetEnabled = False self.emit(QtCore.SIGNAL('resetSearch()')) def clearSearch(self): """ clearSearch() -> None Clear the edit text without emitting resetSearch() signal This is for when the search is rest from the version view and the signal are already taken care of """ self.searchEdit.clearEditText() self.resetButton.setEnabled(False) self.manualResetEnabled = False def searchMode(self): """ searchMode() -> None """ self.emit(QtCore.SIGNAL('refineMode(bool)'), False) def refineMode(self): """ refineMode() -> None """ self.emit(QtCore.SIGNAL('refineMode(bool)'), True) def resetToggle(self, text): self.resetButton.setEnabled((str(text) != '') or self.manualResetEnabled) def executeIncrementalSearch(self, text): """ executeIncrementalSearch(text: QString) -> None The text is changing, so update the search. 
""" self.resetButton.setEnabled((str(text)!='') or self.manualResetEnabled) self.emit(QtCore.SIGNAL('executeIncrementalSearch(QString)'), text) def executeTextSearch(self, text): self.emit(QtCore.SIGNAL('executeSearch(QString)'), text) def executeSearch(self, index): """ executeSearch(index: int) -> None The text is finished changing or a different item was selected. """ count = self.searchEdit.count() if index == count-1: for i in xrange(count-1): self.searchEdit.removeItem(0) self.resetSearch() else: self.resetButton.setEnabled(True) self.emit(QtCore.SIGNAL('executeSearch(QString)'), self.searchEdit.currentText()) def getCurrentText(self): return str(self.searchEdit.currentText()) def setManualResetEnabled(self, boolVal): self.manualResetEnabled = boolVal self.resetButton.setEnabled((self.getCurrentText() != '') or self.manualResetEnabled) ############################################################################### class QTabBarDetachButton(QtGui.QAbstractButton): """QTabBarDetachButton is a special button to be added to a tab """ def __init__(self, parent): QtGui.QAbstractButton.__init__(self) self.setFocusPolicy(QtCore.Qt.NoFocus) self.setCursor(QtCore.Qt.ArrowCursor) self.setToolTip("Detach Tab") self.setIcon(CurrentTheme.DETACH_TAB_ICON) self.activePixmap = self.icon().pixmap(self.sizeHint(), mode=QtGui.QIcon.Active) self.normalPixmap = self.icon().pixmap(self.sizeHint(), mode=QtGui.QIcon.Normal) self.resize(self.sizeHint()) def sizeHint(self): self.ensurePolished() size = QtCore.QSize() if not self.icon().isNull(): iconSize = self.style().pixelMetric(QtGui.QStyle.PM_SmallIconSize, None, self) sz = self.icon().actualSize(QtCore.QSize(iconSize, iconSize)) size = max(sz.width(), sz.height()) return QtCore.QSize(size, size) def enterEvent(self, event): if self.isEnabled(): icon = QtGui.QIcon(self.activePixmap) self.setIcon(icon) self.update() else: icon = QtGui.QIcon(self.normalPixmap) self.setIcon(icon) QtGui.QAbstractButton.enterEvent(self, event) def leaveEvent(self, event): icon = QtGui.QIcon(self.normalPixmap) self.setIcon(icon) if self.isEnabled(): self.update() QtGui.QAbstractButton.leaveEvent(self, event) def closePosition(self): tb = self.parent() if isinstance(tb, QtGui.QTabBar): close_position = self.style().styleHint(QtGui.QStyle.SH_TabBar_CloseButtonPosition, None, tb) return close_position return -1 def otherPosition(self): tb = self.parent() if isinstance(tb, QtGui.QTabBar): close_position = self.closePosition() if close_position == QtGui.QTabBar.LeftSide: position = QtGui.QTabBar.RightSide else: position = QtGui.QTabBar.LeftSide return position return -1 def paintEvent(self, event): p = QtGui.QPainter(self) opt = QtGui.QStyleOptionToolButton() opt.init(self) opt.state |= QtGui.QStyle.State_AutoRaise if (self.isEnabled() and self.underMouse() and not self.isChecked() and not self.isDown()): opt.state |= QtGui.QStyle.State_Raised if self.isChecked(): opt.state |= QtGui.QStyle.State_On if self.isDown(): opt.state |= QtGui.QStyle.State_Sunken tb = self.parent() if isinstance(tb, QtGui.QTabBar): index = tb.currentIndex() position = self.otherPosition() if tb.tabButton(index, position) == self: opt.state |= QtGui.QStyle.State_Selected opt.icon = self.icon() opt.subControls = QtGui.QStyle.SC_None opt.activeSubControls = QtGui.QStyle.SC_None opt.features = QtGui.QStyleOptionToolButton.None opt.arrowType = QtCore.Qt.NoArrow size = self.style().pixelMetric(QtGui.QStyle.PM_SmallIconSize, None, self) opt.iconSize = QtCore.QSize(size,size) 
        self.style().drawComplexControl(QtGui.QStyle.CC_ToolButton, opt, p,
                                        self)

###############################################################################

class QMouseTabBar(QtGui.QTabBar):
    """QMouseTabBar is a QTabBar that emits a signal when a tab receives a
    mouse event. For now only double-click events are emitted."""
    #signals
    tabDoubleClicked = pyqtSignal(int, QtCore.QPoint)

    def __init__(self, parent=None):
        QtGui.QTabBar.__init__(self, parent)

    def mouseDoubleClickEvent(self, event):
        if event.button() == QtCore.Qt.LeftButton:
            tab_idx = self.tabAt(event.pos())
            if tab_idx != -1:
                self.tabDoubleClicked.emit(tab_idx, event.pos())
        QtGui.QTabBar.mouseDoubleClickEvent(self, event)

###############################################################################

class QDockPushButton(QtGui.QPushButton):
    """QDockPushButton is a button to be used inside QDockWidgets. It will
    set the minimum height on Mac so it looks nice on both Mac and Windows"""
    def __init__(self, text, parent=None):
        QtGui.QPushButton.__init__(self, text, parent)
        if systemType in ['Darwin']:
            self.setMinimumHeight(32)
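# Usage sketch (illustrative; not part of the original module). It shows the
# QSearchTreeWindow contract documented above: a subclass only overrides
# createTreeWidget() to return a tree widget exposing searchItemName().
# "MyModuleTreeWidget" and "MyModuleSearchWindow" are hypothetical names.
#
#     class MyModuleTreeWidget(QSearchTreeWidget):
#         pass  # inherits searchItemName() from QSearchTreeWidget
#
#     class MyModuleSearchWindow(QSearchTreeWindow):
#         def createTreeWidget(self):
#             return MyModuleTreeWidget(self)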
CMUSV-VisTrails/WorkflowRecommendation
vistrails/gui/common_widgets.py
Python
bsd-3-clause
31,635
from django.test import TestCase
from gdz.models import GdzClas
from django.test import Client
from django.core.urlresolvers import reverse


class TestGdzClas(TestCase):
    fixtures = ['gdz_clas.json']

    def test_clas_view(self):
        clases = GdzClas.objects.all()
        client = Client()
        for clas in clases:
            # positional URL args belong in a list/tuple, not a set
            response = client.get(reverse('gdz:clas', args=[clas.slug]))
            self.assertEqual(response.status_code, 200)
audiua/shkolyar_django
gdz/tests/tests_gdz_clas.py
Python
mit
458
from .formSubmission import FormSubmission from django.contrib.auth.models import User from django.db import models from django.template.defaultfilters import slugify class Log(models.Model): """ Form Submission Log Database Model Attributes: * owner - user submitting the message * submission - form submission associated * timestamp - time of submission entry * private - display to non-owners? * message - log entry * mtype - type of log entry * 1 - user message (default) * 2 - system action * 3 - form status change * 4 - attached file * file - attached file entry """ owner = models.ForeignKey(User, blank=True, null=True) submission = models.ForeignKey(FormSubmission) timestamp = models.DateTimeField(auto_now_add=True) private = models.BooleanField(default=False) message = models.TextField(blank=True) mtype = models.IntegerField(default=1) file = models.FileField(upload_to='private/constellation_forms/log_files/') class Meta: db_table = "form_log" ordering = ("timestamp",) @property def extension(self): return self.file.name.split(".")[-1] @property def content_type(self): if self.extension == "pdf": return "application/pdf" if self.extension == "txt": return "text/plain" if self.extension == "png": return "image/png" if self.extension == "jpeg" or self.extension == "jpg": return "image/jpeg" if self.extension == "gif": return "image/gif" return "application/force-download" @property def file_name(self): return slugify("{0}_{1}_{2}".format(self.submission.form.name, self.pk, self.owner.username)) + "." + \ self.extension
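# Usage sketch (illustrative; not part of the original module). Creating a
# status-change entry, assuming "user" is a django.contrib.auth User and
# "submission" an existing FormSubmission:
#
#     entry = Log.objects.create(owner=user, submission=submission,
#                                message="Form approved", mtype=3)
#
# The extension/content_type properties derive from the attached file's
# name, e.g. an uploaded "scan.pdf" reports content_type "application/pdf".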
ConstellationApps/Forms
constellation_forms/models/log.py
Python
isc
1,879
# Copyright 2010 Hakan Kjellerstrand hakank@bonetmail.com # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """ Eq 10 in Google CP Solver. Standard benchmark problem. Compare with the following models: * MiniZinc: http://hakank.org/minizinc/eq10.mzn * ECLiPSe: http://hakank.org/eclipse/eq10.ecl * SICStus: http://hakank.org/sicstus/eq10.pl This model was created by Hakan Kjellerstrand (hakank@bonetmail.com) Also see my other Google CP Solver models: http://www.hakank.org/google_or_tools/ """ from ortools.constraint_solver import pywrapcp def main(): # Create the solver. solver = pywrapcp.Solver("Eq 10") # # data # n = 7 # # variables # X = [solver.IntVar(0, 10, "X(%i)" % i) for i in range(n)] X1, X2, X3, X4, X5, X6, X7 = X # # constraints # solver.Add(0 + 98527 * X1 + 34588 * X2 + 5872 * X3 + 59422 * X5 + 65159 * X7 == 1547604 + 30704 * X4 + 29649 * X6) solver.Add( 0 + 98957 * X2 + 83634 * X3 + 69966 * X4 + 62038 * X5 + 37164 * X6 + 85413 * X7 == 1823553 + 93989 * X1) solver.Add(900032 + 10949 * X1 + 77761 * X2 + 67052 * X5 == 0 + 80197 * X3 + 61944 * X4 + 92964 * X6 + 44550 * X7) solver.Add(0 + 73947 * X1 + 84391 * X3 + 81310 * X5 == 1164380 + 96253 * X2 + 44247 * X4 + 70582 * X6 + 33054 * X7) solver.Add(0 + 13057 * X3 + 42253 * X4 + 77527 * X5 + 96552 * X7 == 1185471 + 60152 * X1 + 21103 * X2 + 97932 * X6) solver.Add(1394152 + 66920 * X1 + 55679 * X4 == 0 + 64234 * X2 + 65337 * X3 + 45581 * X5 + 67707 * X6 + 98038 * X7) solver.Add(0 + 68550 * X1 + 27886 * X2 + 31716 * X3 + 73597 * X4 + 38835 * X7 == 279091 + 88963 * X5 + 76391 * X6) solver.Add(0 + 76132 * X2 + 71860 * X3 + 22770 * X4 + 68211 * X5 + 78587 * X6 == 480923 + 48224 * X1 + 82817 * X7) solver.Add(519878 + 94198 * X2 + 87234 * X3 + 37498 * X4 == 0 + 71583 * X1 + 25728 * X5 + 25495 * X6 + 70023 * X7) solver.Add(361921 + 78693 * X1 + 38592 * X5 + 38478 * X6 == 0 + 94129 * X2 + 43188 * X3 + 82528 * X4 + 69025 * X7) # # search and result # db = solver.Phase(X, solver.INT_VAR_SIMPLE, solver.INT_VALUE_SIMPLE) solver.NewSearch(db) num_solutions = 0 while solver.NextSolution(): num_solutions += 1 print "X:", [X[i].Value() for i in range(n)] print solver.EndSearch() print print "num_solutions:", num_solutions print "failures:", solver.Failures() print "branches:", solver.Branches() print "WallTime:", solver.WallTime() if __name__ == "__main__": main()
capturePointer/or-tools
examples/python/eq10.py
Python
apache-2.0
3,143
from opendc.models.memory import Memory from opendc.util.rest import Response def GET(request): """Get a list of the specifications of all Memories.""" # Get the Memories memories = Memory.query() # Return the Memories return Response( 200, 'Successfully retrieved Memories.', [x.to_JSON() for x in memories] )
atlarge-research/opendc-web-server
opendc/api/v1/specifications/memories/endpoint.py
Python
mit
365
from __future__ import division from __future__ import unicode_literals from __future__ import print_function from __future__ import absolute_import from builtins import * # NOQA from future import standard_library standard_library.install_aliases() # NOQA from abc import ABCMeta from abc import abstractmethod import chainer import chainer.functions as F from future.utils import with_metaclass from chainerrl.agents.dqn import DQN class AbstractDPP(with_metaclass(ABCMeta, DQN)): """Dynamic Policy Programming. See: https://arxiv.org/abs/1004.2027. """ @abstractmethod def _l_operator(self, qout): raise NotImplementedError() def _compute_target_values(self, exp_batch): batch_next_state = exp_batch['next_state'] if self.recurrent: target_next_qout, _ = self.target_model.n_step_forward( batch_next_state, exp_batch['next_recurrent_state'], output_mode='concat') else: target_next_qout = self.target_model(batch_next_state) next_q_expect = self._l_operator(target_next_qout) batch_rewards = exp_batch['reward'] batch_terminal = exp_batch['is_state_terminal'] return (batch_rewards + exp_batch['discount'] * (1 - batch_terminal) * next_q_expect) def _compute_y_and_t(self, exp_batch): batch_state = exp_batch['state'] batch_size = len(exp_batch['reward']) if self.recurrent: qout, _ = self.model.n_step_forward( batch_state, exp_batch['recurrent_state'], output_mode='concat') else: qout = self.model(batch_state) batch_actions = exp_batch['action'] # Q(s_t,a_t) batch_q = F.reshape(qout.evaluate_actions( batch_actions), (batch_size, 1)) with chainer.no_backprop_mode(): # Compute target values if self.recurrent: target_qout, _ = self.target_model.n_step_forward( batch_state, exp_batch['recurrent_state'], output_mode='concat') else: target_qout = self.target_model(batch_state) # Q'(s_t,a_t) target_q = F.reshape(target_qout.evaluate_actions( batch_actions), (batch_size, 1)) # LQ'(s_t,a) target_q_expect = F.reshape( self._l_operator(target_qout), (batch_size, 1)) # r + g * LQ'(s_{t+1},a) batch_q_target = F.reshape( self._compute_target_values(exp_batch), (batch_size, 1)) # Q'(s_t,a_t) + r + g * LQ'(s_{t+1},a) - LQ'(s_t,a) t = target_q + batch_q_target - target_q_expect return batch_q, t class DPP(AbstractDPP): """Dynamic Policy Programming with softmax operator. Args: eta (float): Positive constant. For other arguments, see DQN. """ def __init__(self, *args, **kwargs): self.eta = kwargs.pop('eta', 1.0) super().__init__(*args, **kwargs) def _l_operator(self, qout): return qout.compute_expectation(self.eta) class DPPL(AbstractDPP): """Dynamic Policy Programming with L operator. Args: eta (float): Positive constant. For other arguments, see DQN. """ def __init__(self, *args, **kwargs): self.eta = kwargs.pop('eta', 1.0) super().__init__(*args, **kwargs) def _l_operator(self, qout): return F.logsumexp(self.eta * qout.q_values, axis=1) / self.eta class DPPGreedy(AbstractDPP): """Dynamic Policy Programming with max operator. This algorithm corresponds to DPP with eta = infinity. """ def _l_operator(self, qout): return qout.max
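# Usage sketch (illustrative; not part of the original module). DPP agents
# are constructed like chainerrl's DQN plus the extra `eta` keyword popped
# in __init__ above; q_func, opt, rbuf and explorer are placeholders for a
# Q-function, optimizer, replay buffer and explorer:
#
#     agent = DPP(q_func, opt, rbuf, gamma=0.99, explorer=explorer, eta=1.0)
#     agent_l = DPPL(q_func, opt, rbuf, gamma=0.99, explorer=explorer, eta=0.1)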
toslunar/chainerrl
chainerrl/agents/dpp.py
Python
mit
3,779
"""empty message Revision ID: 2ab86fbb564b Revises: 3f049a4d4de8 Create Date: 2014-12-23 22:55:08.182385 """ # revision identifiers, used by Alembic. revision = '2ab86fbb564b' down_revision = '3f049a4d4de8' from alembic import op import sqlalchemy as sa def upgrade(): ### commands auto generated by Alembic - please adjust! ### pass ### end Alembic commands ### def downgrade(): ### commands auto generated by Alembic - please adjust! ### pass ### end Alembic commands ###
ossifrage/cliffdivers
migrations/versions/2ab86fbb564b_.py
Python
bsd-3-clause
506
from flask_bootstrap import __version__ as FLASK_BOOTSTRAP_VERSION from flask_nav import Nav from flask_nav.elements import Link, Navbar, Separator, Subgroup, Text, View nav = Nav() nav.register_element('frontend_top', Navbar( View('Flask-Bootstrap', '.index'), View('Home', '.index'), View('Forms Example', '.example_form'), View('Debug-Info', 'debug.debug_root'), Subgroup( 'Docs', Link('Flask-Bootstrap', 'http://pythonhosted.org/Flask-Bootstrap'), Link('Flask-AppConfig', 'https://github.com/mbr/flask-appconfig'), Link('Flask-Debug', 'https://github.com/mbr/flask-debug'), Separator(), Text('Bootstrap'), Link('Getting started', 'http://getbootstrap.com/getting-started/'), Link('CSS', 'http://getbootstrap.com/css/'), Link('Components', 'http://getbootstrap.com/components/'), Link('Javascript', 'http://getbootstrap.com/javascript/'), Link('Customize', 'http://getbootstrap.com/customize/'), ), Text('Using Flask-Bootstrap {}'.format(FLASK_BOOTSTRAP_VERSION)), ))
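# Usage sketch (illustrative; not part of the original module). The navbar
# registered above becomes renderable once the Nav instance is bound to a
# Flask app ("app" is a placeholder application):
#
#     from flask import Flask
#     app = Flask(__name__)
#     nav.init_app(app)
#
# Templates can then render it with {{ nav.frontend_top.render() }}.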
katakumpo/noscrapy
noscrapy/app/nav/controllers.py
Python
mit
1,089
# -*- coding: utf-8 -*- ############################################################################## # Taobao OpenERP Connector # Copyright 2013 OSCG # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## from osv import osv, fields from taobao_base import TaobaoMixin from crm import crm import openerp from taobao_base import mq_client from taobao_base import msg_route from .taobao_top import TOP class taobao_shop(osv.osv, TaobaoMixin): _inherit = "taobao.shop" _columns = { #taobao refund 'enable_auto_check_refund': fields.boolean(u'检查退换货'), 'refund_helpdesk_user_id': fields.many2one('res.users', u'默认负责人'), 'refund_helpdesk_section_id': fields.many2one('crm.case.section', u'默认销售团队'), 'refund_helpdesk_channel_id': fields.many2one('crm.case.channel', u'途径'), 'refund_helpdesk_priority': fields.selection(crm.AVAILABLE_PRIORITIES, u'优先级'), 'refund_helpdesk_categ_id': fields.many2one('crm.case.categ', 'Category', \ domain="['|',('section_id','=',False),('section_id','=',section_id),\ ('object_id.model', '=', 'crm.helpdesk')]"), 'refund_remind_user': fields.boolean(u'发送邮件'), } _defaults = { } class taobao_refund(osv.osv, TaobaoMixin): _name = "taobao.refund" _description = "Taobao Refund" _columns = { 'name': fields.char(u'名字', size=256), 'refund_id': fields.char(u'退款ID', size=256), 'tid': fields.char(u'交易ID', size=256), 'oid': fields.char(u'子订单ID', size=256), } def _top_refund_get(self, top, refund_id): rsp =top('taobao.refund.get', refund_id = refund_id, fields = ['refund_id', 'alipay_no', 'tid', 'oid', 'buyer_nick', 'seller_nick', 'total_fee', 'status', 'created', 'refund_fee', 'good_status', 'has_good_return', 'payment', 'reason', 'desc', 'num_iid', 'title', 'price', 'num', 'good_return_time', 'company_name', 'sid', 'address', 'shipping_type', 'refund_remind_timeout']) if rsp and rsp.get('refund', False): return rsp.refund else: return None def refund_ticket_new(self, cr, uid, shop, top, refund_id, remind_user = True): refund = self._top_refund_get(top, refund_id) if refund.seller_nick != shop.taobao_nick: return partner = self.pool.get('res.partner')._get(cr, uid, args = [('taobao_nick','=',refund.buyer_nick)]) order = self.pool.get('sale.order')._get(cr, uid, args = [('taobao_trade_id','=',refund.tid)]) if not (partner and order): self.pool.get('sale.order')._taobao_save_fullinfo(self.pool, cr, uid, refund.tid, shop, top) partner = self.pool.get('res.partner')._get(cr, uid, args = [('taobao_nick','=',refund.buyer_nick)]) order = self.pool.get('sale.order')._get(cr, uid, args = [('taobao_trade_id','=',refund.tid)]) partner_address_id = self.pool.get('res.partner').address_get(cr, uid, [partner.id]).get('default', None) desc = u""" 退款ID: %s 支付宝交易编号: %s 交易编号: %s 子订单编号: %s 买家昵称: %s 卖家昵称: %s 交易总金额: ¥%s 退款状态: %s 退款日期: %s 退款金额: %s 货物状态: %s 买家是否需要退货: %s 支付给卖家的金额: %s 退款原因: %s 退款说明: %s 申请退款的商品数字编号: %s 商品标题: %s 商品价格: %s 商品购买数量: %s 
""" % (refund.refund_id, refund.alipay_no, refund.tid, refund.oid, refund.buyer_nick, refund.seller_nick, refund.total_fee, refund.status, refund.created, refund.refund_fee, refund.good_status, refund.has_good_return, refund.payment, refund.reason, refund.desc, refund.num_iid, refund.title, refund.price, refund.num) helpdesk_obj = self.pool.get('crm.helpdesk') helpdesk_id = helpdesk_obj.create(cr, uid, { 'name': u'%s | %s | 退款编号:%s' % (refund.created, refund.buyer_nick, refund.refund_id), 'active': True, 'description': desc, 'user_id': shop.refund_helpdesk_user_id.id, 'section_id': shop.refund_helpdesk_section_id.id, 'partner_id': partner.id, 'partner_address_id':partner_address_id if partner_address_id else None, 'ref' : '%s,%s' % ('sale.order', str(order.id)), 'channel_id': shop.refund_helpdesk_channel_id.id, 'priority': shop.refund_helpdesk_priority, 'categ_id': shop.refund_helpdesk_categ_id.id, #'state': 'draft', }) if remind_user: helpdesk_obj.remind_user(cr, uid, [helpdesk_id]) cr.commit() @mq_client @msg_route(code = 202, notify = 'notify_refund', status = 'RefundSuccess') def TaobaoRefundSuccess(dbname, uid, app_key, rsp): #退款成功 #TODO receive return goods or pay customer return @mq_client @msg_route(code = 202, notify = 'notify_refund', status = 'RefundClosed') def TaobaoRefundClosed(dbname, uid, app_key, rsp): #退款关闭 pass @mq_client @msg_route(code = 202, notify = 'notify_refund', status = 'RefundCreated') def TaobaoRefundCreated(dbname, uid, app_key, rsp): #退款创建 notify_refund = rsp.packet.msg.notify_refund pool = openerp.pooler.get_pool(dbname) cr = pool.db.cursor() try: shop = pool.get('taobao.shop')._get(cr, uid, args = [('taobao_app_key','=',app_key)]) if shop.enable_auto_check_refund: top = TOP(shop.taobao_app_key, shop.taobao_app_secret, shop.taobao_session_key) pool.get('taobao.refund').refund_ticket_new(cr, uid, shop, top, notify_refund.refund_id, remind_user = shop.refund_remind_user) cr.commit() finally: cr.close() @mq_client @msg_route(code = 202, notify = 'notify_refund', status = 'RefundSellerAgreeAgreement') def TaobaoRefundSellerAgreeAgreement(dbname, uid, app_key, rsp): #卖家同意退#款协议 pass @mq_client @msg_route(code = 202, notify = 'notify_refund', status = 'RefundSellerRefuseAgreement') def TaobaoRefundSellerRefuseAgreement(dbname, uid, app_key, rsp): #卖家拒绝退款协议 pass @mq_client @msg_route(code = 202, notify = 'notify_refund', status = 'RefundBuyerModifyAgreement') def TaobaoRefundBuyerModifyAgreement(dbname, uid, app_key, rsp): #买家修改退款协议 pass @mq_client @msg_route(code = 202, notify = 'notify_refund', status = 'RefundBuyerReturnGoods') def TaobaoRefundBuyerReturnGoods(dbname, uid, app_key, rsp): #买家退货给卖家 #TODO add carrier to incoming picking pass @mq_client @msg_route(code = 202, notify = 'notify_refund', status = 'RefundCreateMessage') def TaobaoRefundCreateMessage(dbname, uid, app_key, rsp): #发表退款留言 pass @mq_client @msg_route(code = 202, notify = 'notify_refund', status = 'RefundBlockMessage') def TaobaoRefundBlockMessage(dbname, uid, app_key, rsp): #屏蔽退款留言 pass @mq_client @msg_route(code = 202, notify = 'notify_refund', status = 'RefundTimeoutRemind') def TaobaoRefundTimeoutRemind(dbname, uid, app_key, rsp): #退款超时提醒 #TODO send sms to customer? pass # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
Johnzero/OE7
openerp/addons-fg/taobao/taobao_refund.py
Python
agpl-3.0
8,384
# -*- coding: utf-8 -*- # Form implementation generated from reading ui file '.\specgram_dlg.ui' # # Created: Thu Jun 19 11:21:55 2014 # by: sparkle.QtWrapper UI code generator 4.9.6 # # WARNING! All changes made in this file will be lost! from sparkle.QtWrapper import QtCore, QtGui try: _fromUtf8 = QtCore.QString.fromUtf8 except AttributeError: def _fromUtf8(s): return s try: _encoding = QtGui.QApplication.UnicodeUTF8 def _translate(context, text, disambig): return QtGui.QApplication.translate(context, text, disambig, _encoding) except AttributeError: def _translate(context, text, disambig): return QtGui.QApplication.translate(context, text, disambig) class Ui_SpecDialog(object): def setupUi(self, SpecDialog): SpecDialog.setObjectName(_fromUtf8("SpecDialog")) SpecDialog.resize(362, 121) self.verticalLayout = QtGui.QVBoxLayout(SpecDialog) self.verticalLayout.setObjectName(_fromUtf8("verticalLayout")) self.formLayout = QtGui.QFormLayout() self.formLayout.setFieldGrowthPolicy(QtGui.QFormLayout.AllNonFixedFieldsGrow) self.formLayout.setObjectName(_fromUtf8("formLayout")) self.label = QtGui.QLabel(SpecDialog) self.label.setObjectName(_fromUtf8("label")) self.formLayout.setWidget(0, QtGui.QFormLayout.LabelRole, self.label) self.nfftSpnbx = QtGui.QSpinBox(SpecDialog) self.nfftSpnbx.setMinimum(8) self.nfftSpnbx.setMaximum(4096) self.nfftSpnbx.setProperty("value", 512) self.nfftSpnbx.setObjectName(_fromUtf8("nfftSpnbx")) self.formLayout.setWidget(0, QtGui.QFormLayout.FieldRole, self.nfftSpnbx) self.label_2 = QtGui.QLabel(SpecDialog) self.label_2.setObjectName(_fromUtf8("label_2")) self.formLayout.setWidget(1, QtGui.QFormLayout.LabelRole, self.label_2) self.windowCmbx = QtGui.QComboBox(SpecDialog) self.windowCmbx.setObjectName(_fromUtf8("windowCmbx")) self.windowCmbx.addItem(_fromUtf8("")) self.windowCmbx.addItem(_fromUtf8("")) self.windowCmbx.addItem(_fromUtf8("")) self.windowCmbx.addItem(_fromUtf8("")) self.windowCmbx.addItem(_fromUtf8("")) self.formLayout.setWidget(1, QtGui.QFormLayout.FieldRole, self.windowCmbx) self.label_3 = QtGui.QLabel(SpecDialog) self.label_3.setObjectName(_fromUtf8("label_3")) self.formLayout.setWidget(2, QtGui.QFormLayout.LabelRole, self.label_3) self.overlapSpnbx = QtGui.QSpinBox(SpecDialog) self.overlapSpnbx.setObjectName(_fromUtf8("overlapSpnbx")) self.formLayout.setWidget(2, QtGui.QFormLayout.FieldRole, self.overlapSpnbx) self.verticalLayout.addLayout(self.formLayout) self.buttonBox = QtGui.QDialogButtonBox(SpecDialog) self.buttonBox.setOrientation(QtCore.Qt.Horizontal) self.buttonBox.setStandardButtons(QtGui.QDialogButtonBox.Cancel|QtGui.QDialogButtonBox.Ok) self.buttonBox.setObjectName(_fromUtf8("buttonBox")) self.verticalLayout.addWidget(self.buttonBox) self.retranslateUi(SpecDialog) QtCore.QObject.connect(self.buttonBox, QtCore.SIGNAL(_fromUtf8("accepted()")), SpecDialog.accept) QtCore.QObject.connect(self.buttonBox, QtCore.SIGNAL(_fromUtf8("rejected()")), SpecDialog.reject) QtCore.QMetaObject.connectSlotsByName(SpecDialog) def retranslateUi(self, SpecDialog): SpecDialog.setWindowTitle(_translate("SpecDialog", "Dialog", None)) self.label.setText(_translate("SpecDialog", "NFFT", None)) self.label_2.setText(_translate("SpecDialog", "Window function", None)) self.windowCmbx.setItemText(0, _translate("SpecDialog", "Hanning", None)) self.windowCmbx.setItemText(1, _translate("SpecDialog", "Hamming", None)) self.windowCmbx.setItemText(2, _translate("SpecDialog", "Blackman", None)) self.windowCmbx.setItemText(3, _translate("SpecDialog", "Bartlett", None)) 
self.windowCmbx.setItemText(4, _translate("SpecDialog", "none", None)) self.label_3.setText(_translate("SpecDialog", "% overlap", None))
Joel-U/sparkle
sparkle/gui/dialogs/specgram_dlg_form.py
Python
gpl-3.0
4,157
#!/usr/bin/python2
#
# Copyright (C) 2014 FreeIPA Contributors see COPYING for license
#

from lxml import etree
import dns.name

from ipapython import ipa_log_manager, ipautil

# hack: zone object UUID is stored as path to imaginary zone file
ENTRYUUID_PREFIX = "/var/lib/ipa/dns/zone/entryUUID/"
ENTRYUUID_PREFIX_LEN = len(ENTRYUUID_PREFIX)


class ZoneListReader(object):
    def __init__(self):
        self.names = set()  # dns.name
        self.uuids = set()  # UUID strings
        self.mapping = dict()  # {UUID: dns.name}
        self.log = ipa_log_manager.log_mgr.get_logger(self)

    def _add_zone(self, name, zid):
        """Add zone & UUID to internal structures.

        Zone with given name and UUID must not exist."""
        # detect duplicate zone names
        name = dns.name.from_text(name)
        assert name not in self.names, \
            'duplicate name (%s, %s) vs. %s' % (name, zid, self.mapping)
        # duplicate non-None zid is not allowed
        assert not zid or zid not in self.uuids, \
            'duplicate UUID (%s, %s) vs. %s' % (name, zid, self.mapping)

        self.names.add(name)
        self.uuids.add(zid)
        self.mapping[zid] = name

    def _del_zone(self, name, zid):
        """Remove zone & UUID from internal structures.

        Zone with given name and UUID must exist.
        """
        name = dns.name.from_text(name)
        assert zid is not None
        assert name in self.names, \
            'name (%s, %s) does not exist in %s' % (name, zid, self.mapping)
        assert zid in self.uuids, \
            'UUID (%s, %s) does not exist in %s' % (name, zid, self.mapping)
        assert zid in self.mapping and name == self.mapping[zid], \
            'pair {%s: %s} does not exist in %s' % (zid, name, self.mapping)

        self.names.remove(name)
        self.uuids.remove(zid)
        del self.mapping[zid]


class ODSZoneListReader(ZoneListReader):
    """One-shot parser for ODS zonelist.xml."""
    def __init__(self, zonelist_text):
        super(ODSZoneListReader, self).__init__()
        xml = etree.fromstring(zonelist_text)
        self._parse_zonelist(xml)

    def _parse_zonelist(self, xml):
        """Iterate over Zone elements with attribute 'name' and add IPA
        zones to the internal zone list."""
        for zone_xml in xml.xpath('/ZoneList/Zone[@name]'):
            name, zid = self._parse_ipa_zone(zone_xml)
            self._add_zone(name, zid)

    def _parse_ipa_zone(self, zone_xml):
        """Extract zone name, input adapter and detect IPA zones.

        IPA zones contain an Adapters/Input/Adapter element with attribute
        type = "File" and a value prefixed with ENTRYUUID_PREFIX.

        Returns:
            tuple (zone name, ID)
        """
        name = zone_xml.get('name')
        in_adapters = zone_xml.xpath(
            'Adapters/Input/Adapter[@type="File" '
            'and starts-with(text(), "%s")]' % ENTRYUUID_PREFIX)
        assert len(in_adapters) == 1, 'only IPA zones are supported: %s' \
            % etree.tostring(zone_xml)
        path = in_adapters[0].text
        # strip prefix from path
        zid = path[ENTRYUUID_PREFIX_LEN:]
        return (name, zid)


class LDAPZoneListReader(ZoneListReader):
    def __init__(self):
        super(LDAPZoneListReader, self).__init__()

    def process_ipa_zone(self, op, uuid, zone_ldap):
        assert (op == 'add' or op == 'del'), 'unsupported op %s' % op
        assert uuid is not None
        assert 'idnsname' in zone_ldap, \
            'LDAP zone UUID %s without idnsName' % uuid
        assert len(zone_ldap['idnsname']) == 1, \
            'LDAP zone UUID %s with len(idnsname) != 1' % uuid

        if op == 'add':
            self._add_zone(zone_ldap['idnsname'][0], uuid)
        elif op == 'del':
            self._del_zone(zone_ldap['idnsname'][0], uuid)


class ODSMgr(object):
    """OpenDNSSEC zone manager. It does LDAP->ODS synchronization.

    Zones with idnsSecInlineSigning attribute = TRUE in LDAP are added
    or deleted from ODS as necessary. ODS->LDAP key synchronization has
    to be solved separately.
""" def __init__(self): self.log = ipa_log_manager.log_mgr.get_logger(self) self.zl_ldap = LDAPZoneListReader() def ksmutil(self, params): """Call ods-ksmutil with given parameters and return stdout. Raises CalledProcessError if returncode != 0. """ cmd = ['ods-ksmutil'] + params result = ipautil.run(cmd, capture_output=True) return result.output def get_ods_zonelist(self): stdout = self.ksmutil(['zonelist', 'export']) reader = ODSZoneListReader(stdout) return reader def add_ods_zone(self, uuid, name): zone_path = '%s%s' % (ENTRYUUID_PREFIX, uuid) cmd = ['zone', 'add', '--zone', str(name), '--input', zone_path] output = self.ksmutil(cmd) self.log.info(output) self.notify_enforcer() def del_ods_zone(self, name): # ods-ksmutil blows up if zone name has period at the end name = name.relativize(dns.name.root) # detect if name is root zone if name == dns.name.empty: name = dns.name.root cmd = ['zone', 'delete', '--zone', str(name)] output = self.ksmutil(cmd) self.log.info(output) self.notify_enforcer() self.cleanup_signer(name) def notify_enforcer(self): cmd = ['notify'] output = self.ksmutil(cmd) self.log.info(output) def cleanup_signer(self, zone_name): cmd = ['ods-signer', 'ldap-cleanup', str(zone_name)] output = ipautil.run(cmd, capture_output=True) self.log.info(output) def ldap_event(self, op, uuid, attrs): """Record single LDAP event - zone addition or deletion. Change is only recorded to memory. self.sync() have to be called to synchronize change to ODS.""" assert op == 'add' or op == 'del' self.zl_ldap.process_ipa_zone(op, uuid, attrs) self.log.debug("LDAP zones: %s", self.zl_ldap.mapping) def sync(self): """Synchronize list of zones in LDAP with ODS.""" zl_ods = self.get_ods_zonelist() self.log.debug("ODS zones: %s", zl_ods.mapping) removed = self.diff_zl(zl_ods, self.zl_ldap) self.log.info("Zones removed from LDAP: %s", removed) added = self.diff_zl(self.zl_ldap, zl_ods) self.log.info("Zones added to LDAP: %s", added) for (uuid, name) in removed: self.del_ods_zone(name) for (uuid, name) in added: self.add_ods_zone(uuid, name) def diff_zl(self, s1, s2): """Compute zones present in s1 but not present in s2. Returns: List of (uuid, name) tuples with zones present only in s1.""" s1_extra = s1.uuids - s2.uuids removed = [(uuid, name) for (uuid, name) in s1.mapping.items() if uuid in s1_extra] return removed if __name__ == '__main__': ipa_log_manager.standard_logging_setup(debug=True) ods = ODSMgr() reader = ods.get_ods_zonelist() ipa_log_manager.root_logger.info('ODS zones: %s', reader.mapping)
ofayans/freeipa
ipapython/dnssec/odsmgr.py
Python
gpl-3.0
7,203
""" Copyright (C) 2014-2015, Michele Cappellari E-mail: michele.cappellari_at_physics.ox.ac.uk http://purl.org/cappellari V1.0: Created to emulate my IDL procedure with the same name. Michele Cappellari, Oxford, 28 March 2014 V1.1: Included reversed colormap. MC, Oxford, 9 August 2015 """ from matplotlib import colors ############################################################################## # V1.0: SAURON colormap by Michele Cappellari & Eric Emsellem, Leiden, 10 July 2001 # # Start with these 7 equally spaced coordinates, then add 4 additional points # x = findgen(7)*255/6. + 1 # 1.0 43.5 86.0 128.5 171.0 213.5 256.0 # # x = [1.0, 43.5, 86.0, 86.0+20, 128.5-10, 128.5, 128.5+10, 171.0-20, 171.0, 213.5, 256.0] # red = [0.0, 0.0, 0.4, 0.5, 0.3, 0.0, 0.7, 1.0, 1.0, 1.0, 0.9] # green = [0.0, 0.0, 0.85, 1.0, 1.0, 0.9, 1.0, 1.0, 0.85, 0.0, 0.9] # blue = [0.0, 1.0, 1.0, 1.0, 0.7, 0.0, 0.0, 0.0, 0.0, 0.0, 0.9] _cdict = {'red':[(0.000, 0.01, 0.01), (0.170, 0.0, 0.0), (0.336, 0.4, 0.4), (0.414, 0.5, 0.5), (0.463, 0.3, 0.3), (0.502, 0.0, 0.0), (0.541, 0.7, 0.7), (0.590, 1.0, 1.0), (0.668, 1.0, 1.0), (0.834, 1.0, 1.0), (1.000, 0.9, 0.9)], 'green':[(0.000, 0.01, 0.01), (0.170, 0.0, 0.0), (0.336, 0.85, 0.85), (0.414, 1.0, 1.0), (0.463, 1.0, 1.0), (0.502, 0.9, 0.9), (0.541, 1.0, 1.0), (0.590, 1.0, 1.0), (0.668, 0.85, 0.85), (0.834, 0.0, 0.0), (1.000, 0.9, 0.9)], 'blue':[(0.000, 0.01, 0.01), (0.170, 1.0, 1.0), (0.336, 1.0, 1.0), (0.414, 1.0, 1.0), (0.463, 0.7, 0.7), (0.502, 0.0, 0.0), (0.541, 0.0, 0.0), (0.590, 0.0, 0.0), (0.668, 0.0, 0.0), (0.834, 0.0, 0.0), (1.000, 0.9, 0.9)] } _rdict = {'red':[(0.000, 0.9, 0.9), (0.170, 1.0, 1.0), (0.336, 1.0, 1.0), (0.414, 1.0, 1.0), (0.463, 0.7, 0.7), (0.502, 0.0, 0.0), (0.541, 0.3, 0.3), (0.590, 0.5, 0.5), (0.668, 0.4, 0.4), (0.834, 0.0, 0.0), (1.000, 0.01, 0.01)], 'green':[(0.000, 0.9, 0.9), (0.170, 0.0, 0.0), (0.336, 0.85, 0.85), (0.414, 1.0, 1.0), (0.463, 1.0, 1.0), (0.502, 0.9, 0.9), (0.541, 1.0, 1.0), (0.590, 1.0, 1.0), (0.668, 0.85, 0.85), (0.834, 0.0, 0.0), (1.000, 0.01, 0.01)], 'blue':[(0.000, 0.9, 0.9), (0.170, 0.0, 0.0), (0.336, 0.0, 0.0), (0.414, 0.0, 0.0), (0.463, 0.0, 0.0), (0.502, 0.0, 0.0), (0.541, 0.7, 0.7), (0.590, 1.0, 1.0), (0.668, 1.0, 1.0), (0.834, 1.0, 1.0), (1.000, 0.01, 0.01)] } sauron = colors.LinearSegmentedColormap('sauron', _cdict) sauron_r = colors.LinearSegmentedColormap('sauron_r', _rdict) ############################################################################## # Usage example for the SAURON colormap if __name__ == '__main__': import matplotlib.pyplot as plt import numpy as np n = 41 x, y = np.ogrid[-n:n, -n:n] img = x**2 - 2*y**2 plt.clf() plt.subplot(121) plt.imshow(img, cmap=sauron) plt.title("SAURON colormap") plt.subplot(122) plt.imshow(img, cmap=sauron_r) plt.title("reversed colormap") plt.pause(0.01)
TimothyADavis/KinMSpy
kinms/utils/sauron_colormap.py
Python
mit
4,448
# Example for: restraints.reindex() # This will reindex restraints obtained previously for a simpler topology so # that they will now apply to a more complicated topology. from modeller import * from modeller.scripts import complete_pdb env = environ() env.io.atom_files_directory = ['../atom_files'] tpl = env.libs.topology par = env.libs.parameters # Generate the model for the simpler topology (CA only in this case): tpl.read(file='$(LIB)/top_ca.lib') par.read(file='$(LIB)/par_ca.lib') code = '1fas' mdl = complete_pdb(env, code) mdl.write(file=code+'.ca') # Generate the restraints for the simpler topology: sel = selection(mdl) mdl.restraints.make(sel, restraint_type='stereo', spline_on_site=False) mdl.restraints.write(file='1fas-ca.rsr') sel.energy() # Generate the model for the more complicated topology: tpl.read(file='$(LIB)/top_heav.lib') par.read(file='$(LIB)/par.lib') mdl.read(file=code) aln = alignment(env) aln.append_model(mdl, atom_files=code, align_codes=code) aln.append_model(mdl, atom_files=code+'.ini', align_codes=code+'-ini') mdl.clear_topology() mdl.generate_topology(aln[code+'-ini']) mdl.transfer_xyz(aln) mdl.write(file='1fas.ini') mdl2 = model(env, file='1fas.ca') mdl.restraints.reindex(mdl2) mdl.restraints.write(file='1fas.rsr') sel = selection(mdl) sel.energy()
bjornwallner/proq2-server
apps/modeller9v8/examples/commands/reindex_restraints.py
Python
gpl-3.0
1,309
#!/usr/bin/env python
import os, pygame
from pygame.compat import xrange_

main_dir = os.path.split(os.path.abspath(__file__))[0]
data_dir = os.path.join(main_dir, 'data')

def show (image):
    screen = pygame.display.get_surface()
    screen.fill ((255, 255, 255))
    screen.blit (image, (0, 0))
    pygame.display.flip ()
    while 1:
        event = pygame.event.wait ()
        if event.type == pygame.QUIT:
            raise SystemExit
        if event.type == pygame.MOUSEBUTTONDOWN:
            break

def main():
    pygame.init ()
    pygame.display.set_mode ((255, 255))
    surface = pygame.Surface ((255, 255))
    pygame.display.flip ()

    # Create the PixelArray.
    ar = pygame.PixelArray (surface)
    r, g, b = 0, 0, 0
    # Do some easy gradient effect.
    for y in xrange_ (255):
        r, g, b = y, y, y
        ar[:,y] = (r, g, b)
    del ar
    show (surface)

    # We have made some gradient effect, now flip it.
    ar = pygame.PixelArray (surface)
    ar[:] = ar[:,::-1]
    del ar
    show (surface)

    # Every second column will be made blue
    ar = pygame.PixelArray (surface)
    ar[::2] = (0, 0, 255)
    del ar
    show (surface)

    # Every second row will be made green
    ar = pygame.PixelArray (surface)
    ar[:,::2] = (0, 255, 0)
    del ar
    show (surface)

    # Manipulate the image. Flip it around the y axis.
    surface = pygame.image.load (os.path.join (data_dir, 'arraydemo.bmp'))
    ar = pygame.PixelArray (surface)
    ar[:] = ar[:,::-1]
    del ar
    show (surface)

    # Flip the image around the x axis.
    ar = pygame.PixelArray (surface)
    ar[:] = ar[::-1,:]
    del ar
    show (surface)

    # Every second column will be made white.
    ar = pygame.PixelArray (surface)
    ar[::2] = (255, 255, 255)
    del ar
    show (surface)

    # Flip the image around both axes, restoring its original layout.
    ar = pygame.PixelArray (surface)
    ar[:] = ar[::-1,::-1]
    del ar
    show (surface)

    # Rotate 90 degrees clockwise.
    w, h = surface.get_size ()
    surface2 = pygame.Surface ((h, w), surface.get_flags (), surface)
    ar = pygame.PixelArray (surface)
    ar2 = pygame.PixelArray (surface2)
    ar2[...] = ar.transpose ()[::-1,:]
    del ar, ar2
    show (surface2)

    # Scale it by throwing each second pixel away.
    surface = pygame.image.load (os.path.join (data_dir, 'arraydemo.bmp'))
    ar = pygame.PixelArray (surface)
    sf2 = ar[::2,::2].make_surface ()
    del ar
    show (sf2)

    # Replace anything looking like the blue color from the text.
    ar = pygame.PixelArray (surface)
    ar.replace ((60, 60, 255), (0, 255, 0), 0.06)
    del ar
    show (surface)

    # Extract anything which might be somewhat black.
    surface = pygame.image.load (os.path.join (data_dir, 'arraydemo.bmp'))
    ar = pygame.PixelArray (surface)
    ar2 = ar.extract ((0, 0, 0), 0.07)
    sf2 = ar2.surface
    del ar, ar2
    show (sf2)

    # Compare two images.
    surface = pygame.image.load (os.path.join (data_dir, 'alien1.gif'))
    surface2 = pygame.image.load (os.path.join (data_dir, 'alien2.gif'))
    ar1 = pygame.PixelArray (surface)
    ar2 = pygame.PixelArray (surface2)
    ar3 = ar1.compare (ar2, 0.07)
    sf3 = ar3.surface
    del ar1, ar2, ar3
    show (sf3)

if __name__ == '__main__':
    main()
mark-me/Pi-Jukebox
venv/Lib/site-packages/pygame/examples/pixelarray.py
Python
agpl-3.0
3,318
# -*- coding: utf-8 -*- import common_sale_contract import test_sale_contract
tvtsoft/odoo8
addons/sale_contract/tests/__init__.py
Python
agpl-3.0
78
# -*- encoding: utf-8 -*-
##############################################################################
#
#    OpenERP, Open Source Management Solution
#    Module: l10n_hr
#    Author: Goran Kliska
#    mail:   goran.kliska(AT)slobodni-programi.hr
#    Copyright: Slobodni programi d.o.o., Zagreb
#    Contributions:
#        Tomislav Bošnjaković, Storm Computers d.o.o. :
#           - account types
#
#    This program is free software: you can redistribute it and/or modify
#    it under the terms of the GNU Affero General Public License as
#    published by the Free Software Foundation, either version 3 of the
#    License, or (at your option) any later version.
#
#    This program is distributed in the hope that it will be useful,
#    but WITHOUT ANY WARRANTY; without even the implied warranty of
#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
#    GNU Affero General Public License for more details.
#
#    You should have received a copy of the GNU Affero General Public License
#    along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################

{
    "name": "Croatia - RRIF's 2012 chart of accounts",
    "description": """
Croatian localisation.
======================

Author: Goran Kliska, Slobodni programi d.o.o., Zagreb
        http://www.slobodni-programi.hr

Contributions:
  Infokom d.o.o.
  Storm Computers d.o.o.

Description:
Croatian Chart of Accounts (RRIF ver.2012)

RRIF-ov računski plan za poduzetnike za 2012.
Vrste konta
Kontni plan prema RRIF-u, dorađen u smislu kraćenja naziva i dodavanja analitika
Porezne grupe prema poreznoj prijavi
Porezi PDV-a
Ostali porezi (samo češće korišteni) povezani s kontima kontnog plana

Izvori podataka:
 http://www.rrif.hr/dok/preuzimanje/rrif-rp2011.rar
 http://www.rrif.hr/dok/preuzimanje/rrif-rp2012.rar

""",
    "version": "2012.1",
    "author": "OpenERP Croatian Community",
    "category": 'Localization/Account Charts',
    "website": "https://code.launchpad.net/openobject-croatia",
    'depends': [
        'account',
        'base_vat',
        'base_iban',
        'account_chart',
#        'account_coda',
    ],
    'data': [
        'data/account.account.type.csv',
        'data/account.tax.code.template.csv',
        'data/account.account.template.csv',
        'l10n_hr_wizard.xml',
        'data/account.tax.template.csv',
        'data/fiscal_position.xml',
    ],
    "demo": [],
    'test': [],
    "active": False,
    "installable": True,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
BorgERP/borg-erp-6of3
l10n_hr/l10n_hr/__openerp__.py
Python
agpl-3.0
2,748
# Authors: Alexandre Gramfort <alexandre.gramfort@inria.fr> # Eric Larson <larson.eric.d@gmail.com> # Joan Massich <mailsik@gmail.com> # Guillaume Favelier <guillaume.favelier@gmail.com> # # License: Simplified BSD import os import sys import pytest import numpy as np from mne.utils import run_subprocess from mne.viz import set_3d_backend, get_3d_backend, Figure3D from mne.viz.backends.renderer import _get_renderer from mne.viz.backends.tests._utils import skips_if_not_pyvistaqt from mne.viz.backends._utils import ALLOWED_QUIVER_MODES @pytest.mark.parametrize('backend', [ pytest.param('pyvistaqt', marks=skips_if_not_pyvistaqt), pytest.param('foo', marks=pytest.mark.xfail(raises=ValueError)), ]) def test_backend_environment_setup(backend, monkeypatch): """Test set up 3d backend based on env.""" monkeypatch.setenv("MNE_3D_BACKEND", backend) monkeypatch.setattr( 'mne.viz.backends.renderer.MNE_3D_BACKEND', None) assert os.environ['MNE_3D_BACKEND'] == backend # just double-check # reload the renderer to check if the 3d backend selection by # environment variable has been updated correctly from mne.viz.backends import renderer renderer.set_3d_backend(backend) assert renderer.MNE_3D_BACKEND == backend assert renderer.get_3d_backend() == backend def test_3d_functions(renderer): """Test figure management functions.""" fig = renderer.create_3d_figure((300, 300)) assert isinstance(fig, Figure3D) wrap_renderer = renderer.backend._Renderer(fig=fig) wrap_renderer.sphere(np.array([0., 0., 0.]), 'w', 1.) renderer.backend._check_3d_figure(fig) renderer.set_3d_view(figure=fig, azimuth=None, elevation=None, focalpoint=(0., 0., 0.), distance=None) renderer.set_3d_title(figure=fig, title='foo') renderer.backend._take_3d_screenshot(figure=fig) renderer.close_3d_figure(fig) renderer.close_all_3d_figures() def test_3d_backend(renderer): """Test default plot.""" # set data win_size = (600, 600) win_color = 'black' tet_size = 1.0 tet_x = np.array([0, tet_size, 0, 0]) tet_y = np.array([0, 0, tet_size, 0]) tet_z = np.array([0, 0, 0, tet_size]) tet_indices = np.array([[0, 1, 2], [0, 1, 3], [0, 2, 3], [1, 2, 3]]) tet_color = 'white' sph_center = np.column_stack((tet_x, tet_y, tet_z)) sph_color = 'red' sph_scale = tet_size / 3.0 ct_scalars = np.array([0.0, 0.0, 0.0, 1.0]) ct_levels = [0.2, 0.4, 0.6, 0.8] ct_surface = { "rr": sph_center, "tris": tet_indices } qv_color = 'blue' qv_scale = tet_size / 2.0 qv_center = np.array([np.mean((sph_center[va, :], sph_center[vb, :], sph_center[vc, :]), axis=0) for (va, vb, vc) in tet_indices]) center = np.mean(qv_center, axis=0) qv_dir = qv_center - center qv_scale_mode = 'scalar' qv_scalars = np.linspace(1.0, 2.0, 4) txt_x = 0.0 txt_y = 0.0 txt_text = "renderer" txt_size = 14 cam_distance = 5 * tet_size # init scene rend = renderer.create_3d_figure( size=win_size, bgcolor=win_color, smooth_shading=True, scene=False, ) for interaction in ('terrain', 'trackball'): rend.set_interaction(interaction) # use mesh mesh_data = rend.mesh( x=tet_x, y=tet_y, z=tet_z, triangles=tet_indices, color=tet_color, ) rend.remove_mesh(mesh_data) # use contour rend.contour(surface=ct_surface, scalars=ct_scalars, contours=ct_levels, kind='line') rend.contour(surface=ct_surface, scalars=ct_scalars, contours=ct_levels, kind='tube') # use sphere rend.sphere(center=sph_center, color=sph_color, scale=sph_scale, radius=1.0) # use quiver3d kwargs = dict( x=qv_center[:, 0], y=qv_center[:, 1], z=qv_center[:, 2], u=qv_dir[:, 0], v=qv_dir[:, 1], w=qv_dir[:, 2], color=qv_color, scale=qv_scale, 
scale_mode=qv_scale_mode, scalars=qv_scalars, ) for mode in ALLOWED_QUIVER_MODES: rend.quiver3d(mode=mode, **kwargs) with pytest.raises(ValueError, match='Invalid value'): rend.quiver3d(mode='foo', **kwargs) # use tube rend.tube(origin=np.array([[0, 0, 0]]), destination=np.array([[0, 1, 0]])) _, tube = rend.tube(origin=np.array([[1, 0, 0]]), destination=np.array([[1, 1, 0]]), scalars=np.array([[1.0, 1.0]])) # scalar bar rend.scalarbar(source=tube, title="Scalar Bar", bgcolor=[1, 1, 1]) # use text rend.text2d(x_window=txt_x, y_window=txt_y, text=txt_text, size=txt_size, justification='right') rend.text3d(x=0, y=0, z=0, text=txt_text, scale=1.0) rend.set_camera(azimuth=180.0, elevation=90.0, distance=cam_distance, focalpoint=center) rend.reset_camera() rend.show() def test_get_3d_backend(renderer): """Test get_3d_backend function call for side-effects.""" # Test twice to ensure the first call had no side-effect orig_backend = renderer.MNE_3D_BACKEND assert renderer.get_3d_backend() == orig_backend assert renderer.get_3d_backend() == orig_backend def test_renderer(renderer, monkeypatch): """Test that renderers are available on demand.""" backend = renderer.get_3d_backend() cmd = [sys.executable, '-uc', 'import mne; mne.viz.create_3d_figure((800, 600), show=True); ' 'backend = mne.viz.get_3d_backend(); ' 'assert backend == %r, backend' % (backend,)] monkeypatch.setenv('MNE_3D_BACKEND', backend) run_subprocess(cmd) def test_set_3d_backend_bad(monkeypatch, tmp_path): """Test that the error emitted when a bad backend name is used.""" match = "Allowed values are 'pyvistaqt' and 'notebook'" with pytest.raises(ValueError, match=match): set_3d_backend('invalid') # gh-9607 def fail(x): raise ModuleNotFoundError(x) monkeypatch.setattr('mne.viz.backends.renderer._reload_backend', fail) monkeypatch.setattr( 'mne.viz.backends.renderer.MNE_3D_BACKEND', None) # avoid using the config monkeypatch.setenv('_MNE_FAKE_HOME_DIR', str(tmp_path)) match = 'Could not load any valid 3D.*\npyvistaqt: .*' assert get_3d_backend() is None with pytest.raises(RuntimeError, match=match): _get_renderer()
mne-tools/mne-python
mne/viz/backends/tests/test_renderer.py
Python
bsd-3-clause
6,762
from __future__ import absolute_import from sentry import http from sentry.identity.oauth2 import OAuth2Provider def get_user_info(url, access_token): session = http.build_session() resp = session.get( u"https://{}/api/v3/user".format(url), params={"access_token": access_token}, headers={"Accept": "application/vnd.github.machine-man-preview+json"}, verify=False, ) resp.raise_for_status() resp = resp.json() return resp class GitHubEnterpriseIdentityProvider(OAuth2Provider): key = "github_enterprise" name = "GitHub Enterprise" oauth_scopes = () def build_identity(self, data): data = data["data"] # todo(meredith): this doesn't work yet, need to pass in the base url user = get_user_info(data["access_token"]) return { "type": "github_enterprise", "id": user["id"], "email": user["email"], "scopes": [], # GitHub apps do not have user scopes "data": self.get_oauth_data(data), }
mvaled/sentry
src/sentry/identity/github_enterprise/provider.py
Python
bsd-3-clause
1,065
# Copyright 2020 Tecnativa - Sergio Teruel # Copyright 2020 Tecnativa - Carlos Dauden # License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl). from odoo import fields, models class ProductTemplate(models.Model): _inherit = 'product.template' inventory_availability = fields.Selection(selection_add=[ ('custom_block', 'Block sales on website and display a message custom'), ])
Vauxoo/e-commerce
website_sale_stock_force_block/models/product_template.py
Python
agpl-3.0
416
#!/usr/bin/env python
"""
Title : Java program properties file generator
Author : JG
Date : dec 2016
Purpose : script to create a properties file for a program
in : get infos from yml
out : print infos in properties file
"""
import sys,os
import yaml
import util as u
from random import randint

# ===============================================
# FUNCTION create_properties_file
# in : get infos from yml
# out : print infos in properties file
# ===============================================
def create_properties_file(yml,armaDir):
    progDir = u.define_prop_path(armaDir)
    filename = progDir+""+u.get_program_name(yml)+".properties"
    out = open(filename, 'w')
    out.write("#Armadillo Workflow Platform 1.1 (c) Etienne Lord, Mickael Leclercq, Alix Boc, Abdoulaye Banire Diallo, Vladimir Makarenkov"+
              "\n#"+yml['author']+
              "\n#"+yml['date']+
              "\n#Program info"+
              "\nName= "+yml['Program']['name']+
              "\nClassName= programs."+u.get_program_name(yml)+""+
              "\nEditorClassName= editors."+u.get_program_name(yml)+"Editors"+
              "\ndebug= false"+
              "\nfilename= C\:\\armadillo2\\data\\properties\\"+u.get_program_name(yml)+".properties")
    for paths in yml['Program']['executablePaths']:
        out.write("\n"+paths+"="+yml['Program']['executablePaths'][paths])
    out.write("\nHelpSupplementary=")
    if yml['Program']['helpSupplementary']:
        out.write(yml['Program']['helpSupplementary'])
    out.write("\nPublication= ")
    if yml['Program']['publication']:
        out.write(yml['Program']['publication'])
    out.write("\nDescription= ")
    if yml['Program']['desc']:
        out.write(yml['Program']['desc'])
    ObjectID = randint(1000000000,9999999999)
    out.write("\nObjectID="+u.get_program_name(yml)+"_"+str(ObjectID)+""+
              "\nObjectType=Program"+
              "\nNoThread=false")
    out.write("\nType=")
    if yml['Program']['menu']:
        out.write(yml['Program']['menu'])
    out.write("\nNormalExitValue=")
    if yml['Program']['exitValue'] or yml['Program']['exitValue'] == 0:
        out.write(str(yml['Program']['exitValue']))
    out.write("\nVerifyExitValue=")
    if yml['Program']['exitValue']:
        out.write('true')
    else:
        out.write('false')
    out.write("\nWebServices=")
    if yml['Program']['webServices']:
        out.write(yml['Program']['webServices'])
    out.write("\nWebsite=")
    if yml['Program']['website']:
        out.write(yml['Program']['website'])
    # Color options
    color = u.get_color(yml)
    out.write("\ncolorMode = "+color+""+
              "\ndefaultColor = "+color+"")
    # Inputs types
    out.write("\n#INPUTS TYPES")
    if len(yml['Inputs']) > 0:
        o = ""
        s = ""
        for op in yml['Inputs']:
            if op['type']:
                out.write("\nInput"+op['type']+"=Connector"+str(op['connector']))
            if op['OneConnectorOnlyFor']:
                if o == "":
                    o = str(op['OneConnectorOnlyFor'])
                else:
                    t = str(op['OneConnectorOnlyFor'])
                    if t not in o:
                        o = o+","+t
            if op['SolelyConnectors']:
                if s == "":
                    s = str(op['SolelyConnectors'])
                else:
                    t = str(op['SolelyConnectors'])
                    # check against s, the string being built here (the
                    # original compared against o, a copy-paste slip)
                    if t not in s:
                        s = s+","+t
        # Inputs options
        if o != "" or s != "":
            out.write("\n#INPUTS OPTIONS")
            if o != "":
                out.write("\nOneConnectorOnlyFor="+o)
            if s != "":
                out.write("\nSolelyConnectors= "+s)
    else:
        out.write("\nNO INPUTS ??\n")
    # Inputs Names
    out.write("\n#INPUTS Connector text")
    tab = ('2','3','4')
    for t in tab:
        c = ""
        if len(yml['Inputs']) > 0:
            for op in yml['Inputs']:
                o = str(op['connector'])
                if t in o or "true" in o:
                    if c == "":
                        c = str(op['connectorText'])
                    else:
                        s = str(op['connectorText'])
                        if s not in c:
                            c = c+", "+s
        if c != "":
            out.write("\nConnector"+t+"= "+c)
    # Number of inputs
    out.write("\nnbInput= ")
    if yml['Program']['numImputs']:
        out.write(str(yml['Program']['numImputs']))
    # Outputs values
    out.write("\n#OUTPUTS OPTIONS"+
              "\nConnector0Output=True"+
              "\nOutputResults=Connector0"+
"\nOutputOutputText=Connector0") if len(yml['Outputs']) > 0: for op in yml['Outputs']: if op['type']: out.write("\nOutput"+op['type']+"=Connector0") # Default Values out.write("\n#DEFAULT VALUES"+ "\ndefaultPgrmValues=") for Panel in yml['Menus']: pNameS = u.name_without_space(Panel['name']) if 'Panel' not in Panel: # Means default option out.write(""+pNameS+"<>true<>") else: for Tab in Panel['Panel']: if 'Arguments' in Tab: tName = Tab['tab'] for Arguments in Tab['Arguments']: cName = Arguments['name'] if 'values' in Arguments and \ Arguments['values'] is not None and \ Arguments['values']['vType'] is not None: vType = Arguments['values']['vType'] v = u.create_value_name(pNameS,tName,cName,vType) vDef = str(Arguments['values']['vDefault']) out.write(v+"<>"+vDef+"<>") out.write("\n#Cluster") if 'Cluster' in yml and yml['Cluster'] is not None: if 'ClusterProgramName' in yml['Cluster']: out.write("\nClusterProgramName="+yml['Cluster']['ClusterProgramName']) if 'ExecutableCluster' in yml['Cluster']: out.write("\nExecutableCluster="+yml['Cluster']['ExecutableCluster']) if 'version' in yml['Program']: out.write("\nVersion= "+u.get_program_version(yml)+"") out.write("\n#Docker") if 'Docker' in yml and yml['Docker'] is not None: if 'DockerImage' in yml['Docker']: out.write("\nDockerImage="+yml['Docker']['DockerImage']) if 'ExecutableDocker' in yml['Docker']: out.write("\nExecutableDocker="+yml['Docker']['ExecutableDocker']) if 'DockerInputs' in yml['Docker']: out.write("\nDockerInputs="+yml['Docker']['DockerInputs']) if 'DockerOutputs' in yml['Docker']: out.write("\nDockerOutputs="+yml['Docker']['DockerOutputs'])
JeGoi/IPa2
packages/java_properties.py
Python
mit
6,870
#!/usr/bin/python
# -*- coding: utf-8 -*-
import os, re, pyText2pdf
from sys import argv
from PyPDF2 import PdfFileReader, PdfFileWriter, PdfFileMerger

def usage():
    print "Usage: %s <directory>" %(argv[0])
    exit(0)

def find(path,dir):
    for i in os.listdir(path)[::-1]:
        if os.path.isfile(path+"/%s"%i):
            file1.append(path+"/%s"%i)
        if os.path.isdir(path+"/%s"%i):
            dir.append(path+"/%s"%i)

def loop(directory,directory2):
    for i in directory:
        find(i,directory2)

def main_loop():
    i=0
    while True:
        directory2.append([])
        loop(directory2[i],directory2[i+1])
        if len(directory2[i+1]) == 0:
            break
        i+=1

def ingredients(list1):
    for i in list1:
        a = re.search("%s/+(.+)$"%argv[1],i)
        if a:
            subdirectory.append(a.groups(1)[0])

def text2pdf_f(i,arg1):
    i.parseArgs(arg1)
    i.Convert()

def getnumpages(c):
    for i in range(0,len(c)-1):
        a = PdfFileReader(open("%s/%s.pdf"%(argv[1],c[i]), "rb"))
        b = a.getNumPages()
        numpages.append(numpages[i]+b)

def merge_pdfs(c):
    merge = PdfFileMerger()
    for i in range(0,len(c)):
        a = open("%s/%s.pdf"%(argv[1],c[i]), "rb")  # PDFs must be opened in binary mode
        merge.append(a)
    output = open("output.pdf", "wb")
    merge.write(output)

def clear_pdfs(list1):
    for i in list1:
        os.remove("%s/%s.pdf"%(argv[1],i))

def add_bookmark(list1,list2):
    output = PdfFileWriter()
    input1 = PdfFileReader(open('output.pdf', 'rb'))
    num = input1.getNumPages()
    for i in range(0,num):
        output.addPage(input1.getPage(i))
    for i in range(0,len(list1)):
        output.addBookmark(list1[i], list2[i])
    os.remove("output.pdf")
    pdf = open("output.pdf", "wb")
    output.write(pdf)

def make_pdfs():
    files = []
    file_types = {}
    for i in file1:
        a = re.search("%s/.+\.(.+)$"%argv[1],i)
        tmp = []
        if a:
            tmp.append(a.groups(1)[0])
            tmp.append(i)
            files.append(tmp)
            file_types[a.groups(1)[0]] = ""
        else:
            tmp.append("")
            tmp.append(i)
            files.append(tmp)
    for i in files:
        for k in non_plain_text_filetypes:
            if k == i[0]:
                file2.remove(i[1])
                break
    print "This directory contains these file types:"
    for i in file_types.keys():
        print i
    print "This program converts only plain text files, so non plain text files are ignored."
    raw_input("Press any key to continue ... ")
    for i in range(0,len(file2)):
        to_pdf_ins.append(pyText2pdf.pyText2Pdf())
        arg_list = []
        arg_list.append("%s/pyText2pdf.py"%argv[1])
        arg_list.append("%s"%file2[i])
        text2pdf_f(to_pdf_ins[i],arg_list)
    print "\nFile conversion is completed."

def main():
    global file1
    file1 = []
    global file2
    file2 = []
    global directory
    directory = []
    global directory2
    directory2 = []
    directory2.append([])
    global subdirectory
    subdirectory = []
    global numpages
    numpages = [0,]
    global to_pdf_ins
    to_pdf_ins = []
    global non_plain_text_filetypes
    non_plain_text_filetypes = ["pdf","png","jpg","jpeg","pyc"]
    if not len(argv) == 2:
        usage()
    find(argv[1],directory)
    loop(directory,directory2[0])
    main_loop()
    file2 = file1[:]  # copy, so the removals in make_pdfs() do not mutate file1
    make_pdfs()
    ingredients(file2)
    getnumpages(subdirectory)
    merge_pdfs(subdirectory)
    clear_pdfs(subdirectory)
    add_bookmark(subdirectory,numpages)

if __name__ == '__main__':
    main()
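# A hedged usage sketch for the script above; the path is made up, and a
# Python 2 environment with pyText2pdf and PyPDF2 installed is assumed:
#
#   python dir2pdf.py ~/notes
#
# The run converts every plain text file under the directory to a PDF via
# pyText2pdf, merges the results into output.pdf, adds one bookmark per
# entry collected by ingredients() at the page offsets computed by
# getnumpages(), and finally removes the intermediate per-file PDFs.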
aliagdeniz/dir2pdf
dir2pdf.py
Python
gpl-3.0
3,135
import unittest
import warnings

import scrubadub
import scrubadub.detectors.catalogue
import scrubadub.utils


class OldAPITestCase(unittest.TestCase):
    def setUp(self):
        from scrubadub.detectors.text_blob import TextBlobNameDetector
        scrubadub.detectors.catalogue.register_detector(TextBlobNameDetector, autoload=True)

    def test_scrubadub_clean(self):
        """test old scrubadub API"""
        text = u"John is a cat"
        self.assertEqual(
            scrubadub.clean(text),
            "{{NAME}} is a cat",
        )

        scrubadub.filth.Filth.lookup = scrubadub.utils.Lookup()
        with warnings.catch_warnings(record=True) as warning_context:
            warnings.simplefilter("always")
            try:
                self.assertEqual(
                    scrubadub.clean(text, replace_with='identifier'),
                    "{{NAME-0}} is a cat",
                )
            finally:
                warnings.simplefilter("default")
        self.assertTrue(sum(issubclass(w.category, DeprecationWarning) for w in warning_context) > 0)

        scrubadub.filth.Filth.lookup = scrubadub.utils.Lookup()
        with warnings.catch_warnings(record=True) as warning_context:
            warnings.simplefilter("always")
            try:
                self.assertEqual(
                    scrubadub.clean("John spoke with Doug.", replace_with='identifier'),
                    "{{NAME-0}} spoke with {{NAME-1}}.",
                )
            finally:
                warnings.simplefilter("default")
        self.assertTrue(sum(issubclass(w.category, DeprecationWarning) for w in warning_context) > 0)
        scrubadub.filth.Filth.lookup = scrubadub.utils.Lookup()

    def test_scrubber_clean(self):
        """test older scrubber API"""
        scrubber = scrubadub.Scrubber()
        scrubber.remove_detector('email')
        text = "contact Joe Duffy at joe@example.com"
        self.assertEqual(
            scrubadub.clean(text),
            "contact {{NAME}} {{NAME}} at {{EMAIL}}",
        )

    def test_filth_class(self):
        class MyFilth(scrubadub.filth.Filth):
            type = 'mine'

        class MyDetector(scrubadub.detectors.Detector):
            filth_cls = MyFilth

            def iter_filth(self, text, **kwargs):
                yield MyFilth(beg=0, end=8, text='My stuff', **kwargs)

        scrubber = scrubadub.Scrubber()
        # TODO: Add deprecation warning
        scrubber.add_detector(MyDetector)
        text = "My stuff can be found there."
        self.assertEqual(
            scrubber.clean(text),
            "{{MINE}} can be found there.",
        )

    def test_filth_markers(self):
        prefix = scrubadub.filth.base.Filth.prefix
        suffix = scrubadub.filth.base.Filth.suffix
        scrubadub.filth.base.Filth.prefix = '<b>'
        scrubadub.filth.base.Filth.suffix = '</b>'
        scrubber = scrubadub.Scrubber()
        with warnings.catch_warnings(record=True) as warning_context:
            warnings.simplefilter("always")
            try:
                self.assertEqual(
                    scrubber.clean("contact Joe Duffy at joe@example.com"),
                    "contact <b>NAME</b> <b>NAME</b> at <b>EMAIL</b>",
                )
            finally:
                warnings.simplefilter("default")
                # Ensure that this is reset, no matter what happens above
                scrubadub.filth.base.Filth.prefix = prefix
                scrubadub.filth.base.Filth.suffix = suffix
        self.assertTrue(sum(issubclass(w.category, DeprecationWarning) for w in warning_context) > 0)

    def test_regex_filth(self):
        """Test for a DeprecationWarning when using RegexFilth."""
        with warnings.catch_warnings(record=True) as warning_context:
            warnings.simplefilter("always")
            try:
                scrubadub.filth.RegexFilth(0, 2, 'ab')
            finally:
                warnings.simplefilter("default")
        self.assertEqual(sum(issubclass(w.category, DeprecationWarning) for w in warning_context), 1)

    def tearDown(self) -> None:
        from scrubadub.detectors.text_blob import TextBlobNameDetector
        scrubadub.detectors.catalogue.remove_detector(TextBlobNameDetector)
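# A short sketch of the current-style API that these legacy tests pin down,
# using only calls exercised in the tests above; the exact placeholder
# output depends on which detectors are registered in your environment.
import scrubadub

scrubber = scrubadub.Scrubber()
scrubber.remove_detector('email')  # drop one of the stock detectors
print(scrubber.clean("contact Joe Duffy at joe@example.com"))

# Module-level one-shot convenience wrapper:
print(scrubadub.clean("John is a cat"))  # "{{NAME}} is a cat" with a name detector loaded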
datascopeanalytics/scrubadub
tests/test_api_older.py
Python
mit
4,276
""" Glue execution module to link to the :mod:`fx2 proxymodule <salt.proxy.fx2>`. Depends: :mod:`iDRAC Remote execution module (salt.modules.dracr) <salt.modules.dracr>` For documentation on commands that you can direct to a Dell chassis via proxy, look in the documentation for :mod:`salt.modules.dracr <salt.modules.dracr>`. This execution module calls through to a function in the fx2 proxy module called ``chconfig``. That function looks up the function passed in the ``cmd`` parameter in :mod:`salt.modules.dracr <salt.modules.dracr>` and calls it. .. versionadded:: 2015.8.2 """ import logging import salt.utils.platform log = logging.getLogger(__name__) __proxyenabled__ = ["fx2"] __virtualname__ = "chassis" def __virtual__(): """ Only work on proxy """ if salt.utils.platform.is_proxy(): return __virtualname__ return ( False, "The chassis execution module cannot be loaded: " "this only works in proxy minions.", ) def chassis_credentials(): proxyprefix = __opts__["proxy"]["proxytype"] (username, password) = __proxy__[proxyprefix + ".find_credentials"]() return (username, password) def cmd(cmd, *args, **kwargs): proxyprefix = __opts__["proxy"]["proxytype"] (username, password) = chassis_credentials() kwargs["admin_username"] = username kwargs["admin_password"] = password kwargs["host"] = __proxy__[proxyprefix + ".host"]() proxycmd = __opts__["proxy"]["proxytype"] + ".chconfig" return __proxy__[proxycmd](cmd, *args, **kwargs)
saltstack/salt
salt/modules/chassis.py
Python
apache-2.0
1,556
import unittest class NotificationManagerTest(unittest.TestCase): def test(self): self.assertEqual(4, 4)
dashford/sentinel
tests/Notification/test_notification_manager.py
Python
mit
119
__author__ = 'jdaniel'

from GaiaSolve.common import ModelInfo
from GaiaSolve.common import GaiaException


class Host(object):
    def __init__(self):
        self.model = None
        self.algorithm = None

    def set_model(self, model):
        self.model = model
        self.model.set_host(self)

    def set_algorithm(self, algorithm):
        self.algorithm = algorithm
        self.algorithm.set_host(self)

    def get_model_info(self):
        """
        Gets the model information

        :return: populated ModelInfo object
        """
        model_info = ModelInfo()
        model_info.set_info(self.model)
        return model_info

    def run_algorithm(self):
        """
        Runs the algorithm

        :return: None
        """
        self.algorithm.run()

    @property
    def x(self):
        """
        The decision variables cannot be read back through the host; an
        algorithm works on a local copy via the HostModel.

        :return: None
        """
        err_str = 'ERROR: x cannot be retrieved from the host and must be set and used in the model'
        raise GaiaException(err_str)

    @x.setter
    def x(self, values):
        """
        Set the model's current design variables' values.

        :param values: list of design variable values to set
        :return: None
        """
        self.model.x = values

    @property
    def obj(self):
        """
        Get the model's current objective function values. The history lookup
        is currently disabled, so the model is always re-evaluated.

        :return: List of objective values
        """
        self.model.evaluate()
        values = self.model.obj
        return values

    @obj.setter
    def obj(self, values):
        """
        Objective values are produced by the model and cannot be assigned
        through the host.
        """
        raise GaiaException('obj must be set by the model')

    @property
    def eqcon(self):
        """
        Get the model's current equality constraint values. If they have not
        been calculated yet, evaluate the model to obtain them.

        :return: List of equality constraint values
        """
        values = self.model.eqcon
        if values is None:
            self.model.evaluate()
            values = self.model.eqcon
        return values

    @eqcon.setter
    def eqcon(self, values):
        """
        Equality constraint values are produced by the model and cannot be
        assigned through the host.
        """
        raise GaiaException('eqcon must be set by the model')

    @property
    def neqcon(self):
        """
        Get the model's current inequality constraint values. If they have not
        been calculated yet, evaluate the model to obtain them.

        :return: List of inequality constraint values
        """
        values = self.model.neqcon
        if values is None:
            self.model.evaluate()
            values = self.model.neqcon
        return values

    @neqcon.setter
    def neqcon(self, values):
        """
        Inequality constraint values are produced by the model and cannot be
        assigned through the host.
        """
        raise GaiaException('neqcon must be set by the model')
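# A minimal sketch of the interface Host expects. ParabolaModel and
# OneShotAlgorithm are hypothetical stand-ins that only show the required
# hooks (set_host, evaluate, run) and attributes (x, obj, eqcon, neqcon);
# they are not part of GaiaSolve.
class ParabolaModel(object):
    def __init__(self):
        self.host = None
        self.x = None
        self.obj = None
        self.eqcon = None
        self.neqcon = None

    def set_host(self, host):
        self.host = host

    def evaluate(self):
        # Objective: sum of squares, no constraints.
        self.obj = [sum(v * v for v in self.x)]
        self.eqcon = []
        self.neqcon = []


class OneShotAlgorithm(object):
    def __init__(self):
        self.host = None

    def set_host(self, host):
        self.host = host

    def run(self):
        self.host.x = [1.0, -2.0]  # push a design point to the model
        print(self.host.obj)       # triggers model.evaluate() -> [5.0]


host = Host()
host.set_model(ParabolaModel())
host.set_algorithm(OneShotAlgorithm())
host.run_algorithm()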
jldaniel/Gaia
GaiaSolve/host.py
Python
mit
3,913
#        4
#      /   \
#     2     6
#    / \   / \
#   1   3 5   7

class Node:
    def __init__(self, data):
        self.rChild = None
        self.lChild = None
        self.data = data

INT_MIN = -4294967296
INT_MAX = 4294967296

def inorder(root):
    if root is None:
        return
    inorder(root.lChild)
    print root.data
    inorder(root.rChild)

def isBSTUtil(node, minVal, maxVal):
    # An empty subtree is a valid BST.
    if node is None:
        return True
    # Every node must lie within the (minVal, maxVal) window inherited
    # from its ancestors.
    if node.data < minVal or node.data > maxVal:
        return False
    # Narrow the window for the left and right subtrees.
    return (isBSTUtil(node.lChild, minVal, node.data - 1) and
            isBSTUtil(node.rChild, node.data + 1, maxVal))

def main():
    root = Node(4)
    root.lChild = Node(2)
    root.rChild = Node(6)
    root.lChild.lChild = Node(1)
    root.lChild.rChild = Node(3)
    root.rChild.lChild = Node(5)
    root.rChild.rChild = Node(7)

    inorder(root)
    if isBSTUtil(root, INT_MIN, INT_MAX):
        print "Is BST"
    else:
        print "Not a BST"

if __name__ == '__main__':
    main()
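# A tree where comparing each node only with its immediate children would
# pass, but the ancestor window above fails: 5 sits in the left subtree of
# the root 4, so by the time the recursion reaches it the window has
# narrowed to (3, 3) and 5 is rejected. (Values here are illustrative.)
bad = Node(4)
bad.lChild = Node(2)
bad.rChild = Node(6)
bad.lChild.rChild = Node(5)  # locally fine (5 > 2), globally wrong (5 > 4)

print isBSTUtil(bad, INT_MIN, INT_MAX)  # False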
ruchikd/Algorithms
Python/FindIsBSTaBST/isBSTaBST.py
Python
gpl-3.0
1,012
# Copyright 2010-2014 Gentoo Foundation # Distributed under the terms of the GNU General Public License v2 from __future__ import division import locale import logging import time from portage import os, _unicode_decode from portage.exception import PortageException from portage.localization import _ from portage.output import EOutput from portage.util import grabfile, writemsg_level def have_english_locale(): lang, enc = locale.getdefaultlocale() if lang is not None: lang = lang.lower() lang = lang.split('_', 1)[0] return lang is None or lang in ('c', 'en') def whenago(seconds): sec = int(seconds) mins = 0 days = 0 hrs = 0 years = 0 out = [] if sec > 60: mins = sec // 60 sec = sec % 60 if mins > 60: hrs = mins // 60 mins = mins % 60 if hrs > 24: days = hrs // 24 hrs = hrs % 24 if days > 365: years = days // 365 days = days % 365 if years: out.append("%dy " % years) if days: out.append("%dd " % days) if hrs: out.append("%dh " % hrs) if mins: out.append("%dm " % mins) if sec: out.append("%ds " % sec) return "".join(out).strip() def old_tree_timestamp_warn(portdir, settings): unixtime = time.time() default_warnsync = 30 timestamp_file = os.path.join(portdir, "metadata/timestamp.x") try: lastsync = grabfile(timestamp_file) except PortageException: return False if not lastsync: return False lastsync = lastsync[0].split() if not lastsync: return False try: lastsync = int(lastsync[0]) except ValueError: return False var_name = 'PORTAGE_SYNC_STALE' try: warnsync = float(settings.get(var_name, default_warnsync)) except ValueError: writemsg_level("!!! %s contains non-numeric value: %s\n" % \ (var_name, settings[var_name]), level=logging.ERROR, noiselevel=-1) return False if warnsync <= 0: return False if (unixtime - 86400 * warnsync) > lastsync: out = EOutput() if have_english_locale(): out.ewarn("Last emerge --sync was %s ago." % \ whenago(unixtime - lastsync)) else: out.ewarn(_("Last emerge --sync was %s.") % \ _unicode_decode(time.strftime( '%c', time.localtime(lastsync)))) return True return False
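# Worked example for whenago(): 90061 seconds decomposes into 1 day,
# 1 hour, 1 minute and 1 second, and the trailing space is stripped:
#
#   >>> whenago(90061)
#   '1d 1h 1m 1s'
#
# old_tree_timestamp_warn() feeds it (now - lastsync) whenever the tree
# timestamp is older than PORTAGE_SYNC_STALE days (default 30) and the
# locale is English; otherwise it prints the localized sync date instead.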
ptisserand/portage
pym/portage/sync/old_tree_timestamp.py
Python
gpl-2.0
2,161
# This code is meant to be run after loading the model in 'Axon.g'. # 'Axon.g' loads 2 identical copies of an linear, passive neuron. # # These are helper functions to scale passive parameters Cm, Rm, Ra, # diameter and length. By default the second copy ('/axon1') is # modified. # # After scaling a passive parameter, run simulation again, and compare # plots for '/axon' and '/axon1'. import sys sys.path.append('../../python') import moose # from pymoose import tweak_field path = '/axon1' wildcard = path + '/#[TYPE=Compartment]' # def scale_cm( scale ): # tweak_field( wildcard, 'Cm', '{0} * Cm'.format( scale ) ) # def scale_ra( scale ): # tweak_field( wildcard, 'Ra', '{0} * Ra'.format( scale ) ) # def scale_rm( scale ): # tweak_field( wildcard, 'Rm', '{0} * Rm'.format( scale ) ) # def scale_diameter( scale ): # tweak_field( wildcard, 'diameter', '{0} * diameter'.format( scale ) ) # cm_scale = scale # tweak_field( wildcard, 'Cm', '{0} * Cm'.format( cm_scale ) ) # rm_scale = 1.0 / scale # tweak_field( wildcard, 'Rm', '{0} * Rm'.format( rm_scale ) ) # ra_scale = 1.0 / ( scale * scale ) # tweak_field( wildcard, 'Ra', '{0} * Ra'.format( ra_scale ) ) # def scale_length( scale ): # tweak_field( wildcard, 'length', '{0} * length'.format( scale ) ) # cm_scale = scale # tweak_field( wildcard, 'Cm', '{0} * Cm'.format( cm_scale ) ) # rm_scale = 1.0 / scale # tweak_field( wildcard, 'Rm', '{0} * Rm'.format( rm_scale ) ) # ra_scale = scale # tweak_field( wildcard, 'Ra', '{0} * Ra'.format( ra_scale ) ) if __name__ == '__main__': moose.context.loadG('Axon.g')
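# The commented-out helpers above encode the usual passive-cable scalings:
# for a cylindrical compartment, Cm and 1/Rm go with membrane area
# (diameter * length) while Ra goes with length / diameter^2. A standalone
# sketch of the arithmetic (no moose needed; the factor 2.0 is illustrative):
scale = 2.0                  # double the diameter
cm_scale = scale             # Cm grows with area
rm_scale = 1.0 / scale       # Rm shrinks with area
ra_scale = 1.0 / scale ** 2  # Ra shrinks with cross-sectional area
print(cm_scale, rm_scale, ra_scale)  # 2.0 0.5 0.25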
BhallaLab/moose-thalamocortical
DEMOS/axon-passive/Axon.py
Python
lgpl-2.1
1,610
import math import numpy as np from scipy.optimize import fsolve AIR_DENSITY = 1.2754 # kg/m^3 class Quartet: '''An arbitrary class that holds the four corner values of a car.''' def __init__(self, fr, fl, rr, rl): self.quartet = [fr, fl, rr, rl] @property def quartet(self): return self.__quartet @quartet.setter def quartet(self, data): if len(data) == 4: self.__quartet = data else: self.__quartet = [0, 0, 0, 0] @property def fr(self): return self.quartet[0] @property def fl(self): return self.quartet[1] @property def rr(self): return self.quartet[2] @property def rl(self): return self.quartet[3] class Vehicle: def __init__(self, tires, mass, geometry, suspension, aero, setup): # Tires self.tires = tires # TODO: protect this data member # General self.suspended_mass = mass['suspended_mass'] self.nonsuspended_mass = mass['nonsuspended_mass'] self.driver_mass = mass['driver_mass'] self.cg_height = mass['cg_height'] # CG height [m] self.weightdist_front = mass['weightdist_front'] # front weight dist [%] # Geometry self.wheelbase = geometry['wheelbase'] # wheelbase [m] self.trackwidth_front = geometry['trackwidth_front'] self.trackwidth_rear = geometry['trackwidth_rear'] self.rollcentre_front = geometry['rollcentre_front'] self.rollcentre_rear = geometry['rollcentre_rear'] self.vsal_front = geometry['vsal_front'] self.vsal_rear = geometry['vsal_rear'] # Suspension self.cornerspring_front = suspension['cornerspring_front'] self.cornerspring_rear = suspension['cornerspring_rear'] self.antirollstiffness_front = suspension['antirollstiffness_front'] self.antirollstiffness_rear = suspension['antirollstiffness_rear'] # Aerodynamic self.frontal_area = aero['frontal_area'] self.coeff_lift = aero['coeff_lift'] self.coeff_drag = aero['coeff_drag'] # unused for now self.aero_balance = aero['aero_balance'] # Static Setup # TODO: individual setup for each wheel. 
self.static_camber_front = setup['static_camber_front'] self.static_camber_rear = setup['static_camber_rear'] self.static_toe_front = setup['static_toe_front'] self.static_toe_rear = setup['static_toe_rear'] '''Mass''' @property def suspended_mass(self): return self.__suspended_mass @suspended_mass.setter def suspended_mass(self, suspended_mass): self.__suspended_mass = suspended_mass @property def nonsuspended_mass(self): return self.__nonsuspended_mass + self.driver_mass @nonsuspended_mass.setter def nonsuspended_mass(self, nonsuspended_mass): self.__nonsuspended_mass = nonsuspended_mass @property def driver_mass(self): return self.__driver_mass @driver_mass.setter def driver_mass(self, driver_mass): self.__driver_mass = driver_mass @property def cg_height(self): return self.__cg_height @cg_height.setter def cg_height(self, cg_height): self.__cg_height = cg_height @property def weightdist_front(self): return self.__weightdist_front @weightdist_front.setter def weightdist_front(self, weightdist_front): self.__weightdist_front = weightdist_front '''Geometry''' @property def wheelbase(self): return self.__wheelbase @wheelbase.setter def wheelbase(self, wheelbase): self.__wheelbase = wheelbase @property def trackwidth_front(self): return self.__trackwidth_front @trackwidth_front.setter def trackwidth_front(self, trackwidth_front): self.__trackwidth_front = trackwidth_front @property def trackwidth_rear(self): return self.__trackwidth_rear @trackwidth_rear.setter def trackwidth_rear(self, trackwidth_rear): self.__trackwidth_rear = trackwidth_rear @property def rollcentre_front(self): return self.__rollcentre_front @rollcentre_front.setter def rollcentre_front(self, rollcentre_front): self.__rollcentre_front = rollcentre_front @property def rollcentre_rear(self): return self.__rollcentre_rear @rollcentre_rear.setter def rollcentre_rear(self, rollcentre_rear): self.__rollcentre_rear = rollcentre_rear @property def vsal_front(self): return self.__vsal_front @vsal_front.setter def vsal_front(self, vsal_front): self.__vsal_front = vsal_front @property def vsal_rear(self): return self.__vsal_rear @vsal_rear.setter def vsal_rear(self, vsal_rear): self.__vsal_rear = vsal_rear '''Suspension''' @property def cornerspring_front(self): return self.__cornerspring_front @cornerspring_front.setter def cornerspring_front(self, cornerspring_front): self.__cornerspring_front = cornerspring_front @property def cornerspring_rear(self): return self.__cornerspring_rear @cornerspring_rear.setter def cornerspring_rear(self, cornerspring_rear): self.__cornerspring_rear = cornerspring_rear @property def antirollstiffness_front(self): return self.__antirollstiffness_front @antirollstiffness_front.setter def antirollstiffness_front(self, antirollstiffness_front): self.__antirollstiffness_front = antirollstiffness_front @property def antirollstiffness_rear(self): return self.__antirollstiffness_rear @antirollstiffness_rear.setter def antirollstiffness_rear(self, antirollstiffness_rear): self.__antirollstiffness_rear = antirollstiffness_rear '''Aerodynamics''' @property def frontal_area(self): return self.__frontal_area @frontal_area.setter def frontal_area(self, frontal_area): self.__frontal_area = frontal_area @property def coeff_lift(self): return self.__coeff_lift @coeff_lift.setter def coeff_lift(self, coeff_lift): self.__coeff_lift = coeff_lift @property def coeff_drag(self): return self.__coeff_drag @coeff_drag.setter def coeff_drag(self, coeff_drag): self.__coeff_drag = coeff_drag @property def aero_balance(self): 
return self.__aero_balance @aero_balance.setter def aero_balance(self, aero_balance): self.__aero_balance = aero_balance '''Static Setup''' @property def static_camber_front(self): return self.__static_camber_front @static_camber_front.setter def static_camber_front(self, static_camber_front): self.__static_camber_front = math.radians(static_camber_front) @property def static_camber_rear(self): return self.__static_camber_rear @static_camber_rear.setter def static_camber_rear(self, static_camber_rear): self.__static_camber_rear = math.radians(static_camber_rear) @property def static_toe_front(self): return self.__static_toe_front @static_toe_front.setter def static_toe_front(self, static_toe_front): self.__static_toe_front = math.radians(static_toe_front) @property def static_toe_rear(self): return self.__static_toe_rear @static_toe_rear.setter def static_toe_rear(self, static_toe_rear): self.__static_toe_rear = math.radians(static_toe_rear) ''' Derived properties of the vehicle ''' @property def a(self): return self.wheelbase * (1 - self.weightdist_front) @property def b(self): return self.wheelbase * self.weightdist_front @property def mass(self): return self.suspended_mass + self.nonsuspended_mass @property def antiroll_distribution(self): '''Calculates the anti-roll stiffness distribution. This is also known as the first magic number''' # Calculate the anti-roll stiffness per degree from the springs ar_spring_front = self.trackwidth_front**2 * self.cornerspring_front * math.tan(math.radians(1)) / 2 ar_spring_rear = self.trackwidth_rear**2 * self.cornerspring_rear * math.tan(math.radians(1)) / 2 stiffness_front = ar_spring_front + self.antirollstiffness_front stiffness_rear = ar_spring_rear + self.antirollstiffness_rear return stiffness_front / (stiffness_front + stiffness_rear) def calc_aero_downforce(self, velocity): '''Calculates the aerodynamic downforce acting on each wheel''' downforce = 0.5 * AIR_DENSITY * self.frontal_area * self.coeff_lift * velocity ** 2 downforce_fr = downforce * self.aero_balance / 2 downforce_fl = downforce_fr downforce_rr = downforce * (1 - self.aero_balance) / 2 downforce_rl = downforce_rr return Quartet(downforce_fr, downforce_fl, downforce_rr, downforce_rl) def calc_lat_load_transfer(self, a_lat): ''' Calculate the lateral load transfer using the OptimumG Seminar method. Several simplifying assumptions are made in the calculation. 
- Non-suspended mass CG is located at the centre of the tire - Excludes the tire stiffness when calculating the elastic weight transfer - Constant CG and roll centre locations ''' # Weight transfer at the front axle nonsuspended_front = self.nonsuspended_mass / 2 * a_lat * self.tires.fr.re / self.trackwidth_front geometric_front = self.weightdist_front * self.suspended_mass * a_lat * self.rollcentre_front / self.trackwidth_front elastic_front = self.antiroll_distribution * self.suspended_mass * a_lat * (self.cg_height - self.rollcentre_front) / self.trackwidth_front # Weight transfer at the rear axle nonsuspended_rear = self.nonsuspended_mass / 2 * a_lat * self.tires.rr.re / self.trackwidth_rear geometric_rear = (1 - self.weightdist_front) * self.suspended_mass * a_lat * self.rollcentre_rear / self.trackwidth_rear elastic_rear = (1 - self.antiroll_distribution) * self.suspended_mass * a_lat * (self.cg_height - self.rollcentre_rear) / self.trackwidth_rear # Package the components nicely in a dictionary loadtransfer = {'nonsuspended_front': nonsuspended_front, 'geometric_front': geometric_front, 'elastic_front': elastic_front, 'nonsuspended_rear': nonsuspended_rear, 'geometric_rear': geometric_rear, 'elastic_rear': elastic_rear } # Return all components of the weight transfer. Think of a better name for this. return loadtransfer def calc_vertical_load(self, a_lat, a_long, velocity): '''Calculate the tire vertical load''' loadtransfer = self.calc_lat_load_transfer(a_lat) downforce = self.calc_aero_downforce(velocity) # Sum the components of the weight transfer front_lat_trnsfr = loadtransfer['nonsuspended_front'] + loadtransfer['geometric_front'] + loadtransfer['elastic_front'] rear_lat_trnsfr = loadtransfer['nonsuspended_rear'] + loadtransfer['geometric_rear'] + loadtransfer['elastic_rear'] # Wheel vertical load without weight transfer effects for a single wheel fz_front_wheel = -(9.81 * self.mass * self.weightdist_front / 2) - downforce.fr - downforce.fl fz_rear_wheel = -(9.81 * self.mass * (1 - self.weightdist_front) / 2) - downforce.rr - downforce.rl # Wheel lift assumption - for a given axle, wheel will take entire axle load if math.fabs(front_lat_trnsfr) > math.fabs(fz_front_wheel): front_lat_trnsfr = math.copysign(fz_front_wheel, front_lat_trnsfr) if math.fabs(rear_lat_trnsfr) > math.fabs(fz_rear_wheel): rear_lat_trnsfr = math.copysign(fz_rear_wheel, rear_lat_trnsfr) fz_fr = fz_front_wheel + front_lat_trnsfr fz_fl = fz_front_wheel - front_lat_trnsfr fz_rr = fz_rear_wheel + rear_lat_trnsfr fz_rl = fz_rear_wheel - rear_lat_trnsfr return Quartet(fz_fr, fz_fl, fz_rr, fz_rl) def calc_roll_moment(self, theta): '''Calculate the antiroll stiffness for a given roll angle in radians''' antiroll_front = self.antirollstiffness_front * math.degrees(theta) + self.trackwidth_front**2 * self.cornerspring_front * math.tan(theta) / 2 antiroll_rear = self.antirollstiffness_rear * math.degrees(theta) + self.trackwidth_rear**2 * self.cornerspring_rear * math.tan(theta) / 2 return antiroll_front + antiroll_rear def calc_roll_angle(self, a_lat): '''Calculate the chassis roll angle when subjected to a lateral acceleration''' loadtransfer = self.calc_lat_load_transfer(a_lat) # Calculate the total roll moment to be reacted roll_moment = loadtransfer['elastic_front'] * self.trackwidth_front + loadtransfer['elastic_rear'] * self.trackwidth_rear roll_equation = lambda theta: self.calc_roll_moment(theta) + roll_moment roll_angle = fsolve(roll_equation, 0) return roll_angle.flat[0] def calc_camber_angles(self, 
a_lat): '''Calculate the tire camber variation using constant VSAL approximation''' # Determine the chassis roll angle roll_angle = self.calc_roll_angle(a_lat) # With the roll angle known, determine the vertical displacment required to return the wheel displacement_front = self.trackwidth_front / 2 * math.tan(roll_angle) displacement_rear = self.trackwidth_rear / 2 * math.tan(roll_angle) # Inclination angle in the 'vehicle' frame of reference gamma_fr = roll_angle - math.atan(displacement_front / self.vsal_front) + self.static_camber_front gamma_fl = -roll_angle + math.atan(displacement_front / self.vsal_front) + self.static_camber_front gamma_rr = roll_angle - math.atan(displacement_rear / self.vsal_rear) + self.static_camber_rear gamma_rl = -roll_angle + math.atan(displacement_rear / self.vsal_rear) + self.static_camber_rear return Quartet(gamma_fr, gamma_fl, gamma_rr, gamma_rl) def calc_slip_angles(self, velocity, yaw_speed, beta): '''Calculate the tire slip angles''' velocity_y = velocity*math.tan(beta) alpha_fr = math.atan((velocity_y + self.a*yaw_speed)/(velocity - self.trackwidth_front/2 * yaw_speed)) + self.static_toe_front alpha_fl = math.atan((velocity_y + self.a*yaw_speed)/(velocity + self.trackwidth_front/2 * yaw_speed)) - self.static_toe_front alpha_rr = math.atan((velocity_y - self.b*yaw_speed)/(velocity - self.trackwidth_rear/2 * yaw_speed)) + self.static_toe_rear alpha_rl = math.atan((velocity_y - self.b*yaw_speed)/(velocity + self.trackwidth_rear/2 * yaw_speed)) - self.static_toe_rear return Quartet(alpha_fr, alpha_fl, alpha_rr, alpha_rl) def calc_lateral_forces(self, fz, alpha, delta, gamma): '''Calculate the lateral force generated by the tires''' # Slip angle sign flip-flop is done here fy_fr = self.tires.fr.calc_fy(fz.fr, alpha.fr+delta, 0, -gamma.fr) fy_fl = -self.tires.fl.calc_fy(fz.fl, -(alpha.fl+delta), 0, -gamma.fl) fy_rr = self.tires.rr.calc_fy(fz.rr, alpha.rr, 0, -gamma.rr) fy_rl = -self.tires.rl.calc_fy(fz.rl, -alpha.rl, 0, -gamma.rl) return Quartet(fy_fr, fy_fl, fy_rr, fy_rl) def calc_self_aligning(self, fz, alpha, delta, gamma): '''Calculate the self aligning torque generated by the tires''' mz_fr = self.tires.fr.calc_mz(fz.fr, alpha.fr+delta, 0, -gamma.fr) mz_fl = -self.tires.fl.calc_mz(fz.fl, -(alpha.fl+delta), 0, -gamma.fl) mz_rr = self.tires.rr.calc_mz(fz.rr, alpha.rr, 0, -gamma.rr) mz_rl = -self.tires.rl.calc_mz(fz.rl, -alpha.rl, 0, -gamma.rl) return Quartet(mz_fr, mz_fl, mz_rr, mz_rl) def calc_vehicle_forces(self, velocity, yaw_speed, a_lat, beta, delta): '''Calculate the resolved forces and moments acting on the car''' alpha = self.calc_slip_angles(velocity, yaw_speed, beta) gamma = self.calc_camber_angles(a_lat) fz = self.calc_vertical_load(a_lat, 0, velocity) # assume a_long is zero fy = self.calc_lateral_forces(fz, alpha, delta, gamma) mz = self.calc_self_aligning(fz, alpha, delta, gamma) # Transformation matrix a11 = math.sin(delta) a12 = math.sin(delta) a13 = 0 a14 = 0 a21 = math.cos(delta) a22 = math.cos(delta) a23 = 1 a24 = 1 a31 = (self.trackwidth_front/2*math.sin(delta) + self.a*math.cos(delta)) a32 = (-self.trackwidth_front/2*math.sin(delta) + self.a*math.cos(delta)) a33 = -self.b a34 = -self.b matrix = np.matrix([[a11, a12, a13, a14], # F_x [a21, a22, a23, a24], # F_y [a31, a32, a33, a34]]) # M_z # FR, FL, RR, RL forces = np.matrix([[fy.fr], [fy.fl], [fy.rr], [fy.rl]]) # Rotate forces and moments to the vehicle frame of reference resolved = matrix * forces # Add in the self-aligning torques resolved.flat[2] = resolved.flat[2] + mz.fr + mz.fl 
        resolved.flat[2] = resolved.flat[2] + mz.rr + mz.rl

        return resolved
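# A hypothetical smoke test for the model above. The tire model is a crude
# linear stand-in exposing the calc_fy/calc_mz/re interface Vehicle uses;
# every number below is illustrative, not a validated setup.
class LinearTire(object):
    re = 0.26  # effective rolling radius [m]

    def calc_fy(self, fz, alpha, kappa, gamma):
        return -1.5 * fz * alpha  # cornering force proportional to load and slip

    def calc_mz(self, fz, alpha, kappa, gamma):
        return 0.0                # ignore self-aligning torque in the stub

tire = LinearTire()
tires = Quartet(tire, tire, tire, tire)

mass = {'suspended_mass': 180.0, 'nonsuspended_mass': 40.0, 'driver_mass': 70.0,
        'cg_height': 0.30, 'weightdist_front': 0.47}
geometry = {'wheelbase': 1.55, 'trackwidth_front': 1.20, 'trackwidth_rear': 1.18,
            'rollcentre_front': 0.03, 'rollcentre_rear': 0.05,
            'vsal_front': 2.5, 'vsal_rear': 3.0}
suspension = {'cornerspring_front': 30000.0, 'cornerspring_rear': 35000.0,
              'antirollstiffness_front': 250.0, 'antirollstiffness_rear': 150.0}
aero = {'frontal_area': 1.1, 'coeff_lift': 2.5, 'coeff_drag': 1.2,
        'aero_balance': 0.45}
setup = {'static_camber_front': -1.5, 'static_camber_rear': -1.0,
         'static_toe_front': 0.0, 'static_toe_rear': 0.1}

car = Vehicle(tires, mass, geometry, suspension, aero, setup)
fz = car.calc_vertical_load(a_lat=1.5 * 9.81, a_long=0.0, velocity=15.0)
print(fz.fr, fz.fl, fz.rr, fz.rl)  # per-wheel vertical loads [N], negative = downward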
kktse/uwfm
ymd/model/Vehicle.py
Python
apache-2.0
17,595
"""Track most recent submission time on profile Used to prevent scans on submission for determining when a user most recently posted something, for ordering marketplace search results Revision ID: 83e6b2a46191 Revises: a49795aa2584 Create Date: 2017-01-07 03:21:10.114125 """ # revision identifiers, used by Alembic. revision = '83e6b2a46191' down_revision = 'a49795aa2584' from alembic import op # lgtm[py/unused-import] import sqlalchemy as sa # lgtm[py/unused-import] import libweasyl def upgrade(): op.add_column('profile', sa.Column('latest_submission_time', libweasyl.models.helpers.WeasylTimestampColumn(), nullable=False, server_default='0')) op.execute(""" UPDATE profile p SET latest_submission_time = ( SELECT COALESCE(MAX(s.unixtime), 0) AS latest FROM submission s WHERE s.userid = p.userid ) """) def downgrade(): op.drop_column('profile', 'latest_submission_time')
Weasyl/weasyl
libweasyl/libweasyl/alembic/versions/83e6b2a46191_track_most_recent_submission_time.py
Python
apache-2.0
975
#!/usr/bin/env python
# Jonas Schnelli, 2013
# make sure the JennyCoin-Qt.app contains the right plist (including the right version)
# fix made because of several bugs in Qt mac deployment (https://bugreports.qt-project.org/browse/QTBUG-21267)

from string import Template
from datetime import date

bitcoinDir = "./"

inFile = bitcoinDir+"/share/qt/Info.plist"
outFile = "JennyCoin-Qt.app/Contents/Info.plist"
version = "unknown"

fileForGrabbingVersion = bitcoinDir+"bitcoin-qt.pro"
for line in open(fileForGrabbingVersion):
    lineArr = line.replace(" ", "").split("=")
    if lineArr[0].startswith("VERSION"):
        version = lineArr[1].replace("\n", "")

fIn = open(inFile, "r")
fileContent = fIn.read()
s = Template(fileContent)
newFileContent = s.substitute(VERSION=version,YEAR=date.today().year)

fOut = open(outFile, "w")
fOut.write(newFileContent)

print "Info.plist freshly created"
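# The version scrape above expects a line like the following in
# bitcoin-qt.pro (the value is made up):
#
#   VERSION = 1.1.0
#
# After replace(" ", "") and split("="), lineArr is ['VERSION', '1.1.0\n'];
# the newline is stripped and the value is substituted for the $VERSION
# placeholder (alongside $YEAR) in the Info.plist template.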
JennyCoin-Project/JennyCoin
share/qt/clean_mac_info_plist.py
Python
mit
899
class Schedule(object):
    def __init__(self):
        self._fixedtasks = []
        self._tasks = []
        self._fitness = 0

    def __ne__(self, other):
        return not self == other

    def __eq__(self, other):
        # Two schedules are equal when every task (fixed or not) in one
        # has a matching task in the other.
        if len(self.tasks) != len(other.tasks):
            return False
        if len(self.fixedtasks) != len(other.fixedtasks):
            return False
        for t in self.tasks:
            if not any(t == t2 for t2 in other.tasks):
                return False
        for t in self.fixedtasks:
            if not any(t == t2 for t2 in other.fixedtasks):
                return False
        return True

    def __copy__(self):
        s = Schedule()
        for t in self.tasks:
            s.tasks += [t.__copy__()]
        for t in self.fixedtasks:
            s.fixedtasks += [t.__copy__()]
        return s

    @property
    def tasks(self):
        return self._tasks

    @tasks.setter
    def tasks(self, val):
        self._tasks = val

    @property
    def fixedtasks(self):
        return self._fixedtasks

    @fixedtasks.setter
    def fixedtasks(self, val):
        self._fixedtasks = val

    @property
    def fitness(self):
        return self._fitness

    @fitness.setter
    def fitness(self, val):
        self._fitness = val


class Task(object):
    def __init__(self, id=0, begin=0, duration=0, deadline=0,
                 description='', fixed=False):
        self._id = id
        self._begin = begin
        self._duration = duration
        self._deadline = deadline
        self._description = description
        self._fixed = fixed

    def __ne__(self, other):
        return not self == other

    def __eq__(self, other):
        # Tasks compare equal on their scheduling fields only.
        return (self.begin == other.begin and
                self.duration == other.duration and
                self.deadline == other.deadline)

    def __copy__(self):
        # Copy all fields, including the fixed flag.
        return Task(self.id, self.begin, self.duration, self.deadline,
                    self.description, self.fixed)

    @property
    def id(self):
        return self._id

    @property
    def fixed(self):
        return self._fixed

    @fixed.setter
    def fixed(self, val):
        self._fixed = val

    @property
    def duration(self):
        return self._duration

    @duration.setter
    def duration(self, val):
        self._duration = val

    @property
    def deadline(self):
        return self._deadline

    @deadline.setter
    def deadline(self, val):
        self._deadline = val

    @property
    def begin(self):
        return self._begin

    @begin.setter
    def begin(self, val):
        self._begin = val

    @property
    def description(self):
        return self._description

    @description.setter
    def description(self, val):
        self._description = val
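# A small sketch of the equality semantics, with made-up task times:
a = Schedule()
a.tasks = [Task(id=1, begin=0, duration=30, deadline=60),
           Task(id=2, begin=30, duration=15, deadline=90)]

b = a.__copy__()
print(a == b)          # True: every task finds a match in the copy

b.tasks[0].begin = 10  # shift one task
print(a == b)          # False: Task.__eq__ compares begin/duration/deadline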
fredmorcos/attic
projects/autocal/attic/autocal-py/libautocal/schedule.py
Python
isc
2,209