Dataset schema:

repo_name: string (length 5 to 100)
path: string (length 4 to 251)
copies: string (990 distinct values)
size: string (length 4 to 7)
content: string (length 499 to 1.05M)
license: string (15 distinct values)
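
Each row below is one source file, with the full file text stored as a string in the content column. As a minimal sketch of how such a dump can be consumed (the dataset name below is a placeholder, since this dump's hosting location is not named here), the Hugging Face datasets library can load and filter it:

# Minimal sketch, assuming this dump comes from a Hugging Face dataset
# with the columns listed above. "your-org/code-files" is a placeholder,
# not the actual dataset name.
from datasets import load_dataset

ds = load_dataset("your-org/code-files", split="train")

# Keep permissively licensed Python files under 10 KB. Note that the
# size column is stored as a string, so it must be cast.
permissive = {"mit", "bsd-2-clause", "bsd-3-clause", "apache-2.0"}
small_py = ds.filter(
    lambda row: row["path"].endswith(".py")
    and row["license"] in permissive
    and int(row["size"]) < 10_000
)

for row in small_py.select(range(3)):
    print(row["repo_name"], row["path"], row["size"])
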
repo_name: keakon/redis-shard
path: examples/reshard.py
copies: 2
size: 4045
license: bsd-2-clause
content:

#!/usr/bin/python
# -*- coding: utf-8 -*-
import argparse

from redis_shard.shard import RedisShardAPI

# The old ring has 10 shards (feed1..feed10); the new ring doubles it to 20.
# The server dicts differ only in name and port, so they are generated here.
old_servers = [{'name': 'feed%d' % i, 'host': 'feedproxy', 'port': 6300 + i, 'db': 0}
               for i in range(1, 11)]
new_servers = [{'name': 'feed%d' % i, 'host': 'feedproxy', 'port': 6300 + i, 'db': 0}
               for i in range(1, 21)]

FEED_KEY = "{feed%(user_id)s}:list"

old_shard = RedisShardAPI(old_servers)
new_shard = RedisShardAPI(new_servers)


def main():
    parser = argparse.ArgumentParser(description='Reshard newsfeed instance')
    parser.add_argument('--start', type=int, required=True, help='start user id')
    parser.add_argument('--end', type=int, required=True, help='end user id')
    # argparse's type=bool treats any non-empty string as True, so these
    # are boolean flags instead.
    parser.add_argument('--show_only', action='store_true',
                        help='only show migrate process')
    parser.add_argument('--delete', action='store_true',
                        help='really delete old data')
    args = parser.parse_args()
    migrate(args.start, args.end, args.delete, args.show_only)


def migrate(start, end, delete, show_only=False):
    for user_id in range(start, end):
        feed_list_key = FEED_KEY % dict(user_id=user_id)
        old_server_name = old_shard.get_server_name(feed_list_key)
        new_server_name = new_shard.get_server_name(feed_list_key)
        if old_server_name != new_server_name:
            print "%s : %s => %s" % (user_id, old_server_name, new_server_name)
            if show_only:
                continue
            old_server = old_shard.get_server(feed_list_key)
            new_server = new_shard.get_server(feed_list_key)
            if not delete:
                for k, v in old_server.zrange(feed_list_key, 0, -1, withscores=True):
                    new_server.zadd(feed_list_key, k, v)
            else:
                old_server.delete(feed_list_key)


if __name__ == '__main__':
    main()

repo_name: Danfocus/Flexget
path: flexget/components/series/internal_estimator.py
copies: 4
size: 2654
license: mit
content:

from __future__ import unicode_literals, division, absolute_import

import logging

from builtins import *  # noqa pylint: disable=unused-import, redefined-builtin
from datetime import timedelta

from sqlalchemy import desc, func

from flexget import plugin
from flexget.event import event
from flexget.manager import Session
from flexget.utils.tools import multiply_timedelta

from . import db

log = logging.getLogger('est_series_internal')


class EstimatesSeriesInternal(object):
    @plugin.priority(0)  # Should always be last priority
    def estimate(self, entry):
        if not all(field in entry for field in ['series_name', 'series_season', 'series_episode']):
            return
        with Session() as session:
            series = (
                session.query(db.Series).filter(db.Series.name == entry['series_name']).first()
            )
            if not series:
                return
            episodes = (
                session.query(db.Episode)
                .join(db.Episode.series)
                .filter(db.Episode.season != None)
                .filter(db.Series.id == series.id)
                .filter(db.Episode.season == func.max(db.Episode.season).select())
                .order_by(desc(db.Episode.number))
                .limit(2)
                .all()
            )
            if len(episodes) < 2:
                return
            # If last two eps were not contiguous, don't guess
            if episodes[0].number != episodes[1].number + 1:
                return
            # If first_seen is None, return
            if episodes[0].first_seen is None or episodes[1].first_seen is None:
                return
            last_diff = episodes[0].first_seen - episodes[1].first_seen
            # If last eps were grabbed close together, we might be catching up, don't guess
            # Or, if last eps were too far apart, don't guess
            # TODO: What range?
            if last_diff < timedelta(days=2) or last_diff > timedelta(days=10):
                return
            # Estimate next season somewhat more than a normal episode break
            if entry['series_season'] > episodes[0].season:
                # TODO: How big should this be?
                return episodes[0].first_seen + multiply_timedelta(last_diff, 2)
            # Estimate next episode comes out about same length as last ep span, with a little leeway
            return episodes[0].first_seen + multiply_timedelta(last_diff, 0.9)


@event('plugin.register')
def register_plugin():
    plugin.register(
        EstimatesSeriesInternal, 'est_series_internal', interfaces=['estimate_release'], api_ver=2
    )

repo_name: awkspace/ansible
path: test/units/parsing/test_splitter.py
copies: 119
size: 3868
license: gpl-3.0
content:

# coding: utf-8
# (c) 2015, Toshio Kuratomi <tkuratomi@ansible.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible.  If not, see <http://www.gnu.org/licenses/>.

# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type

from ansible.parsing.splitter import split_args, parse_kv

import pytest

SPLIT_DATA = (
    (u'a',
        [u'a'],
        {u'_raw_params': u'a'}),
    (u'a=b',
        [u'a=b'],
        {u'a': u'b'}),
    (u'a="foo bar"',
        [u'a="foo bar"'],
        {u'a': u'foo bar'}),
    (u'"foo bar baz"',
        [u'"foo bar baz"'],
        {u'_raw_params': '"foo bar baz"'}),
    (u'foo bar baz',
        [u'foo', u'bar', u'baz'],
        {u'_raw_params': u'foo bar baz'}),
    (u'a=b c="foo bar"',
        [u'a=b', u'c="foo bar"'],
        {u'a': u'b', u'c': u'foo bar'}),
    (u'a="echo \\"hello world\\"" b=bar',
        [u'a="echo \\"hello world\\""', u'b=bar'],
        {u'a': u'echo "hello world"', u'b': u'bar'}),
    (u'a="multi\nline"',
        [u'a="multi\nline"'],
        {u'a': u'multi\nline'}),
    (u'a="blank\n\nline"',
        [u'a="blank\n\nline"'],
        {u'a': u'blank\n\nline'}),
    (u'a="blank\n\n\nlines"',
        [u'a="blank\n\n\nlines"'],
        {u'a': u'blank\n\n\nlines'}),
    (u'a="a long\nmessage\\\nabout a thing\n"',
        [u'a="a long\nmessage\\\nabout a thing\n"'],
        {u'a': u'a long\nmessage\\\nabout a thing\n'}),
    (u'a="multiline\nmessage1\\\n" b="multiline\nmessage2\\\n"',
        [u'a="multiline\nmessage1\\\n"', u'b="multiline\nmessage2\\\n"'],
        {u'a': 'multiline\nmessage1\\\n', u'b': u'multiline\nmessage2\\\n'}),
    (u'a={{jinja}}',
        [u'a={{jinja}}'],
        {u'a': u'{{jinja}}'}),
    (u'a={{ jinja }}',
        [u'a={{ jinja }}'],
        {u'a': u'{{ jinja }}'}),
    (u'a="{{jinja}}"',
        [u'a="{{jinja}}"'],
        {u'a': u'{{jinja}}'}),
    (u'a={{ jinja }}{{jinja2}}',
        [u'a={{ jinja }}{{jinja2}}'],
        {u'a': u'{{ jinja }}{{jinja2}}'}),
    (u'a="{{ jinja }}{{jinja2}}"',
        [u'a="{{ jinja }}{{jinja2}}"'],
        {u'a': u'{{ jinja }}{{jinja2}}'}),
    (u'a={{jinja}} b={{jinja2}}',
        [u'a={{jinja}}', u'b={{jinja2}}'],
        {u'a': u'{{jinja}}', u'b': u'{{jinja2}}'}),
    (u'a="{{jinja}}\n" b="{{jinja2}}\n"',
        [u'a="{{jinja}}\n"', u'b="{{jinja2}}\n"'],
        {u'a': u'{{jinja}}\n', u'b': u'{{jinja2}}\n'}),
    (u'a="café eñyei"',
        [u'a="café eñyei"'],
        {u'a': u'café eñyei'}),
    (u'a=café b=eñyei',
        [u'a=café', u'b=eñyei'],
        {u'a': u'café', u'b': u'eñyei'}),
    (u'a={{ foo | some_filter(\' \', " ") }} b=bar',
        [u'a={{ foo | some_filter(\' \', " ") }}', u'b=bar'],
        {u'a': u'{{ foo | some_filter(\' \', " ") }}', u'b': u'bar'}),
    (u'One\n  Two\n    Three\n',
        [u'One\n  ', u'Two\n    ', u'Three\n'],
        {u'_raw_params': u'One\n  Two\n    Three\n'}),
)

SPLIT_ARGS = ((test[0], test[1]) for test in SPLIT_DATA)
PARSE_KV = ((test[0], test[2]) for test in SPLIT_DATA)


@pytest.mark.parametrize("args, expected", SPLIT_ARGS)
def test_split_args(args, expected):
    assert split_args(args) == expected


@pytest.mark.parametrize("args, expected", PARSE_KV)
def test_parse_kv(args, expected):
    assert parse_kv(args) == expected

repo_name: Keiiko/anime-manga-cz-downloader
path: downloader-v0.22pa.py
copies: 1
size: 3746
license: mit
content:

import urllib2
import re
import os


# Download a URL and return the raw HTML.
def stahniHtml(url):
    f = urllib2.urlopen(url)
    obsah = f.read()
    f.close()
    return obsah


# Walk every chapter of the series and download its pages.
def nahled(url):
    global chapter
    global currpatch1
    odkazy = vyberodkazux(url)
    for odkaz in odkazy:
        currpatch1 = odkaz.replace("index.html", "")
        chapter = re.search(r'.*/(.*?)/index', odkaz).group(1)
        print "Kapitola " + chapter
        print " Stahovani nahledu kapitoly... ",
        nahledhtml = stahniHtml(odkaz)
        print "Hotovo."
        print " Vyhledavani odkazu stranky... ",
        tabulka = re.search(r'<!-- Thumbnail images -->(.*?)class="xsmalltxt"', nahledhtml, re.DOTALL).group(1)
        nahledyurl = re.findall(r'<a href="(.*?)"', tabulka)
        print "Hotovo."
        kapitola(nahledyurl)
    print "Vsechna stahovani dokoncena."
    finalpatch = os.path.expanduser("~") + "\\Downloads\\anime-manga.cz-downloader\\" + nazevserie + "\\"
    print "Ulozeno do: " + finalpatch
    os.startfile(finalpatch)


# Download every page of one chapter.
def kapitola(nahledyurl):
    for kapitolasmallurl in nahledyurl:
        kapitolafullurl = currpatch1 + kapitolasmallurl
        getobrazek(kapitolafullurl)


# Find the image URL on a page and save the image.
def getobrazek(kapitolafullurl):
    global imgname
    print " Vyhledavani odkazu obrazku... ",
    obrazekshorturl = re.search(r'<img id="slide" src="(.*?)".*?>', stahniHtml(kapitolafullurl)).group(1).replace("../", "")
    imgname = obrazekshorturl
    print "Hotovo."
    obrazekfullurl = currpatch1 + obrazekshorturl
    # print obrazekfullurl
    ulozitobr(obrazekfullurl)


# Save one image under ~/Downloads/anime-manga.cz-downloader/<series>/<chapter>/.
def ulozitobr(obrazekfullurl):
    print " Ukladani obrazku " + obrazekfullurl + "... ",
    currentpatch = os.path.expanduser("~") + "\\Downloads\\anime-manga.cz-downloader\\" + nazevserie + "\\" + chapter + "\\"
    createDir(currentpatch)
    imgData = urllib2.urlopen(obrazekfullurl).read()
    output = open(currentpatch + imgname, 'wb')
    output.write(imgData)
    output.close()
    print "Hotovo."


def createDir(path):
    if os.path.exists(path) != True:
        os.makedirs(path)


### 18+ extension ###
# Collect chapter links from the main page, following age-gated links too.
def vyberodkazux(url):
    global nazevserie
    print "Stahovani hlavni stranky... ",
    stranka = stahniHtml(url)
    print "Hotovo."
    print "Vyhledavani kapitol... ",
    odkazy = odkazya(stranka) + odkazyb(stranka)
    nazevserie = re.search(r'<title>(.*?) *\| Anime - Manga.*?</title>', stranka).group(1).replace(" ", "").replace(" ", " ").replace(" ", " ")
    print "Hotovo."
    print "Manga " + nazevserie
    return odkazy


def odkazya(stranka):
    odkazy1 = re.findall(r'<a href="(http://anime-manga.cz/manga.*?)"', stranka)
    odkazy2 = re.findall(r'<a href="(http://www.anime-manga.cz/manga.*?)"', stranka)
    odkazy = odkazy1 + odkazy2
    return odkazy


def odkazyb(stranka):
    odkazy18 = re.findall(r'<a href="(http://anime-manga.cz/\d[^/]*?)"|<a href="(http://www.anime-manga.cz/\d[^/]*?)"|<a href="(http://anime-manga.cz/[^/]*?\d)"|<a href="(http://www.anime-manga.cz/[^/]*?\d)"', stranka)
    odkazy = []
    for odkaz18 in odkazy18:
        for i in range(4):
            if odkaz18[i] != '':
                stranka18 = stahniHtml(odkaz18[i])
                odkazy.append(re.search(r'<a href="(.*?anime-manga.cz/manga.*?)"', stranka18).group(1))
    return odkazy


### Proxy ###
def inicializaceproxy():
    prx = raw_input('Zadej prihlasovani ve tvaru http://username:password@proxyserver.domain.com: ')
    os.environ['HTTP_PROXY'] = prx
    proxy = urllib2.ProxyHandler({'http': prx})
    opener = urllib2.build_opener(proxy)
    urllib2.install_opener(opener)


print "Anime-manga.cz Downloader PROXY alfa"
inicializaceproxy()
xurl = raw_input('stahnout mangu s url: http://www.anime-manga.cz/')
nahled("http://www.anime-manga.cz/" + xurl)

repo_name: evook/mycli
path: mycli/packages/expanded.py
copies: 23
size: 1070
license: bsd-3-clause
content:

from .tabulate import _text_type


def pad(field, total, char=u" "):
    return field + (char * (total - len(field)))


def get_separator(num, header_len, data_len):
    sep = u"***************************[ %d. row ]***************************\n" % (num + 1)
    return sep


def expanded_table(rows, headers):
    header_len = max([len(x) for x in headers])
    max_row_len = 0
    results = []
    padded_headers = [pad(x, header_len) + u" |" for x in headers]
    header_len += 2

    for row in rows:
        row_len = max([len(_text_type(x)) for x in row])
        row_result = []
        if row_len > max_row_len:
            max_row_len = row_len

        for header, value in zip(padded_headers, row):
            if value is None:
                value = '<null>'
            row_result.append(u"%s %s" % (header, value))

        results.append('\n'.join(row_result))

    output = []
    for i, result in enumerate(results):
        output.append(get_separator(i, header_len, max_row_len))
        output.append(result)
        output.append('\n')

    return ''.join(output)

repo_name: lairdm/islandviewer-ui
path: Islandviewer/wsgi.py
copies: 2
size: 1437
license: gpl-3.0
content:

"""
WSGI config for Islandviewer project.

This module contains the WSGI application used by Django's development server
and any production WSGI deployments. It should expose a module-level variable
named ``application``. Django's ``runserver`` and ``runfcgi`` commands discover
this application via the ``WSGI_APPLICATION`` setting.

Usually you will have the standard Django WSGI application here, but it also
might make sense to replace the whole Django WSGI application with a custom one
that later delegates to the Django one. For example, you could introduce WSGI
middleware here, or combine a Django application with an application of another
framework.

"""
import os

# We defer to a DJANGO_SETTINGS_MODULE already in the environment. This breaks
# if running multiple sites in the same mod_wsgi process. To fix this, use
# mod_wsgi daemon mode with each site in its own daemon process, or use
# os.environ["DJANGO_SETTINGS_MODULE"] = "Islandviewer.settings"
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "Islandviewer.settings")

# This application object is used by any WSGI server configured to use this
# file. This includes Django's development server, if the WSGI_APPLICATION
# setting points here.
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()

# Apply WSGI middleware here.
# from helloworld.wsgi import HelloWorldApplication
# application = HelloWorldApplication(application)

repo_name: pbrady/sympy
path: sympy/vector/tests/test_dyadic.py
copies: 94
size: 4076
license: bsd-3-clause
content:

from sympy import sin, cos, symbols, pi, ImmutableMatrix as Matrix, \
    simplify
from sympy.vector import (CoordSysCartesian, Vector, Dyadic,
                          DyadicAdd, DyadicMul, DyadicZero,
                          BaseDyadic, express)

A = CoordSysCartesian('A')


def test_dyadic():
    a, b = symbols('a, b')
    assert Dyadic.zero != 0
    assert isinstance(Dyadic.zero, DyadicZero)
    assert BaseDyadic(A.i, A.j) != BaseDyadic(A.j, A.i)
    assert (BaseDyadic(Vector.zero, A.i) ==
            BaseDyadic(A.i, Vector.zero) == Dyadic.zero)

    d1 = A.i | A.i
    d2 = A.j | A.j
    d3 = A.i | A.j

    assert isinstance(d1, BaseDyadic)
    d_mul = a*d1
    assert isinstance(d_mul, DyadicMul)
    assert d_mul.base_dyadic == d1
    assert d_mul.measure_number == a
    assert isinstance(a*d1 + b*d3, DyadicAdd)
    assert d1 == A.i.outer(A.i)
    assert d3 == A.i.outer(A.j)
    v1 = a*A.i - A.k
    v2 = A.i + b*A.j
    assert v1 | v2 == v1.outer(v2) == a * (A.i | A.i) + (a*b) * (A.i | A.j) +\
        - (A.k | A.i) - b * (A.k | A.j)
    assert d1 * 0 == Dyadic.zero
    assert d1 != Dyadic.zero
    assert d1 * 2 == 2 * (A.i | A.i)
    assert d1 / 2. == 0.5 * d1

    assert d1.dot(0 * d1) == Vector.zero
    assert d1 & d2 == Dyadic.zero
    assert d1.dot(A.i) == A.i == d1 & A.i

    assert d1.cross(Vector.zero) == Dyadic.zero
    assert d1.cross(A.i) == Dyadic.zero
    assert d1 ^ A.j == d1.cross(A.j)
    assert d1.cross(A.k) == - A.i | A.j
    assert d2.cross(A.i) == - A.j | A.k == d2 ^ A.i

    assert A.i ^ d1 == Dyadic.zero
    assert A.j.cross(d1) == - A.k | A.i == A.j ^ d1
    assert Vector.zero.cross(d1) == Dyadic.zero
    assert A.k ^ d1 == A.j | A.i
    assert A.i.dot(d1) == A.i & d1 == A.i
    assert A.j.dot(d1) == Vector.zero
    assert Vector.zero.dot(d1) == Vector.zero
    assert A.j & d2 == A.j

    assert d1.dot(d3) == d1 & d3 == A.i | A.j == d3
    assert d3 & d1 == Dyadic.zero

    q = symbols('q')
    B = A.orient_new_axis('B', q, A.k)
    assert express(d1, B) == express(d1, B, B)

    assert express(d1, B) == ((cos(q)**2) * (B.i | B.i) + (-sin(q) * cos(q)) *
                              (B.i | B.j) + (-sin(q) * cos(q)) * (B.j | B.i) +
                              (sin(q)**2) * (B.j | B.j))

    assert express(d1, B, A) == (cos(q)) * (B.i | A.i) + (-sin(q)) * (B.j | A.i)
    assert express(d1, A, B) == (cos(q)) * (A.i | B.i) + (-sin(q)) * (A.i | B.j)

    assert d1.to_matrix(A) == Matrix([[1, 0, 0], [0, 0, 0], [0, 0, 0]])
    assert d1.to_matrix(A, B) == Matrix([[cos(q), -sin(q), 0],
                                         [0, 0, 0],
                                         [0, 0, 0]])
    assert d3.to_matrix(A) == Matrix([[0, 1, 0], [0, 0, 0], [0, 0, 0]])
    a, b, c, d, e, f = symbols('a, b, c, d, e, f')
    v1 = a * A.i + b * A.j + c * A.k
    v2 = d * A.i + e * A.j + f * A.k
    d4 = v1.outer(v2)
    assert d4.to_matrix(A) == Matrix([[a * d, a * e, a * f],
                                      [b * d, b * e, b * f],
                                      [c * d, c * e, c * f]])
    d5 = v1.outer(v1)
    C = A.orient_new_axis('C', q, A.i)
    for expected, actual in zip(C.rotation_matrix(A) * d5.to_matrix(A) * \
                                C.rotation_matrix(A).T, d5.to_matrix(C)):
        assert (expected - actual).simplify() == 0


def test_dyadic_simplify():
    x, y, z, k, n, m, w, f, s, A = symbols('x, y, z, k, n, m, w, f, s, A')
    N = CoordSysCartesian('N')

    dy = N.i | N.i
    test1 = (1 / x + 1 / y) * dy
    assert (N.i & test1 & N.i) != (x + y) / (x * y)
    test1 = test1.simplify()
    assert test1.simplify() == simplify(test1)
    assert (N.i & test1 & N.i) == (x + y) / (x * y)

    test2 = (A**2 * s**4 / (4 * pi * k * m**3)) * dy
    test2 = test2.simplify()
    assert (N.i & test2 & N.i) == (A**2 * s**4 / (4 * pi * k * m**3))

    test3 = ((4 + 4 * x - 2 * (2 + 2 * x)) / (2 + 2 * x)) * dy
    test3 = test3.simplify()
    assert (N.i & test3 & N.i) == 0

    test4 = ((-4 * x * y**2 - 2 * y**3 - 2 * x**2 * y) / (x + y)**2) * dy
    test4 = test4.simplify()
    assert (N.i & test4 & N.i) == -2 * y

repo_name: ryandougherty/mwa-capstone
path: MWA_Tools/build/matplotlib/lib/mpl_examples/misc/longshort.py
copies: 6
size: 1676
license: gpl-2.0
content:

"""
Illustrate the rec array utility functions by loading prices from a
csv file, computing the daily returns, appending the results to the
record arrays, joining on date
"""
import urllib
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.mlab as mlab

# grab the price data off yahoo
u1 = urllib.urlretrieve('http://ichart.finance.yahoo.com/table.csv?s=AAPL&d=9&e=14&f=2008&g=d&a=8&b=7&c=1984&ignore=.csv')
u2 = urllib.urlretrieve('http://ichart.finance.yahoo.com/table.csv?s=GOOG&d=9&e=14&f=2008&g=d&a=8&b=7&c=1984&ignore=.csv')

# load the CSV files into record arrays
r1 = mlab.csv2rec(file(u1[0]))
r2 = mlab.csv2rec(file(u2[0]))

# compute the daily returns and add these columns to the arrays
gains1 = np.zeros_like(r1.adj_close)
gains2 = np.zeros_like(r2.adj_close)
gains1[1:] = np.diff(r1.adj_close)/r1.adj_close[:-1]
gains2[1:] = np.diff(r2.adj_close)/r2.adj_close[:-1]
r1 = mlab.rec_append_fields(r1, 'gains', gains1)
r2 = mlab.rec_append_fields(r2, 'gains', gains2)

# now join them by date; the default postfixes are 1 and 2.  The
# default jointype is inner so it will do an intersection of dates and
# drop the dates in AAPL which occurred before GOOG started trading in
# 2004.  r1 and r2 are reverse ordered by date since Yahoo returns
# most recent first in the CSV files, but rec_join will sort by key so
# r below will be properly sorted
r = mlab.rec_join('date', r1, r2)

# long appl, short goog
g = r.gains1 - r.gains2
tr = (1 + g).cumprod()  # the total return

# plot the return
fig = plt.figure()
ax = fig.add_subplot(111)
ax.plot(r.date, tr)
ax.set_title('total return: long APPL, short GOOG')
ax.grid()
fig.autofmt_xdate()
plt.show()

repo_name: opennode/nodeconductor
path: waldur_core/cost_tracking/tests/test_price_estimate.py
copies: 1
size: 9235
license: mit
content:

from ddt import ddt, data
from rest_framework import status

from waldur_core.structure.tests import factories as structure_factories

from . import factories
from .base_test import BaseCostTrackingTest
from .. import models


@ddt
class PriceEstimateListTest(BaseCostTrackingTest):
    def setUp(self):
        super(PriceEstimateListTest, self).setUp()
        self.link_price_estimate = factories.PriceEstimateFactory(
            year=2012, month=10, scope=self.service_project_link)
        self.project_price_estimate = factories.PriceEstimateFactory(scope=self.project, year=2015, month=7)

    @data('owner', 'manager', 'administrator')
    def test_user_can_see_price_estimate_for_his_project(self, user):
        self.client.force_authenticate(self.users[user])
        response = self.client.get(factories.PriceEstimateFactory.get_list_url())
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertIn(self.project_price_estimate.uuid.hex, [obj['uuid'] for obj in response.data])

    @data('owner', 'manager', 'administrator')
    def test_user_cannot_see_price_estimate_for_not_his_project(self, user):
        other_price_estimate = factories.PriceEstimateFactory()
        self.client.force_authenticate(self.users[user])
        response = self.client.get(factories.PriceEstimateFactory.get_list_url())
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertNotIn(other_price_estimate.uuid.hex, [obj['uuid'] for obj in response.data])

    def test_user_can_filter_price_estimate_by_scope(self):
        self.client.force_authenticate(self.users['owner'])
        response = self.client.get(
            factories.PriceEstimateFactory.get_list_url(),
            data={'scope': structure_factories.ProjectFactory.get_url(self.project)})
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(len(response.data), 1)
        self.assertEqual(response.data[0]['uuid'], self.project_price_estimate.uuid.hex)

    def test_user_can_filter_price_estimates_by_date(self):
        self.client.force_authenticate(self.users['administrator'])
        response = self.client.get(
            factories.PriceEstimateFactory.get_list_url(),
            data={'date': '{}.{}'.format(self.link_price_estimate.year, self.link_price_estimate.month)})
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(len(response.data), 1)
        self.assertEqual(response.data[0]['uuid'], self.link_price_estimate.uuid.hex)

    def test_user_can_filter_price_estimates_by_date_range(self):
        self.client.force_authenticate(self.users['manager'])
        response = self.client.get(
            factories.PriceEstimateFactory.get_list_url(),
            data={'start': '{}.{}'.format(self.link_price_estimate.year, self.link_price_estimate.month + 1),
                  'end': '{}.{}'.format(self.project_price_estimate.year, self.project_price_estimate.month + 1)})
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(len(response.data), 1)
        self.assertEqual(response.data[0]['uuid'], self.project_price_estimate.uuid.hex)

    def test_user_receive_error_on_filtering_by_not_visible_for_him_object(self):
        data = {'scope': structure_factories.ProjectFactory.get_url()}
        self.client.force_authenticate(self.users['administrator'])
        response = self.client.get(factories.PriceEstimateFactory.get_list_url(), data=data)
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)

    def test_user_can_define_children_visibility_depth(self):
        customer_price_estimate = factories.PriceEstimateFactory(scope=self.customer, year=2015, month=7)
        customer_price_estimate.children.add(self.project_price_estimate)
        spl_price_estimate = factories.PriceEstimateFactory(scope=self.service_project_link, year=2015, month=7)
        self.project_price_estimate.children.add(spl_price_estimate)

        self.client.force_authenticate(self.users['owner'])
        response = self.client.get(factories.PriceEstimateFactory.get_url(customer_price_estimate),
                                   data={'depth': 1})

        self.assertEqual(response.status_code, status.HTTP_200_OK)
        # with visibility depth 1 we want to see customer estimate children
        self.assertEqual(len(response.data['children']), 1)
        project_estimate_data = response.data['children'][0]
        self.assertEqual(project_estimate_data['uuid'], self.project_price_estimate.uuid.hex)
        # with visibility depth 1 we do not want to see grandchildren
        self.assertNotIn('children', project_estimate_data)


class PriceEstimateUpdateTest(BaseCostTrackingTest):
    def setUp(self):
        super(PriceEstimateUpdateTest, self).setUp()
        self.price_estimate = factories.PriceEstimateFactory(scope=self.service_project_link)
        self.valid_data = {
            'scope': structure_factories.TestServiceProjectLinkFactory.get_url(self.service_project_link),
            'total': 100,
            'details': {'ram': 50, 'disk': 50},
            'month': 7,
            'year': 2015,
        }

    def test_price_estimate_scope_cannot_be_updated(self):
        other_service_project_link = structure_factories.TestServiceProjectLinkFactory(project=self.project)
        self.valid_data['scope'] = structure_factories.TestServiceProjectLinkFactory.get_url(
            other_service_project_link)
        self.client.force_authenticate(self.users['staff'])

        response = self.client.patch(factories.PriceEstimateFactory.get_url(self.price_estimate),
                                     data=self.valid_data)
        self.assertEqual(response.status_code, status.HTTP_405_METHOD_NOT_ALLOWED)

        reread_price_estimate = models.PriceEstimate.objects.get(id=self.price_estimate.id)
        self.assertNotEqual(reread_price_estimate.scope, other_service_project_link)

    def test_autocalculated_estimate_cannot_be_manually_updated(self):
        self.client.force_authenticate(self.users['staff'])
        response = self.client.patch(factories.PriceEstimateFactory.get_url(self.price_estimate),
                                     data=self.valid_data)
        self.assertEqual(response.status_code, status.HTTP_405_METHOD_NOT_ALLOWED)


class PriceEstimateDeleteTest(BaseCostTrackingTest):
    def setUp(self):
        super(PriceEstimateDeleteTest, self).setUp()
        self.project_price_estimate = factories.PriceEstimateFactory(scope=self.project)

    def test_autocreated_price_estimate_cannot_be_deleted(self):
        self.client.force_authenticate(self.users['staff'])
        response = self.client.delete(factories.PriceEstimateFactory.get_url(self.project_price_estimate))
        self.assertEqual(response.status_code, status.HTTP_405_METHOD_NOT_ALLOWED)


class ScopeTypeFilterTest(BaseCostTrackingTest):
    def setUp(self):
        super(ScopeTypeFilterTest, self).setUp()
        resource = structure_factories.TestNewInstanceFactory(service_project_link=self.service_project_link)
        self.estimates = {
            'customer': models.PriceEstimate.objects.get(scope=self.customer),
            'service': models.PriceEstimate.objects.get(scope=self.service),
            'project': models.PriceEstimate.objects.get(scope=self.project),
            'service_project_link': models.PriceEstimate.objects.get(scope=self.service_project_link),
            'resource': models.PriceEstimate.objects.get(scope=resource),
        }

    def test_user_can_filter_price_estimate_by_scope_type(self):
        self.client.force_authenticate(self.users['owner'])
        for scope_type, estimate in self.estimates.items():
            response = self.client.get(
                factories.PriceEstimateFactory.get_list_url(),
                data={'scope_type': scope_type})
            self.assertEqual(response.status_code, status.HTTP_200_OK)
            self.assertEqual(len(response.data), 1, response.data)
            self.assertEqual(response.data[0]['uuid'], estimate.uuid.hex)


class CustomerFilterTest(BaseCostTrackingTest):
    def setUp(self):
        super(CustomerFilterTest, self).setUp()
        resource = structure_factories.TestNewInstanceFactory()
        link = resource.service_project_link
        customer = link.customer
        project = link.project
        service = link.service
        scopes = {link, customer, project, service, resource}
        self.estimates = {models.PriceEstimate.objects.get(scope=scope) for scope in scopes}
        self.customer = customer

        resource2 = structure_factories.TestNewInstanceFactory()
        resource2_estimate = factories.PriceEstimateFactory(scope=resource2)
        resource2_estimate.create_ancestors()

    def test_user_can_filter_price_estimate_by_customer_uuid(self):
        self.client.force_authenticate(self.users['staff'])
        response = self.client.get(
            factories.PriceEstimateFactory.get_list_url(),
            data={'customer': self.customer.uuid.hex})
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual({estimate['uuid'] for estimate in response.data},
                         {estimate.uuid.hex for estimate in self.estimates})

repo_name: ryandougherty/mwa-capstone
path: MWA_Tools/build/matplotlib/examples/pylab_examples/polar_demo.py
copies: 3
size: 2336
license: gpl-2.0
content:

#!/usr/bin/env python
#
# matplotlib now has a PolarAxes class and a polar function in the
# matplotlib interface.  This is considered alpha and the interface
# may change as we work out how polar axes should best be integrated
#
# The only function that has been tested on polar axes is "plot" (the
# pylab interface function "polar" calls ax.plot where ax is a
# PolarAxes) -- other axes plotting functions may work on PolarAxes
# but haven't been tested and may need tweaking.
#
# you can get a PolarSubplot instance by doing, for example
#
#   subplot(211, polar=True)
#
# or a PolarAxes instance by doing
#
#   axes([left, bottom, width, height], polar=True)
#
# The view limits (eg xlim and ylim) apply to the lower left and upper
# right of the rectangular box that surrounds the polar axes.  Eg if
# you have
#
#   r = arange(0,1,0.01)
#   theta = 2*pi*r
#
# the lower left corner is 5/4pi, sqrt(2) and the
# upper right corner is 1/4pi, sqrt(2)
#
# you could change the radial bounding box (zoom out) by setting the
# ylim (radial coordinate is the second argument to the plot command,
# as in MATLAB, though this is not advised currently because it is not
# clear to me how the axes should behave in the change of view limits.
# Please advise me if you have opinions.  Likewise, the pan/zoom
# controls probably do not do what you think they do and are better
# left alone on polar axes.  Perhaps I will disable them for polar
# axes unless we come up with a meaningful, useful and functional
# implementation for them.
#
# See the pylab rgrids and thetagrids functions for
# information on how to customize the grid locations and labels

import matplotlib
import numpy as np
from matplotlib.pyplot import figure, show, rc, grid

# radar green, solid grid lines
rc('grid', color='#316931', linewidth=1, linestyle='-')
rc('xtick', labelsize=15)
rc('ytick', labelsize=15)

# force square figure and square axes looks better for polar, IMO
width, height = matplotlib.rcParams['figure.figsize']
size = min(width, height)
# make a square figure
fig = figure(figsize=(size, size))
ax = fig.add_axes([0.1, 0.1, 0.8, 0.8], polar=True, axisbg='#d5de9c')

r = np.arange(0, 3.0, 0.01)
theta = 2*np.pi*r
ax.plot(theta, r, color='#ee8d18', lw=3)
ax.set_rmax(2.0)
grid(True)

ax.set_title("And there was much rejoicing!", fontsize=20)
show()

repo_name: MattRijk/django-ecomsite
path: lib/python2.7/site-packages/django/contrib/messages/tests/test_cookie.py
copies: 75
size: 7150
license: cc0-1.0
content:

import json

from django.contrib.messages import constants
from django.contrib.messages.tests.base import BaseTests
from django.contrib.messages.storage.cookie import (CookieStorage,
    MessageEncoder, MessageDecoder)
from django.contrib.messages.storage.base import Message
from django.test import TestCase
from django.test.utils import override_settings
from django.utils.safestring import SafeData, mark_safe


def set_cookie_data(storage, messages, invalid=False, encode_empty=False):
    """
    Sets ``request.COOKIES`` with the encoded data and removes the storage
    backend's loaded data cache.
    """
    encoded_data = storage._encode(messages, encode_empty=encode_empty)
    if invalid:
        # Truncate the first character so that the hash is invalid.
        encoded_data = encoded_data[1:]
    storage.request.COOKIES = {CookieStorage.cookie_name: encoded_data}
    if hasattr(storage, '_loaded_data'):
        del storage._loaded_data


def stored_cookie_messages_count(storage, response):
    """
    Returns an integer containing the number of messages stored.
    """
    # Get a list of cookies, excluding ones with a max-age of 0 (because
    # they have been marked for deletion).
    cookie = response.cookies.get(storage.cookie_name)
    if not cookie or cookie['max-age'] == 0:
        return 0
    data = storage._decode(cookie.value)
    if not data:
        return 0
    if data[-1] == CookieStorage.not_finished:
        data.pop()
    return len(data)


@override_settings(SESSION_COOKIE_DOMAIN='.example.com')
class CookieTest(BaseTests, TestCase):
    storage_class = CookieStorage

    def stored_messages_count(self, storage, response):
        return stored_cookie_messages_count(storage, response)

    def test_get(self):
        storage = self.storage_class(self.get_request())
        # Set initial data.
        example_messages = ['test', 'me']
        set_cookie_data(storage, example_messages)
        # Test that the message actually contains what we expect.
        self.assertEqual(list(storage), example_messages)

    def test_domain(self):
        """
        Ensure that CookieStorage honors SESSION_COOKIE_DOMAIN.
        Refs #15618.
        """
        # Test before the messages have been consumed
        storage = self.get_storage()
        response = self.get_response()
        storage.add(constants.INFO, 'test')
        storage.update(response)
        self.assertTrue('test' in response.cookies['messages'].value)
        self.assertEqual(response.cookies['messages']['domain'], '.example.com')
        self.assertEqual(response.cookies['messages']['expires'], '')

        # Test after the messages have been consumed
        storage = self.get_storage()
        response = self.get_response()
        storage.add(constants.INFO, 'test')
        for m in storage:
            pass  # Iterate through the storage to simulate consumption of messages.
        storage.update(response)
        self.assertEqual(response.cookies['messages'].value, '')
        self.assertEqual(response.cookies['messages']['domain'], '.example.com')
        self.assertEqual(response.cookies['messages']['expires'], 'Thu, 01-Jan-1970 00:00:00 GMT')

    def test_get_bad_cookie(self):
        request = self.get_request()
        storage = self.storage_class(request)
        # Set initial (invalid) data.
        example_messages = ['test', 'me']
        set_cookie_data(storage, example_messages, invalid=True)
        # Test that the message actually contains what we expect.
        self.assertEqual(list(storage), [])

    def test_max_cookie_length(self):
        """
        Tests that, if the data exceeds what is allowed in a cookie, older
        messages are removed before saving (and returned by the ``update``
        method).
        """
        storage = self.get_storage()
        response = self.get_response()

        # When storing as a cookie, the cookie has constant overhead of approx
        # 54 chars, and each message has a constant overhead of about 37 chars
        # and a variable overhead of zero in the best case. We aim for a message
        # size which will fit 4 messages into the cookie, but not 5.
        # See also FallbackTest.test_session_fallback
        msg_size = int((CookieStorage.max_cookie_size - 54) / 4.5 - 37)
        for i in range(5):
            storage.add(constants.INFO, str(i) * msg_size)
        unstored_messages = storage.update(response)

        cookie_storing = self.stored_messages_count(storage, response)
        self.assertEqual(cookie_storing, 4)

        self.assertEqual(len(unstored_messages), 1)
        self.assertTrue(unstored_messages[0].message == '0' * msg_size)

    def test_json_encoder_decoder(self):
        """
        Tests that a complex nested data structure containing Message
        instances is properly encoded/decoded by the custom JSON
        encoder/decoder classes.
        """
        messages = [
            {
                'message': Message(constants.INFO, 'Test message'),
                'message_list': [Message(constants.INFO, 'message %s')
                                 for x in range(5)] + [{'another-message':
                                 Message(constants.ERROR, 'error')}],
            },
            Message(constants.INFO, 'message %s'),
        ]
        encoder = MessageEncoder(separators=(',', ':'))
        value = encoder.encode(messages)
        decoded_messages = json.loads(value, cls=MessageDecoder)
        self.assertEqual(messages, decoded_messages)

    def test_safedata(self):
        """
        Tests that a message containing SafeData is keeping its safe status
        when retrieved from the message storage.
        """
        def encode_decode(data):
            message = Message(constants.DEBUG, data)
            encoded = storage._encode(message)
            decoded = storage._decode(encoded)
            return decoded.message

        storage = self.get_storage()
        self.assertIsInstance(
            encode_decode(mark_safe("<b>Hello Django!</b>")), SafeData)
        self.assertNotIsInstance(
            encode_decode("<b>Hello Django!</b>"), SafeData)

    def test_pre_1_5_message_format(self):
        """
        For ticket #22426. Tests whether messages that were set in the cookie
        before the addition of is_safedata are decoded correctly.
        """
        # Encode the messages using the current encoder.
        messages = [Message(constants.INFO, 'message %s') for x in range(5)]
        encoder = MessageEncoder(separators=(',', ':'))
        encoded_messages = encoder.encode(messages)

        # Remove the is_safedata flag from the messages in order to imitate
        # the behavior of before 1.5 (monkey patching).
        encoded_messages = json.loads(encoded_messages)
        for obj in encoded_messages:
            obj.pop(1)
        encoded_messages = json.dumps(encoded_messages, separators=(',', ':'))

        # Decode the messages in the old format (without is_safedata)
        decoded_messages = json.loads(encoded_messages, cls=MessageDecoder)
        self.assertEqual(messages, decoded_messages)

repo_name: punchagan/zulip
path: zerver/management/commands/change_user_role.py
copies: 4
size: 3517
license: apache-2.0
content:

from argparse import ArgumentParser
from typing import Any

from django.core.management.base import CommandError

from zerver.lib.actions import (
    do_change_can_create_users,
    do_change_can_forge_sender,
    do_change_user_role,
)
from zerver.lib.management import ZulipBaseCommand
from zerver.models import UserProfile


class Command(ZulipBaseCommand):
    help = """Change role of an existing user in their (own) Realm.

ONLY perform this on customer request from an authorized person.
"""

    def add_arguments(self, parser: ArgumentParser) -> None:
        parser.add_argument("email", metavar="<email>", help="email of user to change role")
        parser.add_argument(
            "new_role",
            metavar="<new_role>",
            choices=[
                "owner",
                "admin",
                "moderator",
                "member",
                "guest",
                "can_forge_sender",
                "can_create_users",
            ],
            help="new role of the user",
        )
        parser.add_argument(
            "--revoke",
            dest="grant",
            action="store_false",
            help="Remove can_forge_sender or can_create_users permission.",
        )
        self.add_realm_args(parser, required=True)

    def handle(self, *args: Any, **options: Any) -> None:
        email = options["email"]
        realm = self.get_realm(options)

        user = self.get_user(email, realm)

        user_role_map = {
            "owner": UserProfile.ROLE_REALM_OWNER,
            "admin": UserProfile.ROLE_REALM_ADMINISTRATOR,
            "moderator": UserProfile.ROLE_MODERATOR,
            "member": UserProfile.ROLE_MEMBER,
            "guest": UserProfile.ROLE_GUEST,
        }

        if options["new_role"] not in ["can_forge_sender", "can_create_users"]:
            new_role = user_role_map[options["new_role"]]
            if not options["grant"]:
                raise CommandError(
                    "Revoke not supported with this permission; please specify new role."
                )
            if new_role == user.role:
                raise CommandError("User already has this role.")
            old_role_name = UserProfile.ROLE_ID_TO_NAME_MAP[user.role]
            do_change_user_role(user, new_role, acting_user=None)
            new_role_name = UserProfile.ROLE_ID_TO_NAME_MAP[user.role]
            print(
                f"Role for {user.delivery_email} changed from {old_role_name} to {new_role_name}."
            )
            return

        if options["new_role"] == "can_forge_sender":
            if user.can_forge_sender and options["grant"]:
                raise CommandError("User can already forge messages for this realm.")
            elif not user.can_forge_sender and not options["grant"]:
                raise CommandError("User can't forge messages for this realm.")
            do_change_can_forge_sender(user, options["grant"])

            granted_text = "have" if options["grant"] else "not have"
            print(
                f"{user.delivery_email} changed to {granted_text} {options['new_role']} permission."
            )
        else:
            if user.can_create_users and options["grant"]:
                raise CommandError("User can already create users for this realm.")
            elif not user.can_create_users and not options["grant"]:
                raise CommandError("User can't create users for this realm.")
            do_change_can_create_users(user, options["grant"])

repo_name: punchagan/zulip
path: zproject/wsgi.py
copies: 8
size: 1917
license: apache-2.0
content:

"""
WSGI config for zulip project.

This module contains the WSGI application used by Django's development server
and any production WSGI deployments. It should expose a module-level variable
named ``application``. Django's ``runserver`` and ``runfcgi`` commands discover
this application via the ``WSGI_APPLICATION`` setting.

Usually you will have the standard Django WSGI application here, but it also
might make sense to replace the whole Django WSGI application with a custom one
that later delegates to the Django one. For example, you could introduce WSGI
middleware here, or combine a Django application with an application of another
framework.

"""
import os
import sys

BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
sys.path.append(BASE_DIR)

from scripts.lib.setup_path import setup_path

setup_path()

os.environ.setdefault("DJANGO_SETTINGS_MODULE", "zproject.settings")

from django.core.wsgi import get_wsgi_application

try:
    # This application object is used by any WSGI server configured to use this
    # file. This includes Django's development server, if the WSGI_APPLICATION
    # setting points here.
    application = get_wsgi_application()
except Exception:
    # If /etc/zulip/settings.py contains invalid syntax, Django
    # initialization will fail in django.setup().  In this case, our
    # normal configuration to log errors to /var/log/zulip/errors.log
    # won't have been initialized.  Since it's really valuable for the
    # debugging process for a Zulip 500 error to always be "check
    # /var/log/zulip/errors.log", we log to that file directly here.
    import logging

    logging.basicConfig(
        filename="/var/log/zulip/errors.log",
        level=logging.INFO,
        format="%(asctime)s %(levelname)s %(name)s %(message)s",
    )
    logger = logging.getLogger(__name__)
    logger.exception("get_wsgi_application() failed:")
    raise

repo_name: wujuguang/tornado
path: tornado/process.py
copies: 1
size: 12689
license: apache-2.0
content:

#
# Copyright 2011 Facebook
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

"""Utilities for working with multiple processes, including both forking
the server into multiple processes and managing subprocesses.
"""

import os
import multiprocessing
import signal
import subprocess
import sys
import time

from binascii import hexlify

from tornado.concurrent import (
    Future,
    future_set_result_unless_cancelled,
    future_set_exception_unless_cancelled,
)
from tornado import ioloop
from tornado.iostream import PipeIOStream
from tornado.log import gen_log
from tornado.platform.auto import set_close_exec

import typing
from typing import Tuple, Optional, Any, Callable

if typing.TYPE_CHECKING:
    from typing import List  # noqa: F401

# Re-export this exception for convenience.
CalledProcessError = subprocess.CalledProcessError


def cpu_count() -> int:
    """Returns the number of processors on this machine."""
    if multiprocessing is None:
        return 1
    try:
        return multiprocessing.cpu_count()
    except NotImplementedError:
        pass
    try:
        return os.sysconf("SC_NPROCESSORS_CONF")
    except (AttributeError, ValueError):
        pass
    gen_log.error("Could not detect number of processors; assuming 1")
    return 1


def _reseed_random() -> None:
    if "random" not in sys.modules:
        return
    import random

    # If os.urandom is available, this method does the same thing as
    # random.seed (at least as of python 2.6).  If os.urandom is not
    # available, we mix in the pid in addition to a timestamp.
    try:
        seed = int(hexlify(os.urandom(16)), 16)
    except NotImplementedError:
        seed = int(time.time() * 1000) ^ os.getpid()
    random.seed(seed)


def _pipe_cloexec() -> Tuple[int, int]:
    r, w = os.pipe()
    set_close_exec(r)
    set_close_exec(w)
    return r, w


_task_id = None


def fork_processes(
    num_processes: Optional[int], max_restarts: Optional[int] = None
) -> int:
    """Starts multiple worker processes.

    If ``num_processes`` is None or <= 0, we detect the number of cores
    available on this machine and fork that number of child
    processes. If ``num_processes`` is given and > 0, we fork that
    specific number of sub-processes.

    Since we use processes and not threads, there is no shared memory
    between any server code.

    Note that multiple processes are not compatible with the autoreload
    module (or the ``autoreload=True`` option to `tornado.web.Application`
    which defaults to True when ``debug=True``).
    When using multiple processes, no IOLoops can be created or
    referenced until after the call to ``fork_processes``.

    In each child process, ``fork_processes`` returns its *task id*, a
    number between 0 and ``num_processes``.  Processes that exit
    abnormally (due to a signal or non-zero exit status) are restarted
    with the same id (up to ``max_restarts`` times).  In the parent
    process, ``fork_processes`` returns None if all child processes
    have exited normally, but will otherwise only exit by throwing an
    exception.

    max_restarts defaults to 100.

    Availability: Unix
    """
    if max_restarts is None:
        max_restarts = 100

    global _task_id
    assert _task_id is None
    if num_processes is None or num_processes <= 0:
        num_processes = cpu_count()
    gen_log.info("Starting %d processes", num_processes)
    children = {}

    def start_child(i: int) -> Optional[int]:
        pid = os.fork()
        if pid == 0:
            # child process
            _reseed_random()
            global _task_id
            _task_id = i
            return i
        else:
            children[pid] = i
            return None

    for i in range(num_processes):
        id = start_child(i)
        if id is not None:
            return id
    num_restarts = 0
    while children:
        pid, status = os.wait()
        if pid not in children:
            continue
        id = children.pop(pid)
        if os.WIFSIGNALED(status):
            gen_log.warning(
                "child %d (pid %d) killed by signal %d, restarting",
                id,
                pid,
                os.WTERMSIG(status),
            )
        elif os.WEXITSTATUS(status) != 0:
            gen_log.warning(
                "child %d (pid %d) exited with status %d, restarting",
                id,
                pid,
                os.WEXITSTATUS(status),
            )
        else:
            gen_log.info("child %d (pid %d) exited normally", id, pid)
            continue
        num_restarts += 1
        if num_restarts > max_restarts:
            raise RuntimeError("Too many child restarts, giving up")
        new_id = start_child(id)
        if new_id is not None:
            return new_id
    # All child processes exited cleanly, so exit the master process
    # instead of just returning to right after the call to
    # fork_processes (which will probably just start up another IOLoop
    # unless the caller checks the return value).
    sys.exit(0)


def task_id() -> Optional[int]:
    """Returns the current task id, if any.

    Returns None if this process was not created by `fork_processes`.
    """
    global _task_id
    return _task_id


class Subprocess(object):
    """Wraps ``subprocess.Popen`` with IOStream support.

    The constructor is the same as ``subprocess.Popen`` with the following
    additions:

    * ``stdin``, ``stdout``, and ``stderr`` may have the value
      ``tornado.process.Subprocess.STREAM``, which will make the corresponding
      attribute of the resulting Subprocess a `.PipeIOStream`. If this option
      is used, the caller is responsible for closing the streams when done
      with them.

    The ``Subprocess.STREAM`` option and the ``set_exit_callback`` and
    ``wait_for_exit`` methods do not work on Windows. There is
    therefore no reason to use this class instead of
    ``subprocess.Popen`` on that platform.

    .. versionchanged:: 5.0
       The ``io_loop`` argument (deprecated since version 4.1) has been
       removed.

    """

    STREAM = object()

    _initialized = False
    _waiting = {}  # type: ignore
    _old_sigchld = None

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        self.io_loop = ioloop.IOLoop.current()
        # All FDs we create should be closed on error; those in to_close
        # should be closed in the parent process on success.
        pipe_fds = []  # type: List[int]
        to_close = []  # type: List[int]
        if kwargs.get("stdin") is Subprocess.STREAM:
            in_r, in_w = _pipe_cloexec()
            kwargs["stdin"] = in_r
            pipe_fds.extend((in_r, in_w))
            to_close.append(in_r)
            self.stdin = PipeIOStream(in_w)
        if kwargs.get("stdout") is Subprocess.STREAM:
            out_r, out_w = _pipe_cloexec()
            kwargs["stdout"] = out_w
            pipe_fds.extend((out_r, out_w))
            to_close.append(out_w)
            self.stdout = PipeIOStream(out_r)
        if kwargs.get("stderr") is Subprocess.STREAM:
            err_r, err_w = _pipe_cloexec()
            kwargs["stderr"] = err_w
            pipe_fds.extend((err_r, err_w))
            to_close.append(err_w)
            self.stderr = PipeIOStream(err_r)
        try:
            self.proc = subprocess.Popen(*args, **kwargs)
        except:
            for fd in pipe_fds:
                os.close(fd)
            raise
        for fd in to_close:
            os.close(fd)
        self.pid = self.proc.pid
        for attr in ["stdin", "stdout", "stderr"]:
            if not hasattr(self, attr):  # don't clobber streams set above
                setattr(self, attr, getattr(self.proc, attr))
        self._exit_callback = None  # type: Optional[Callable[[int], None]]
        self.returncode = None  # type: Optional[int]

    def set_exit_callback(self, callback: Callable[[int], None]) -> None:
        """Runs ``callback`` when this process exits.

        The callback takes one argument, the return code of the process.

        This method uses a ``SIGCHLD`` handler, which is a global setting
        and may conflict if you have other libraries trying to handle the
        same signal.  If you are using more than one ``IOLoop`` it may
        be necessary to call `Subprocess.initialize` first to designate
        one ``IOLoop`` to run the signal handlers.

        In many cases a close callback on the stdout or stderr streams
        can be used as an alternative to an exit callback if the
        signal handler is causing a problem.

        Availability: Unix
        """
        self._exit_callback = callback
        Subprocess.initialize()
        Subprocess._waiting[self.pid] = self
        Subprocess._try_cleanup_process(self.pid)

    def wait_for_exit(self, raise_error: bool = True) -> "Future[int]":
        """Returns a `.Future` which resolves when the process exits.

        Usage::

            ret = yield proc.wait_for_exit()

        This is a coroutine-friendly alternative to `set_exit_callback`
        (and a replacement for the blocking `subprocess.Popen.wait`).

        By default, raises `subprocess.CalledProcessError` if the process
        has a non-zero exit status. Use ``wait_for_exit(raise_error=False)``
        to suppress this behavior and return the exit status without raising.

        .. versionadded:: 4.2

        Availability: Unix
        """
        future = Future()  # type: Future[int]

        def callback(ret: int) -> None:
            if ret != 0 and raise_error:
                # Unfortunately we don't have the original args any more.
                future_set_exception_unless_cancelled(
                    future, CalledProcessError(ret, "unknown")
                )
            else:
                future_set_result_unless_cancelled(future, ret)

        self.set_exit_callback(callback)
        return future

    @classmethod
    def initialize(cls) -> None:
        """Initializes the ``SIGCHLD`` handler.

        The signal handler is run on an `.IOLoop` to avoid locking issues.
        Note that the `.IOLoop` used for signal handling need not be the
        same one used by individual Subprocess objects (as long as the
        ``IOLoops`` are each running in separate threads).

        .. versionchanged:: 5.0
           The ``io_loop`` argument (deprecated since version 4.1) has been
           removed.

        Availability: Unix
        """
        if cls._initialized:
            return
        io_loop = ioloop.IOLoop.current()
        cls._old_sigchld = signal.signal(
            signal.SIGCHLD,
            lambda sig, frame: io_loop.add_callback_from_signal(cls._cleanup),
        )
        cls._initialized = True

    @classmethod
    def uninitialize(cls) -> None:
        """Removes the ``SIGCHLD`` handler."""
        if not cls._initialized:
            return
        signal.signal(signal.SIGCHLD, cls._old_sigchld)
        cls._initialized = False

    @classmethod
    def _cleanup(cls) -> None:
        for pid in list(cls._waiting.keys()):  # make a copy
            cls._try_cleanup_process(pid)

    @classmethod
    def _try_cleanup_process(cls, pid: int) -> None:
        try:
            ret_pid, status = os.waitpid(pid, os.WNOHANG)
        except ChildProcessError:
            return
        if ret_pid == 0:
            return
        assert ret_pid == pid
        subproc = cls._waiting.pop(pid)
        subproc.io_loop.add_callback_from_signal(subproc._set_returncode, status)

    def _set_returncode(self, status: int) -> None:
        if os.WIFSIGNALED(status):
            self.returncode = -os.WTERMSIG(status)
        else:
            assert os.WIFEXITED(status)
            self.returncode = os.WEXITSTATUS(status)
        # We've taken over wait() duty from the subprocess.Popen
        # object. If we don't inform it of the process's return code,
        # it will log a warning at destruction in python 3.6+.
        self.proc.returncode = self.returncode
        if self._exit_callback:
            callback = self._exit_callback
            self._exit_callback = None
            callback(self.returncode)

repo_name: PythonCharmers/bokeh
path: bokeh/server/websocket.py
copies: 29
size: 3271
license: bsd-3-clause
content:

from __future__ import absolute_import

import logging
log = logging.getLogger(__name__)

import threading
import uuid

from tornado import websocket, ioloop
from tornado.web import Application
from tornado.httpserver import HTTPServer

from bokeh import protocol

from .wsmanager import WebSocketManager
from .zmqsub import Subscriber


class WebSocketHandler(websocket.WebSocketHandler):
    @property
    def manager(self):
        return self.application.wsmanager

    # accept all domains for now.. maybe rethink this later?
    def check_origin(self, origin):
        return True

    def open(self):
        ## TODO - set client id to continuum client id
        self.clientid = str(uuid.uuid4())
        self.manager.add_socket(self, self.clientid)

    def on_close(self):
        self.manager.remove_socket(self.clientid)

    def on_message(self, message):
        msgobj = protocol.deserialize_json(message)
        msgtype = msgobj.get('msgtype')
        if msgtype == 'subscribe':
            auth = msgobj['auth']
            topic = msgobj['topic']
            if self.manager.auth(auth, topic):
                self.manager.subscribe(self.clientid, topic)
                msg = protocol.serialize_json(
                    protocol.status_obj(['subscribesuccess', topic, self.clientid])
                )
                self.write_message(topic + ":" + msg)
            else:
                msg = protocol.serialize_web(protocol.error_obj('unauthorized'))
                self.write_message(topic + ":" + msg)


class TornadoWebSocketApplication(Application):
    def __init__(self, handlers, **settings):
        super(TornadoWebSocketApplication, self).__init__(handlers, **settings)
        self.wsmanager = WebSocketManager()
        zmqaddrs = settings.pop('zmqaddrs')
        self.subscriber = Subscriber(zmqaddrs, self.wsmanager)

    def stop(self):
        ## Hugo: not sure how this is supposed to work
        ## but apparently you need to stop and then
        ## start the tornado loop to get it to finish....
        ioloop.IOLoop.instance().stop()
        self.server.stop()
        self.subscriber.kill = True
        self.subscriber.thread.join()
        if hasattr(self, 'thread'):
            self.thread.join()

    def start(self, thread=False):
        def helper():
            self.subscriber.start()
            ioloop.IOLoop.instance().start()
        if thread:
            self.thread = threading.Thread(target=helper)
            self.thread.start()
        else:
            helper()

    def set_server(self, server):
        self.server = server

    def listen(self, port, address="", **kwargs):
        self.server = HTTPServer(self, **kwargs)
        self.server.listen(port, address)


def make_app(url_prefix, zmqaddrs, port):
    if url_prefix is None or url_prefix == "/":
        url = "/bokeh/sub/"
    else:
        if not url_prefix.startswith("/"):
            url_prefix = "/" + url_prefix
        if not url_prefix.endswith("/"):
            url_prefix = url_prefix + "/"
        url = url_prefix + "bokeh/sub/"
    application = TornadoWebSocketApplication([(url, WebSocketHandler)],
                                              zmqaddrs=zmqaddrs)
    application.listen(port)
    return application

repo_name: c3m3gyanesh/RouteFlow-OpenConfig
path: pox/pox/messenger/test_client.py
copies: 26
size: 2396
license: apache-2.0
content:

#!/usr/bin/env python
# Copyright 2012 James McCauley
#
# This file is part of POX.
#
# POX is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# POX is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with POX.  If not, see <http://www.gnu.org/licenses/>.

"""
This is NOT a POX component.

It's a little tool to test out the messenger.
"""

import socket
import threading
import json


class JSONDestreamer (object):
  import json
  decoder = json.JSONDecoder()

  def __init__ (self, callback = None):
    self.data = ''
    self.callback = callback if callback else self.rx

  def push (self, data):
    if len(self.data) == 0:
      data = data.lstrip()
    self.data += data
    try:
      while len(self.data) > 0:
        r,off = self.decoder.raw_decode(self.data)
        self.data = self.data[off:].lstrip()
        self.callback(r)
    except ValueError:
      pass

  def rx (self, data):
    import json
    print "Recv:", json.dumps(data, indent=4)


jd = JSONDestreamer()
done = False


def reader (socket):
  global done
  while True:
    d = socket.recv(1024)
    if d == "":
      done = True
      break
    jd.push(d)


cur_chan = None
def channel (ch):
  global cur_chan
  cur_chan = ch


import readline

def main (addr = "127.0.0.1", port = 7790):
  print "Connecting to %s:%i" % (addr,port)
  port = int(port)
  sock = socket.create_connection((addr, port))
  t = threading.Thread(target=reader, args=(sock,))
  t.daemon = True
  t.start()
  while not done:
    try:
      #print ">",
      m = raw_input()
      if len(m) == 0: continue
      m = eval(m)
      if not isinstance(m, dict): continue
      if cur_chan is not None and 'CHANNEL' not in m:
        m['CHANNEL'] = cur_chan
      m = json.dumps(m)
      sock.send(m)
    except EOFError:
      break
    except KeyboardInterrupt:
      break
    except:
      import traceback
      traceback.print_exc()


if __name__ == "__main__":
  import sys
  main(*sys.argv[1:])

sxhao/phantomjs
src/qt/qtwebkit/Tools/Scripts/webkitpy/common/watchlist/changedlinepattern.py
134
1964
# Copyright (C) 2011 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
#     * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#     * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
#     * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

class ChangedLinePattern:
    def __init__(self, compile_regex, index_for_zero_value):
        self._regex = compile_regex
        self._index_for_zero_value = index_for_zero_value

    def match(self, path, diff_file):
        for diff_line in diff_file:
            if diff_line[self._index_for_zero_value]:
                continue
            if self._regex.search(diff_line[2]):
                return True
        return False
bsd-3-clause
batxes/4Cin
SHH_WT_models_highres/SHH_WT_models_highres_final_output_0.1_-0.1_5000/mtx1_models/SHH_WT_models_highres30712.py
4
88233
import _surface
import chimera
try:
    import chimera.runCommand
except:
    pass
from VolumePath import markerset as ms
try:
    from VolumePath import Marker_Set, Link
    new_marker_set = Marker_Set
except:
    from VolumePath import volume_path_dialog
    d = volume_path_dialog(True)
    new_marker_set = d.new_marker_set

marker_sets = {}
surf_sets = {}

# (position, (r, g, b) color, radius) for markers particle_0 .. particle_248
_marker_data = [
    ((1479.39, -169.201, 3414.04), (0.7, 0.7, 0.7), 182.271),
    ((1393.08, -526.754, 3146.14), (0.7, 0.7, 0.7), 258.199),
    ((1584.4, -235.041, 3121.91), (0.7, 0.7, 0.7), 123.897),
    ((1773.35, -585.329, 3217.96), (0.7, 0.7, 0.7), 146.739),
    ((2088.02, -930.589, 3280.92), (0.7, 0.7, 0.7), 179.098),
    ((2188.38, -400.419, 3039.72), (0.7, 0.7, 0.7), 148.854),
    ((2379.86, 58.3319, 2880.82), (0.7, 0.7, 0.7), 196.357),
    ((2670.43, -173.848, 3268.77), (0.7, 0.7, 0.7), 166.873),
    ((3036.44, -449.945, 3614.67), (0.7, 0.7, 0.7), 95.4711),
    ((2978.72, -177.311, 3296.48), (0.7, 0.7, 0.7), 185.401),
    ((2709.45, 6.66446, 2886.38), (0.7, 0.7, 0.7), 151.984),
    ((2340, 292.892, 2445.91), (0.7, 0.7, 0.7), 185.612),
    ((2155.74, 212.024, 2006.32), (0.7, 0.7, 0.7), 210.273),
    ((2108.93, 65.7955, 1639.81), (0.7, 0.7, 0.7), 106.892),
    ((2018.29, 46.2055, 1130.76), (0.7, 0.7, 0.7), 202.025),
    ((1621.58, 63.3436, 638.969), (0.7, 0.7, 0.7), 192.169),
    ((1108.97, 125.847, 261.505), (0.7, 0.7, 0.7), 241.11),
    ((678.055, 279.015, 222.495), (0.7, 0.7, 0.7), 128.465),
    ((215.588, 137.642, 265.978), (0.7, 0.7, 0.7), 217.38),
    ((-346.869, -272.224, 150.76), (0.7, 0.7, 0.7), 184.555),
    ((38.8989, 162.481, 424.325), (0.7, 0.7, 0.7), 140.055),
    ((396.084, 401.379, 286.24), (0.7, 0.7, 0.7), 169.708),
    ((582.721, 608.033, -43.0802), (0.7, 0.7, 0.7), 184.639),
    ((569.156, 926.101, 44.1948), (0.7, 0.7, 0.7), 119.286),
    ((296.023, 984.02, 174.994), (0.7, 0.7, 0.7), 147.754),
    ((224.65, 760.133, 417.684), (0.7, 0.7, 0.7), 171.4),
    ((607.056, 693.496, 597.474), (0.7, 0.7, 0.7), 156.341),
    ((869.971, 835.814, 1108.23), (0.7, 0.7, 0.7), 186.501),
    ((1134.37, 1037.6, 1545.5), (0.7, 0.7, 0.7), 308.325),
    ((1514.92, 969.598, 1774.48), (0.7, 0.7, 0.7), 138.617),
    ((1813.85, 1036.77, 1811.36), (0.7, 0.7, 0.7), 130.03),
    ((1603.99, 1002.37, 1568.97), (0.7, 0.7, 0.7), 156.552),
    ((1381.55, 918.066, 1755.51), (0.7, 0.7, 0.7), 183.244),
    ((1170.19, 860.211, 1912.21), (0.7, 0.7, 0.7), 181.382),
    ((997.755, 965.178, 1932.66), (0.7, 0.7, 0.7), 101.943),
    ((648.32, 876.363, 1933.33), (1, 0.7, 0), 138.913),
    ((1137.42, 152.033, 2560.89), (0.7, 0.7, 0.7), 221.737),
    ((1394.01, -238.26, 3274.17), (0.7, 0.7, 0.7), 256.38),
    ((1635.31, 6.66903, 3827.6), (0.7, 0.7, 0.7), 221.694),
    ((2000.59, 581.307, 3687.98), (0.7, 0.7, 0.7), 259.341),
    ((2218.94, 1017.47, 3038.78), (0.7, 0.7, 0.7), 117.89),
    ((2016, 1298.2, 2259.69), (0.7, 0.7, 0.7), 116.071),
    ((1582.37, 1489.45, 2148.63), (0.7, 0.7, 0.7), 268.224),
    ((1535.96, 1550.66, 2460.1), (0.7, 0.7, 0.7), 386.918),
    ((1914.61, 1569.58, 2946.88), (0.7, 0.7, 0.7), 121.316),
    ((1849.88, 1620, 3389.42), (0.7, 0.7, 0.7), 138.363),
    ((1280.59, 1465.44, 2994.67), (1, 0.7, 0), 175.207),
    ((1388.93, 1587.77, 3686.98), (0.7, 0.7, 0.7), 131.468),
    ((1278.94, 1767.7, 4389.03), (0.7, 0.7, 0.7), 287.894),
    ((1310.15, 2103.57, 3969.84), (0.7, 0.7, 0.7), 88.1109),
    ((1443.84, 2080.69, 3379.68), (0.7, 0.7, 0.7), 145.385),
    ((1564.57, 2144.04, 3211.5), (0.7, 0.7, 0.7), 155.452),
    ((1588.37, 2432.38, 3768.3), (0.7, 0.7, 0.7), 145.512),
    ((1681.25, 2663.96, 4217.83), (0.7, 0.7, 0.7), 99.9972),
    ((1849.74, 2806.03, 4605.68), (0.7, 0.7, 0.7), 327.529),
    ((2299.59, 2691.97, 4156.29), (0.7, 0.7, 0.7), 137.983),
    ((2254.87, 2628.72, 3651.14), (0.7, 0.7, 0.7), 83.3733),
    ((2093.95, 2512.5, 3113.36), (0.7, 0.7, 0.7), 101.562),
    ((1867.96, 2306.16, 2685.19), (0.7, 0.7, 0.7), 165.689),
    ((1566.85, 2323.28, 2767.79), (0.7, 0.7, 0.7), 136.925),
    ((1453.31, 2342.77, 2830.24), (0.7, 0.7, 0.7), 123.389),
    ((1648.92, 2661.65, 3075.21), (0.7, 0.7, 0.7), 184.47),
    ((1890.19, 3306.26, 3476.99), (0.7, 0.7, 0.7), 148.473),
    ((2092.1, 4117.11, 3990.75), (0.7, 0.7, 0.7), 241.406),
    ((2378.33, 3538.16, 3870.73), (0.7, 0.7, 0.7), 182.736),
    ((2364.1, 3082.09, 3787.57), (0.7, 0.7, 0.7), 166.62),
    ((2100.56, 3044.91, 3659.53), (0.7, 0.7, 0.7), 113.872),
    ((1958.99, 2802.29, 3495.64), (0.7, 0.7, 0.7), 110.065),
    ((1672.71, 2553.54, 3487.93), (0.7, 0.7, 0.7), 150.08),
    ((1288.8, 2304.71, 3590.42), (0.7, 0.7, 0.7), 118.525),
    ((783.399, 2213.15, 3749.42), (0.7, 0.7, 0.7), 163.955),
    ((579.224, 2483, 3931.43), (0.7, 0.7, 0.7), 170.131),
    ((1078.19, 3035.29, 4083.1), (0.7, 0.7, 0.7), 78.2127),
    ((1715.93, 3538.83, 4243.99), (0.7, 0.7, 0.7), 251.896),
    ((2344.83, 3766.91, 4341.43), (0.7, 0.7, 0.7), 167.55),
    ((2753.89, 3723.18, 4299.68), (0.7, 0.7, 0.7), 167.846),
    ((2487.97, 4134.06, 4144.37), (0.7, 0.7, 0.7), 259.68),
    ((2015.84, 4209.88, 4210.96), (0.7, 0.7, 0.7), 80.2854),
    ((1908.26, 4338.06, 4376.52), (0.7, 0.7, 0.7), 82.4427),
    ((1932.78, 4718.39, 4369.34), (0.7, 0.7, 0.7), 212.811),
    ((2149.76, 4775.93, 3634.42), (0.7, 0.7, 0.7), 176.391),
    ((2197.74, 4299.63, 3081.29), (0.7, 0.7, 0.7), 99.3204),
    ((1937.46, 3832.22, 2804.04), (0.7, 0.7, 0.7), 166.62),
    ((1738.28, 3757.2, 2563.71), (0.7, 0.7, 0.7), 102.831),
    ((1904.9, 4573.09, 2834.88), (0.7, 0.7, 0.7), 65.0997),
    ((2109.52, 4306.55, 3252.36), (0.7, 0.7, 0.7), 92.1294),
    ((2217.05, 3772.21, 3471.02), (0.7, 0.7, 0.7), 194.791),
    ((2432.67, 3410.26, 3616.38), (0.7, 0.7, 0.7), 120.766),
    ((2788.21, 3813.68, 3843.45), (0.7, 0.7, 0.7), 217.803),
    ((2405.04, 3917.75, 3833.91), (0.7, 0.7, 0.7), 115.775),
    ((2030.19, 3716.06, 3844.88), (0.7, 0.7, 0.7), 115.648),
    ((2059.77, 3479.73, 3622.18), (0.7, 0.7, 0.7), 83.8386),
    ((2399.28, 3343.37, 3599.33), (0.7, 0.7, 0.7), 124.32),
    ((2717.01, 3079.15, 3794.99), (0.7, 0.7, 0.7), 185.993),
    ((2930.41, 2883.62, 4375.72), (0.7, 0.7, 0.7), 238.826),
    ((2789.32, 2874.49, 4903.02), (0.7, 0.7, 0.7), 128.465),
    ((2241.98, 3052.89, 4626.16), (0.7, 0.7, 0.7), 203.209),
    ((2144.54, 3149.63, 4148.95), (0.7, 0.7, 0.7), 160.486),
    ((2484.28, 3083.35, 4198.27), (0.7, 0.7, 0.7), 149.277),
    ((2564.57, 3353.95, 4677.17), (0.7, 0.7, 0.7), 35.7435),
    ((1971.1, 3157.92, 3909.36), (0.7, 0.7, 0.7), 98.3898),
    ((1560.02, 2885.35, 3000.94), (0.7, 0.7, 0.7), 188.404),
    ((1724.94, 2765.32, 2544.25), (0.7, 0.7, 0.7), 110.318),
    ((1903.87, 3054.29, 2726), (0.7, 0.7, 0.7), 127.534),
    ((2012.68, 3223.29, 3038.73), (0.7, 0.7, 0.7), 91.368),
    ((2102.74, 3273.47, 3412.11), (0.7, 0.7, 0.7), 131.045),
    ((2127.31, 3125.62, 3807.32), (0.7, 0.7, 0.7), 143.608),
    ((2443.11, 2967.37, 3978.51), (0.7, 0.7, 0.7), 135.783),
    ((2737.02, 2845.16, 4085.87), (0.7, 0.7, 0.7), 92.5947),
    ((2852.44, 2847.65, 3834.28), (0.7, 0.7, 0.7), 150.123),
    ((2948.7, 2732.31, 3726.27), (0.7, 0.7, 0.7), 121.57),
    ((3262.68, 2821.8, 3745.37), (0.7, 0.7, 0.7), 104.777),
    ((3183.85, 2715.75, 3363.56), (0.7, 0.7, 0.7), 114.844),
    ((3092.64, 2611.66, 2957.58), (0.7, 0.7, 0.7), 150.588),
    ((2714.23, 2686.02, 2843.08), (0.7, 0.7, 0.7), 103.55),
    ((2383.57, 3089.59, 2799.42), (0.7, 0.7, 0.7), 215.392),
    ((2001.33, 3436.75, 2633.26), (0.7, 0.7, 0.7), 99.9126),
    ((1990.05, 4168.68, 2677.17), (0.7, 0.7, 0.7), 99.7857),
    ((1952.69, 4682.84, 2912.79), (0.7, 0.7, 0.7), 109.98),
    ((2240.33, 4251.26, 2811.63), (0.7, 0.7, 0.7), 102.831),
    ((2319.73, 3864.81, 2888.1), (0.7, 0.7, 0.7), 103.593),
    ((2399.45, 3465.83, 3138.97), (0.7, 0.7, 0.7), 173.472),
    ((2499.92, 3412.31, 3682.73), (0.7, 0.7, 0.7), 113.575),
    ((2647.6, 3058.53, 3928.94), (0.7, 0.7, 0.7), 128.296),
    ((2880.54, 2757.37, 4147.64), (0.7, 0.7, 0.7), 145.004),
    ((3129.9, 2311.24, 4203.39), (0.7, 0.7, 0.7), 148.261),
    ((3430.95, 1966.25, 4612.25), (0.7, 0.7, 0.7), 127.704),
    ((3540.33, 1732.41, 5125.27), (0.7, 0.7, 0.7), 129.607),
    ((3176.98, 2079.22, 5142.23), (0.7, 0.7, 0.7), 139.759),
    ((2686.1, 2435.68, 4887.37), (0.7, 0.7, 0.7), 118.567),
    ((2714.42, 2812.2, 4674.94), (0.7, 0.7, 0.7), 136.164),
    ((2709.16, 3052.4, 4276.11), (0.7, 0.7, 0.7), 121.655),
    ((2831.36, 3218.07, 3878.33), (0.7, 0.7, 0.7), 127.492),
    ((3083.98, 3552.99, 3735.16), (0.7, 0.7, 0.7), 138.617),
    ((3185.83, 3591.68, 3367.69), (0.7, 0.7, 0.7), 120.766),
    ((3010.46, 3640.04, 3091.51), (0.7, 0.7, 0.7), 145.893),
    ((2618.52, 3371.59, 3077.89), (0.7, 0.7, 0.7), 185.02),
    ((2321.04, 2983.41, 2862.89), (0.7, 0.7, 0.7), 221.314),
    ((2273.81, 2548.93, 2632.92), (0.7, 0.7, 0.7), 165.139),
    ((2032.1, 2638.25, 2682.12), (0.7, 0.7, 0.7), 179.437),
    ((1872.61, 3024.06, 2705.01), (0.7, 0.7, 0.7), 137.898),
    ((1884.49, 3347.79, 2780.46), (0.7, 0.7, 0.7), 124.658),
    ((2162.88, 3567.44, 2815.21), (0.7, 0.7, 0.7), 97.7553),
    ((2379.8, 3783.53, 2867.85), (0.7, 0.7, 0.7), 92.9331),
    ((2512.87, 4059.25, 3000.16), (0.7, 0.7, 0.7), 123.135),
    ((2307.4, 3744.08, 2895.22), (0.7, 0.7, 0.7), 125.716),
    ((2309.86, 3413.42, 2916.72), (0.7, 0.7, 0.7), 127.534),
    ((2390.56, 3235.55, 3160.67), (0.7, 0.7, 0.7), 94.9212),
    ((2271.13, 2810.69, 3079.01), (0.7, 0.7, 0.7), 137.644),
    ((2288.52, 2445.11, 3062.71), (0.7, 0.7, 0.7), 149.277),
    ((2616.2, 2517.03, 2940.46), (0.7, 0.7, 0.7), 103.677),
    ((3062.36, 2701.73, 2989.13), (0.7, 0.7, 0.7), 99.6588),
    ((3414.51, 2816.56, 3040.99), (0.7, 0.7, 0.7), 134.133),
    ((3175.21, 2938.98, 2828.14), (0.7, 0.7, 0.7), 173.007),
    ((2687.22, 2681.56, 2677.67), (0.7, 0.7, 0.7), 141.028),
    ((2246.97, 2522.62, 2628.99), (0.7, 0.7, 0.7), 161.121),
    ((2092.79, 2473.88, 2940.03), (0.7, 0.7, 0.7), 119.582),
    ((2259.07, 2727.22, 3211.4), (0.7, 0.7, 0.7), 137.094),
    ((2336.25, 3171.04, 3431.3), (0.7, 0.7, 0.7), 149.234),
    ((2042.86, 3366.83, 3174.46), (0.7, 0.7, 0.7), 151.011),
    ((1786.54, 3262.42, 2725.84), (0.7, 0.7, 0.7), 184.216),
    ((2044.45, 3297.95, 2404.28), (0.7, 0.7, 0.7), 170.596),
    ((2357.31, 3803.93, 2637.14), (0.7, 0.7, 0.7), 215.603),
    ((2729.98, 4521.89, 3015.27), (0.7, 0.7, 0.7), 79.0164),
    ((3071.16, 4476.25, 2983.12), (0.7, 0.7, 0.7), 77.2821),
    ((3162.73, 4131.25, 2973.01), (0.7, 0.7, 0.7), 188.658),
    ((3353.36, 4040.07, 2739.69), (0.7, 0.7, 0.7), 115.437),
    ((2949.27, 3588.06, 2716.32), (0.7, 0.7, 0.7), 88.4916),
    ((2524.82, 3124.39, 2697.43), (0.7, 0.7, 0.7), 108.88),
    ((2211.93, 2989.64, 2812.63), (0.7, 0.7, 0.7), 172.119),
    ((2401.74, 3388.26, 3013.26), (0.7, 0.7, 0.7), 139.505),
    ((2603.25, 3786.45, 3203.2), (0.7, 0.7, 0.7), 92.7639),
    ((2371.66, 3802.91, 3267.8), (0.7, 0.7, 0.7), 89.8452),
    ((2560.91, 3749.63, 3066.73), (0.7, 0.7, 0.7), 149.446),
    ((2846.48, 3859.08, 2907.6), (0.7, 0.7, 0.7), 126.858),
    ((2897.47, 4054.32, 3160.07), (0.7, 0.7, 0.7), 106.046),
    ((2586.65, 4031.73, 3554.73), (0.7, 0.7, 0.7), 156.298),
    ((2321.54, 3920.47, 4073.26), (0.7, 0.7, 0.7), 231.212),
    ((1851.1, 3621.29, 4126.7), (0.7, 0.7, 0.7), 88.4916),
    ((1642.25, 3353.94, 3792.89), (0.7, 0.7, 0.7), 111.334),
    ((1725.85, 3089.22, 3234.44), (0.7, 0.7, 0.7), 127.619),
    ((1812.77, 2900.04, 2817.21), (0.7, 0.7, 0.7), 230.746),
    ((2008.77, 3194.15, 3005.23), (0.7, 0.7, 0.7), 124.573),
    ((2172.23, 3730.89, 3288.31), (0.7, 0.7, 0.7), 124.489),
    ((2462.85, 3928.41, 3144.56), (0.7, 0.7, 0.7), 196.61),
    ((2245.16, 3928.18, 2862.01), (0.7, 0.7, 0.7), 134.049),
    ((1976.26, 4086.53, 2773.25), (0.7, 0.7, 0.7), 141.493),
    ((1767.22, 4422.23, 2880.98), (0.7, 0.7, 0.7), 172.203),
    ((2283.9, 4074.17, 2713.29), (0.7, 0.7, 0.7), 271.354),
    ((2657.96, 3746, 2887.86), (0.7, 0.7, 0.7), 97.0785),
    ((2902.75, 3556.01, 3186.6), (0.7, 0.7, 0.7), 151.857),
    ((3351.01, 3200.84, 3361.25), (0.7, 0.7, 0.7), 199.233),
    ((3334.63, 2632.95, 3176.32), (0.7, 0.7, 0.7), 118.863),
    ((3462.87, 2231.84, 3358.89), (0.7, 0.7, 0.7), 172.415),
    ((3763.71, 2039.67, 3750.71), (0.7, 0.7, 0.7), 134.26),
    ((4414.47, 1998.59, 4430.64), (0.7, 0.7, 0.7), 139.548),
    ((4054.15, 2294.57, 4718.46), (0.7, 0.7, 0.7), 196.526),
    ((3403.41, 2647.95, 4463.43), (0.7, 0.7, 0.7), 136.206),
    ((2617.63, 2614.73, 3881.32), (0.7, 0.7, 0.7), 152.322),
    ((2212.53, 2628.45, 3352.97), (0.7, 0.7, 0.7), 126.054),
    ((2210.74, 3058.8, 3286.83), (0.7, 0.7, 0.7), 164.378),
    ((2459.87, 3428.03, 3210.64), (0.7, 0.7, 0.7), 122.205),
    ((2811.83, 3638.16, 3033.89), (0.7, 0.7, 0.7), 134.979),
    ((2972.41, 3362.28, 2851.28), (0.7, 0.7, 0.7), 136.375),
    ((2972.31, 3281.95, 3111.43), (0.7, 0.7, 0.7), 151.688),
    ((2895.91, 3547.93, 3080.63), (0.7, 0.7, 0.7), 116.156),
    ((2494.25, 3028.9, 2818.18), (0.7, 0.7, 0.7), 122.839),
    ((2158.97, 2654.06, 2905.13), (0.7, 0.7, 0.7), 164.716),
    ((2441.86, 2971.02, 3661.46), (0.7, 0.7, 0.7), 303.672),
    ((3161.5, 3490.18, 4217.11), (0.7, 0.7, 0.7), 220.298),
    ((3205.01, 3871.06, 3722.7), (0.7, 0.7, 0.7), 175.883),
    ((2805.77, 4279.72, 3381.74), (0.7, 0.7, 0.7), 233.581),
    ((2038.66, 4306.57, 3337.77), (0.7, 0.7, 0.7), 231.127),
    ((1683.43, 4546.03, 2900.81), (0.7, 0.7, 0.7), 247.413),
    ((1710.44, 4826.96, 2315.34), (0.7, 0.7, 0.7), 200.206),
    ((2051.5, 4851.4, 2087.98), (0.7, 0.7, 0.7), 150.419),
    ((1795.34, 4279, 2120.87), (0.7, 0.7, 0.7), 140.14),
    ((1410.27, 4001.39, 2159.02), (0.7, 0.7, 0.7), 132.949),
    ((1243.87, 3656.45, 2195.36), (0.7, 0.7, 0.7), 141.113),
    ((1216.25, 3694.24, 2542.62), (0.7, 0.7, 0.7), 171.526),
    ((1428.34, 4084.98, 2951.07), (0.7, 0.7, 0.7), 326.937),
    ((1931.23, 4185.81, 3211.42), (0.7, 0.7, 0.7), 92.0871),
    ((1988.06, 3758.3, 3372.05), (0.7, 0.7, 0.7), 210.273),
    ((1710.05, 3152.15, 3025.23), (0.7, 0.7, 0.7), 122.628),
    ((1624.34, 2978.4, 2874.94), (0.7, 0.7, 0.7), 109.176),
    ((1768.9, 3244.33, 2828.95), (0.7, 0.7, 0.7), 142.213),
    ((1910.77, 3307.28, 3199.79), (0.7, 0.7, 0.7), 250.078),
    ((2289.56, 3218.79, 2958.66), (0.7, 0.7, 0.7), 123.558),
    ((2415.85, 2984.7, 2550.62), (0.7, 0.7, 0.7), 235.992),
    ((2708.65, 2774.22, 2208.98), (0.7, 0.7, 0.7), 172.373),
    ((3129.86, 2968.09, 2127.12), (0.7, 0.7, 0.7), 152.322),
    ((3413.21, 3045.14, 2356.83), (0.7, 0.7, 0.7), 196.653),
    ((3211.26, 2924.09, 2150.68), (0.7, 0.7, 0.7), 134.091),
    ((2971.71, 2958.89, 1917.6), (0.7, 0.7, 0.7), 180.325),
    ((2730.29, 2836.98, 2304.31), (0.7, 0.7, 0.7), 218.437),
    ((2730.09, 3153.79, 2634.31), (0.7, 0.7, 0.7), 148.008),
    ((2834.17, 3741.37, 2888.36), (0.7, 0.7, 0.7), 191.873),
    ((2698.47, 4283.02, 2928.01), (0.7, 0.7, 0.7), 138.575),
    ((2930.34, 4586.44, 2749.2), (0.7, 0.7, 0.7), 161.205),
    ((3061.5, 4201.68, 2985.73), (0.7, 0.7, 0.7), 288.021),
    ((3084.32, 3720.69, 2445.35), (0.7, 0.7, 0.7), 227.405),
    ((3002.36, 3243.98, 2257.1), (0.7, 0.7, 0.7), 126.519),
    ((3170.95, 3308.47, 2497.23), (0.7, 0.7, 0.7), 117.975),
    ((2950.95, 3047.35, 2553.24), (0.7, 0.7, 0.7), 200.883),
    ((2625.98, 2947.94, 2258.07), (0.7, 0.7, 0.7), 158.794),
    ((2510.21, 3097.6, 2062.39), (0.7, 0.7, 0.7), 115.86),
    ((2372.69, 2911.41, 2003.73), (0.7, 0.7, 0.7), 133.034),
    ((2596.66, 2500.71, 2094.1), (0.7, 0.7, 0.7), 314.627),
    ((2794.64, 2796.86, 2181.87), (0.7, 0.7, 0.7), 115.352),
]

# create one named marker set per particle and place its marker
for _i, (_xyz, _color, _radius) in enumerate(_marker_data):
    _name = "particle_%d geometry" % _i
    if _name not in marker_sets:
        s = new_marker_set(_name)
        marker_sets[_name] = s
    s = marker_sets[_name]
    mark = s.place_marker(_xyz, _color, _radius)

if "particle_249 geometry" not
in marker_sets: s=new_marker_set('particle_249 geometry') marker_sets["particle_249 geometry"]=s s= marker_sets["particle_249 geometry"] mark=s.place_marker((2862.56, 3220.38, 2124.99), (0.7, 0.7, 0.7), 180.621) if "particle_250 geometry" not in marker_sets: s=new_marker_set('particle_250 geometry') marker_sets["particle_250 geometry"]=s s= marker_sets["particle_250 geometry"] mark=s.place_marker((2580.32, 3363.02, 1932.67), (0.7, 0.7, 0.7), 126.265) if "particle_251 geometry" not in marker_sets: s=new_marker_set('particle_251 geometry') marker_sets["particle_251 geometry"]=s s= marker_sets["particle_251 geometry"] mark=s.place_marker((2193.78, 3301.72, 1905.73), (0.7, 0.7, 0.7), 133.541) if "particle_252 geometry" not in marker_sets: s=new_marker_set('particle_252 geometry') marker_sets["particle_252 geometry"]=s s= marker_sets["particle_252 geometry"] mark=s.place_marker((1770.24, 3434.13, 1895.41), (0.7, 0.7, 0.7), 171.019) if "particle_253 geometry" not in marker_sets: s=new_marker_set('particle_253 geometry') marker_sets["particle_253 geometry"]=s s= marker_sets["particle_253 geometry"] mark=s.place_marker((1446.34, 3681.96, 1914.29), (0.7, 0.7, 0.7), 115.437) if "particle_254 geometry" not in marker_sets: s=new_marker_set('particle_254 geometry') marker_sets["particle_254 geometry"]=s s= marker_sets["particle_254 geometry"] mark=s.place_marker((1688.03, 3635.38, 1732.03), (0.7, 0.7, 0.7), 158.583) if "particle_255 geometry" not in marker_sets: s=new_marker_set('particle_255 geometry') marker_sets["particle_255 geometry"]=s s= marker_sets["particle_255 geometry"] mark=s.place_marker((1807.9, 3276.94, 1972.76), (0.7, 0.7, 0.7), 192) if "particle_256 geometry" not in marker_sets: s=new_marker_set('particle_256 geometry') marker_sets["particle_256 geometry"]=s s= marker_sets["particle_256 geometry"] mark=s.place_marker((2066.24, 2926.3, 2058.07), (0.7, 0.7, 0.7), 150.165) if "particle_257 geometry" not in marker_sets: s=new_marker_set('particle_257 geometry') marker_sets["particle_257 geometry"]=s s= marker_sets["particle_257 geometry"] mark=s.place_marker((1926.97, 2798.83, 2149.36), (0.7, 0.7, 0.7), 157.567) if "particle_258 geometry" not in marker_sets: s=new_marker_set('particle_258 geometry') marker_sets["particle_258 geometry"]=s s= marker_sets["particle_258 geometry"] mark=s.place_marker((1958.83, 2740.04, 2259.73), (0.7, 0.7, 0.7), 199.36) if "particle_259 geometry" not in marker_sets: s=new_marker_set('particle_259 geometry') marker_sets["particle_259 geometry"]=s s= marker_sets["particle_259 geometry"] mark=s.place_marker((2051.06, 3148.72, 2463.98), (0.7, 0.7, 0.7), 105.369) if "particle_260 geometry" not in marker_sets: s=new_marker_set('particle_260 geometry') marker_sets["particle_260 geometry"]=s s= marker_sets["particle_260 geometry"] mark=s.place_marker((2163.45, 3278.09, 2677.17), (0.7, 0.7, 0.7), 118.651) if "particle_261 geometry" not in marker_sets: s=new_marker_set('particle_261 geometry') marker_sets["particle_261 geometry"]=s s= marker_sets["particle_261 geometry"] mark=s.place_marker((2296.46, 2881.28, 2517.86), (0.7, 0.7, 0.7), 219.664) if "particle_262 geometry" not in marker_sets: s=new_marker_set('particle_262 geometry') marker_sets["particle_262 geometry"]=s s= marker_sets["particle_262 geometry"] mark=s.place_marker((2254.52, 2356.08, 2244.67), (0.7, 0.7, 0.7), 196.018) if "particle_263 geometry" not in marker_sets: s=new_marker_set('particle_263 geometry') marker_sets["particle_263 geometry"]=s s= marker_sets["particle_263 geometry"] 
mark=s.place_marker((2230.75, 1883.53, 2088.25), (0.7, 0.7, 0.7), 218.141) if "particle_264 geometry" not in marker_sets: s=new_marker_set('particle_264 geometry') marker_sets["particle_264 geometry"]=s s= marker_sets["particle_264 geometry"] mark=s.place_marker((1926.58, 1806.5, 2255.63), (0.7, 0.7, 0.7), 181.636) if "particle_265 geometry" not in marker_sets: s=new_marker_set('particle_265 geometry') marker_sets["particle_265 geometry"]=s s= marker_sets["particle_265 geometry"] mark=s.place_marker((1845.48, 2044.86, 2412.87), (0.7, 0.7, 0.7), 195.003) if "particle_266 geometry" not in marker_sets: s=new_marker_set('particle_266 geometry') marker_sets["particle_266 geometry"]=s s= marker_sets["particle_266 geometry"] mark=s.place_marker((1870.82, 1880.69, 2219.25), (0.7, 0.7, 0.7), 139.209) if "particle_267 geometry" not in marker_sets: s=new_marker_set('particle_267 geometry') marker_sets["particle_267 geometry"]=s s= marker_sets["particle_267 geometry"] mark=s.place_marker((1809.74, 1885.09, 2162.45), (0.7, 0.7, 0.7), 189.885) if "particle_268 geometry" not in marker_sets: s=new_marker_set('particle_268 geometry') marker_sets["particle_268 geometry"]=s s= marker_sets["particle_268 geometry"] mark=s.place_marker((1883.34, 2119.47, 1982.25), (0.7, 0.7, 0.7), 267.674) if "particle_269 geometry" not in marker_sets: s=new_marker_set('particle_269 geometry') marker_sets["particle_269 geometry"]=s s= marker_sets["particle_269 geometry"] mark=s.place_marker((2157.4, 2483.9, 1647.96), (0.7, 0.7, 0.7), 196.568) if "particle_270 geometry" not in marker_sets: s=new_marker_set('particle_270 geometry') marker_sets["particle_270 geometry"]=s s= marker_sets["particle_270 geometry"] mark=s.place_marker((1857.94, 2507.39, 1570.65), (0.7, 0.7, 0.7), 192.423) if "particle_271 geometry" not in marker_sets: s=new_marker_set('particle_271 geometry') marker_sets["particle_271 geometry"]=s s= marker_sets["particle_271 geometry"] mark=s.place_marker((1647.24, 2188.34, 1601.7), (1, 0.7, 0), 202.405) if "particle_272 geometry" not in marker_sets: s=new_marker_set('particle_272 geometry') marker_sets["particle_272 geometry"]=s s= marker_sets["particle_272 geometry"] mark=s.place_marker((2058.4, 2898.62, 1409.92), (0.7, 0.7, 0.7), 135.529) if "particle_273 geometry" not in marker_sets: s=new_marker_set('particle_273 geometry') marker_sets["particle_273 geometry"]=s s= marker_sets["particle_273 geometry"] mark=s.place_marker((2385.76, 3791.41, 1167.14), (0.7, 0.7, 0.7), 114.21) if "particle_274 geometry" not in marker_sets: s=new_marker_set('particle_274 geometry') marker_sets["particle_274 geometry"]=s s= marker_sets["particle_274 geometry"] mark=s.place_marker((2396.17, 3896.83, 1473.12), (0.7, 0.7, 0.7), 159.133) if "particle_275 geometry" not in marker_sets: s=new_marker_set('particle_275 geometry') marker_sets["particle_275 geometry"]=s s= marker_sets["particle_275 geometry"] mark=s.place_marker((2528.13, 3650.81, 1773.72), (0.7, 0.7, 0.7), 144.412) if "particle_276 geometry" not in marker_sets: s=new_marker_set('particle_276 geometry') marker_sets["particle_276 geometry"]=s s= marker_sets["particle_276 geometry"] mark=s.place_marker((2654.31, 3442.44, 1983.53), (0.7, 0.7, 0.7), 70.8525) if "particle_277 geometry" not in marker_sets: s=new_marker_set('particle_277 geometry') marker_sets["particle_277 geometry"]=s s= marker_sets["particle_277 geometry"] mark=s.place_marker((2387.09, 2894.9, 2121.29), (0.7, 0.7, 0.7), 141.874) if "particle_278 geometry" not in marker_sets: s=new_marker_set('particle_278 geometry') 
marker_sets["particle_278 geometry"]=s s= marker_sets["particle_278 geometry"] mark=s.place_marker((2079.91, 2363.92, 2160.95), (0.7, 0.7, 0.7), 217.337) if "particle_279 geometry" not in marker_sets: s=new_marker_set('particle_279 geometry') marker_sets["particle_279 geometry"]=s s= marker_sets["particle_279 geometry"] mark=s.place_marker((2119.55, 2310.72, 2131.9), (0.7, 0.7, 0.7), 237.641) if "particle_280 geometry" not in marker_sets: s=new_marker_set('particle_280 geometry') marker_sets["particle_280 geometry"]=s s= marker_sets["particle_280 geometry"] mark=s.place_marker((2542.26, 2494.47, 2142.28), (0.7, 0.7, 0.7), 229.393) if "particle_281 geometry" not in marker_sets: s=new_marker_set('particle_281 geometry') marker_sets["particle_281 geometry"]=s s= marker_sets["particle_281 geometry"] mark=s.place_marker((2682.54, 1922.59, 1986.39), (0.7, 0.7, 0.7), 349.906) if "particle_282 geometry" not in marker_sets: s=new_marker_set('particle_282 geometry') marker_sets["particle_282 geometry"]=s s= marker_sets["particle_282 geometry"] mark=s.place_marker((2556.22, 1518.24, 1597.25), (0.7, 0.7, 0.7), 162.347) if "particle_283 geometry" not in marker_sets: s=new_marker_set('particle_283 geometry') marker_sets["particle_283 geometry"]=s s= marker_sets["particle_283 geometry"] mark=s.place_marker((2592, 1429.08, 1447.38), (0.7, 0.7, 0.7), 194.072) if "particle_284 geometry" not in marker_sets: s=new_marker_set('particle_284 geometry') marker_sets["particle_284 geometry"]=s s= marker_sets["particle_284 geometry"] mark=s.place_marker((2774.65, 1430.94, 1440.18), (0.7, 0.7, 0.7), 242.21) if "particle_285 geometry" not in marker_sets: s=new_marker_set('particle_285 geometry') marker_sets["particle_285 geometry"]=s s= marker_sets["particle_285 geometry"] mark=s.place_marker((3104.8, 1757.22, 1159.33), (0.7, 0.7, 0.7), 320.93) if "particle_286 geometry" not in marker_sets: s=new_marker_set('particle_286 geometry') marker_sets["particle_286 geometry"]=s s= marker_sets["particle_286 geometry"] mark=s.place_marker((3416.54, 1582.69, 694.364), (0.7, 0.7, 0.7), 226.432) if "particle_287 geometry" not in marker_sets: s=new_marker_set('particle_287 geometry') marker_sets["particle_287 geometry"]=s s= marker_sets["particle_287 geometry"] mark=s.place_marker((3331.76, 1168.07, 845.27), (0.7, 0.7, 0.7), 125.208) if "particle_288 geometry" not in marker_sets: s=new_marker_set('particle_288 geometry') marker_sets["particle_288 geometry"]=s s= marker_sets["particle_288 geometry"] mark=s.place_marker((3267.04, 925.347, 1392.09), (0.7, 0.7, 0.7), 197.837) if "particle_289 geometry" not in marker_sets: s=new_marker_set('particle_289 geometry') marker_sets["particle_289 geometry"]=s s= marker_sets["particle_289 geometry"] mark=s.place_marker((3782.71, 673.38, 1725.13), (0.7, 0.7, 0.7), 167.804) if "particle_290 geometry" not in marker_sets: s=new_marker_set('particle_290 geometry') marker_sets["particle_290 geometry"]=s s= marker_sets["particle_290 geometry"] mark=s.place_marker((4559.55, 375.491, 1823.34), (0.7, 0.7, 0.7), 136.84) if "particle_291 geometry" not in marker_sets: s=new_marker_set('particle_291 geometry') marker_sets["particle_291 geometry"]=s s= marker_sets["particle_291 geometry"] mark=s.place_marker((4780.28, 716.346, 1635.29), (0.7, 0.7, 0.7), 85.7421) if "particle_292 geometry" not in marker_sets: s=new_marker_set('particle_292 geometry') marker_sets["particle_292 geometry"]=s s= marker_sets["particle_292 geometry"] mark=s.place_marker((3547.39, 1214.35, 1165.52), (1, 0.7, 0), 256) if "particle_293 
geometry" not in marker_sets: s=new_marker_set('particle_293 geometry') marker_sets["particle_293 geometry"]=s s= marker_sets["particle_293 geometry"] mark=s.place_marker((4005.69, 507.964, 1916.01), (0.7, 0.7, 0.7), 138.702) if "particle_294 geometry" not in marker_sets: s=new_marker_set('particle_294 geometry') marker_sets["particle_294 geometry"]=s s= marker_sets["particle_294 geometry"] mark=s.place_marker((4067.99, 114.777, 2180.68), (0.7, 0.7, 0.7), 140.732) if "particle_295 geometry" not in marker_sets: s=new_marker_set('particle_295 geometry') marker_sets["particle_295 geometry"]=s s= marker_sets["particle_295 geometry"] mark=s.place_marker((4036.02, 176.803, 1872.65), (0.7, 0.7, 0.7), 81.3006) if "particle_296 geometry" not in marker_sets: s=new_marker_set('particle_296 geometry') marker_sets["particle_296 geometry"]=s s= marker_sets["particle_296 geometry"] mark=s.place_marker((4305.18, 106.621, 1525.58), (0.7, 0.7, 0.7), 133.837) if "particle_297 geometry" not in marker_sets: s=new_marker_set('particle_297 geometry') marker_sets["particle_297 geometry"]=s s= marker_sets["particle_297 geometry"] mark=s.place_marker((3881.22, 536.155, 1358.37), (0.7, 0.7, 0.7), 98.3475) if "particle_298 geometry" not in marker_sets: s=new_marker_set('particle_298 geometry') marker_sets["particle_298 geometry"]=s s= marker_sets["particle_298 geometry"] mark=s.place_marker((3167.12, 938.811, 1414.95), (0.7, 0.7, 0.7), 297.623) if "particle_299 geometry" not in marker_sets: s=new_marker_set('particle_299 geometry') marker_sets["particle_299 geometry"]=s s= marker_sets["particle_299 geometry"] mark=s.place_marker((2942, 1283.71, 1318.18), (0.7, 0.7, 0.7), 212.938) if "particle_300 geometry" not in marker_sets: s=new_marker_set('particle_300 geometry') marker_sets["particle_300 geometry"]=s s= marker_sets["particle_300 geometry"] mark=s.place_marker((2843.36, 1129.67, 1186.21), (0.7, 0.7, 0.7), 154.183) if "particle_301 geometry" not in marker_sets: s=new_marker_set('particle_301 geometry') marker_sets["particle_301 geometry"]=s s= marker_sets["particle_301 geometry"] mark=s.place_marker((2961.3, 1277.69, 814.238), (0.7, 0.7, 0.7), 180.832) if "particle_302 geometry" not in marker_sets: s=new_marker_set('particle_302 geometry') marker_sets["particle_302 geometry"]=s s= marker_sets["particle_302 geometry"] mark=s.place_marker((3164.68, 1555.15, 656.899), (0.7, 0.7, 0.7), 122.332) if "particle_303 geometry" not in marker_sets: s=new_marker_set('particle_303 geometry') marker_sets["particle_303 geometry"]=s s= marker_sets["particle_303 geometry"] mark=s.place_marker((3414.46, 1842.39, 648.852), (0.7, 0.7, 0.7), 209.047) if "particle_304 geometry" not in marker_sets: s=new_marker_set('particle_304 geometry') marker_sets["particle_304 geometry"]=s s= marker_sets["particle_304 geometry"] mark=s.place_marker((3251.07, 1804.12, 276.01), (0.7, 0.7, 0.7), 126.985) if "particle_305 geometry" not in marker_sets: s=new_marker_set('particle_305 geometry') marker_sets["particle_305 geometry"]=s s= marker_sets["particle_305 geometry"] mark=s.place_marker((3394.97, 1685.59, -93.6133), (0.7, 0.7, 0.7), 122.205) if "particle_306 geometry" not in marker_sets: s=new_marker_set('particle_306 geometry') marker_sets["particle_306 geometry"]=s s= marker_sets["particle_306 geometry"] mark=s.place_marker((3484.77, 1441.38, -158.329), (0.7, 0.7, 0.7), 107.95) if "particle_307 geometry" not in marker_sets: s=new_marker_set('particle_307 geometry') marker_sets["particle_307 geometry"]=s s= marker_sets["particle_307 geometry"] 
mark=s.place_marker((3115.92, 1433.46, 318.586), (0.7, 0.7, 0.7), 182.567) if "particle_308 geometry" not in marker_sets: s=new_marker_set('particle_308 geometry') marker_sets["particle_308 geometry"]=s s= marker_sets["particle_308 geometry"] mark=s.place_marker((2904.63, 1428.73, 915.569), (0.7, 0.7, 0.7), 185.274) if "particle_309 geometry" not in marker_sets: s=new_marker_set('particle_309 geometry') marker_sets["particle_309 geometry"]=s s= marker_sets["particle_309 geometry"] mark=s.place_marker((2962.23, 1601.02, 1350.81), (0.7, 0.7, 0.7), 413.567) if "particle_310 geometry" not in marker_sets: s=new_marker_set('particle_310 geometry') marker_sets["particle_310 geometry"]=s s= marker_sets["particle_310 geometry"] mark=s.place_marker((2773.64, 1522.26, 1465.18), (0.7, 0.7, 0.7), 240.01) if "particle_311 geometry" not in marker_sets: s=new_marker_set('particle_311 geometry') marker_sets["particle_311 geometry"]=s s= marker_sets["particle_311 geometry"] mark=s.place_marker((2800.06, 1554.59, 1445.84), (0.7, 0.7, 0.7), 238.995) if "particle_312 geometry" not in marker_sets: s=new_marker_set('particle_312 geometry') marker_sets["particle_312 geometry"]=s s= marker_sets["particle_312 geometry"] mark=s.place_marker((2745.66, 1413.48, 1212.44), (0.7, 0.7, 0.7), 203.674) if "particle_313 geometry" not in marker_sets: s=new_marker_set('particle_313 geometry') marker_sets["particle_313 geometry"]=s s= marker_sets["particle_313 geometry"] mark=s.place_marker((2509.81, 1517.86, 664.663), (0.7, 0.7, 0.7), 266.744) if "particle_314 geometry" not in marker_sets: s=new_marker_set('particle_314 geometry') marker_sets["particle_314 geometry"]=s s= marker_sets["particle_314 geometry"] mark=s.place_marker((2472.28, 1063.28, 664.315), (0.7, 0.7, 0.7), 147.585) if "particle_315 geometry" not in marker_sets: s=new_marker_set('particle_315 geometry') marker_sets["particle_315 geometry"]=s s= marker_sets["particle_315 geometry"] mark=s.place_marker((2517.42, 1186.17, 906.926), (0.7, 0.7, 0.7), 249.485) if "particle_316 geometry" not in marker_sets: s=new_marker_set('particle_316 geometry') marker_sets["particle_316 geometry"]=s s= marker_sets["particle_316 geometry"] mark=s.place_marker((2576.79, 1603.8, 983.446), (0.7, 0.7, 0.7), 119.371) if "particle_317 geometry" not in marker_sets: s=new_marker_set('particle_317 geometry') marker_sets["particle_317 geometry"]=s s= marker_sets["particle_317 geometry"] mark=s.place_marker((3052.2, 2118.84, 883.485), (0.7, 0.7, 0.7), 155.875) if "particle_318 geometry" not in marker_sets: s=new_marker_set('particle_318 geometry') marker_sets["particle_318 geometry"]=s s= marker_sets["particle_318 geometry"] mark=s.place_marker((3794.99, 2286.83, 1000.54), (0.7, 0.7, 0.7), 189.419) if "particle_319 geometry" not in marker_sets: s=new_marker_set('particle_319 geometry') marker_sets["particle_319 geometry"]=s s= marker_sets["particle_319 geometry"] mark=s.place_marker((4012.55, 2104.33, 1486.97), (0.7, 0.7, 0.7), 137.475) if "particle_320 geometry" not in marker_sets: s=new_marker_set('particle_320 geometry') marker_sets["particle_320 geometry"]=s s= marker_sets["particle_320 geometry"] mark=s.place_marker((3940.06, 1909.25, 1955.97), (0.7, 0.7, 0.7), 176.179) if "particle_321 geometry" not in marker_sets: s=new_marker_set('particle_321 geometry') marker_sets["particle_321 geometry"]=s s= marker_sets["particle_321 geometry"] mark=s.place_marker((4018.87, 1660.56, 2354.16), (0.7, 0.7, 0.7), 138.829) if "particle_322 geometry" not in marker_sets: s=new_marker_set('particle_322 
geometry') marker_sets["particle_322 geometry"]=s s= marker_sets["particle_322 geometry"] mark=s.place_marker((4179.67, 1359.17, 2612.03), (0.7, 0.7, 0.7), 148.727) if "particle_323 geometry" not in marker_sets: s=new_marker_set('particle_323 geometry') marker_sets["particle_323 geometry"]=s s= marker_sets["particle_323 geometry"] mark=s.place_marker((4543.21, 1020.42, 2788.74), (0.7, 0.7, 0.7), 230.323) if "particle_324 geometry" not in marker_sets: s=new_marker_set('particle_324 geometry') marker_sets["particle_324 geometry"]=s s= marker_sets["particle_324 geometry"] mark=s.place_marker((4312.12, 1296.57, 2275.42), (0.7, 0.7, 0.7), 175.376) if "particle_325 geometry" not in marker_sets: s=new_marker_set('particle_325 geometry') marker_sets["particle_325 geometry"]=s s= marker_sets["particle_325 geometry"] mark=s.place_marker((3984.11, 1560.66, 2004.96), (0.7, 0.7, 0.7), 161.163) if "particle_326 geometry" not in marker_sets: s=new_marker_set('particle_326 geometry') marker_sets["particle_326 geometry"]=s s= marker_sets["particle_326 geometry"] mark=s.place_marker((4176.87, 1956.64, 2184.09), (0.7, 0.7, 0.7), 125.885) if "particle_327 geometry" not in marker_sets: s=new_marker_set('particle_327 geometry') marker_sets["particle_327 geometry"]=s s= marker_sets["particle_327 geometry"] mark=s.place_marker((4476.7, 2302.74, 2170.14), (0.7, 0.7, 0.7), 206.635) if "particle_328 geometry" not in marker_sets: s=new_marker_set('particle_328 geometry') marker_sets["particle_328 geometry"]=s s= marker_sets["particle_328 geometry"] mark=s.place_marker((4130.08, 2225.81, 2459.21), (0.7, 0.7, 0.7), 151.392) if "particle_329 geometry" not in marker_sets: s=new_marker_set('particle_329 geometry') marker_sets["particle_329 geometry"]=s s= marker_sets["particle_329 geometry"] mark=s.place_marker((3872.15, 2059.52, 2668.3), (0.7, 0.7, 0.7), 173.388) if "particle_330 geometry" not in marker_sets: s=new_marker_set('particle_330 geometry') marker_sets["particle_330 geometry"]=s s= marker_sets["particle_330 geometry"] mark=s.place_marker((4028.97, 1787.67, 2770.83), (0.7, 0.7, 0.7), 135.825) if "particle_331 geometry" not in marker_sets: s=new_marker_set('particle_331 geometry') marker_sets["particle_331 geometry"]=s s= marker_sets["particle_331 geometry"] mark=s.place_marker((4363.28, 1526.56, 2802.81), (0.7, 0.7, 0.7), 186.839) if "particle_332 geometry" not in marker_sets: s=new_marker_set('particle_332 geometry') marker_sets["particle_332 geometry"]=s s= marker_sets["particle_332 geometry"] mark=s.place_marker((4757.29, 1274.75, 2806.78), (0.7, 0.7, 0.7), 121.189) if "particle_333 geometry" not in marker_sets: s=new_marker_set('particle_333 geometry') marker_sets["particle_333 geometry"]=s s= marker_sets["particle_333 geometry"] mark=s.place_marker((4510.11, 1481.63, 2530.81), (0.7, 0.7, 0.7), 102.916) if "particle_334 geometry" not in marker_sets: s=new_marker_set('particle_334 geometry') marker_sets["particle_334 geometry"]=s s= marker_sets["particle_334 geometry"] mark=s.place_marker((4085.02, 1667.71, 2116.36), (0.7, 0.7, 0.7), 212.769) if "particle_335 geometry" not in marker_sets: s=new_marker_set('particle_335 geometry') marker_sets["particle_335 geometry"]=s s= marker_sets["particle_335 geometry"] mark=s.place_marker((3488.82, 1909.6, 1901.44), (0.7, 0.7, 0.7), 173.092) if "particle_336 geometry" not in marker_sets: s=new_marker_set('particle_336 geometry') marker_sets["particle_336 geometry"]=s s= marker_sets["particle_336 geometry"] mark=s.place_marker((3199.7, 2121.89, 1540.97), (0.7, 0.7, 0.7), 
264.502) if "particle_337 geometry" not in marker_sets: s=new_marker_set('particle_337 geometry') marker_sets["particle_337 geometry"]=s s= marker_sets["particle_337 geometry"] mark=s.place_marker((3279.77, 2296.39, 1014.12), (0.7, 0.7, 0.7), 208.666) if "particle_338 geometry" not in marker_sets: s=new_marker_set('particle_338 geometry') marker_sets["particle_338 geometry"]=s s= marker_sets["particle_338 geometry"] mark=s.place_marker((3368.72, 2186.06, 535.527), (0.7, 0.7, 0.7), 186.797) if "particle_339 geometry" not in marker_sets: s=new_marker_set('particle_339 geometry') marker_sets["particle_339 geometry"]=s s= marker_sets["particle_339 geometry"] mark=s.place_marker((2987.72, 1929.17, 318.969), (0.7, 0.7, 0.7), 255.534) if "particle_340 geometry" not in marker_sets: s=new_marker_set('particle_340 geometry') marker_sets["particle_340 geometry"]=s s= marker_sets["particle_340 geometry"] mark=s.place_marker((3026.6, 1523.7, 183.112), (0.7, 0.7, 0.7), 153.126) if "particle_341 geometry" not in marker_sets: s=new_marker_set('particle_341 geometry') marker_sets["particle_341 geometry"]=s s= marker_sets["particle_341 geometry"] mark=s.place_marker((3229.09, 1734.52, -82.024), (0.7, 0.7, 0.7), 165.816) if "particle_342 geometry" not in marker_sets: s=new_marker_set('particle_342 geometry') marker_sets["particle_342 geometry"]=s s= marker_sets["particle_342 geometry"] mark=s.place_marker((2995.27, 2014.07, 64.1188), (0.7, 0.7, 0.7), 134.429) if "particle_343 geometry" not in marker_sets: s=new_marker_set('particle_343 geometry') marker_sets["particle_343 geometry"]=s s= marker_sets["particle_343 geometry"] mark=s.place_marker((3063.01, 2206.04, 409.167), (0.7, 0.7, 0.7), 178.971) if "particle_344 geometry" not in marker_sets: s=new_marker_set('particle_344 geometry') marker_sets["particle_344 geometry"]=s s= marker_sets["particle_344 geometry"] mark=s.place_marker((3495.41, 2214.81, 704.547), (0.7, 0.7, 0.7), 189.969) if "particle_345 geometry" not in marker_sets: s=new_marker_set('particle_345 geometry') marker_sets["particle_345 geometry"]=s s= marker_sets["particle_345 geometry"] mark=s.place_marker((4090.89, 2393.54, 602.974), (0.7, 0.7, 0.7), 121.359) if "particle_346 geometry" not in marker_sets: s=new_marker_set('particle_346 geometry') marker_sets["particle_346 geometry"]=s s= marker_sets["particle_346 geometry"] mark=s.place_marker((4422.72, 2347.67, 1036.91), (0.7, 0.7, 0.7), 187.262) if "particle_347 geometry" not in marker_sets: s=new_marker_set('particle_347 geometry') marker_sets["particle_347 geometry"]=s s= marker_sets["particle_347 geometry"] mark=s.place_marker((4330.61, 2257.63, 1685.03), (0.7, 0.7, 0.7), 164.335) if "particle_348 geometry" not in marker_sets: s=new_marker_set('particle_348 geometry') marker_sets["particle_348 geometry"]=s s= marker_sets["particle_348 geometry"] mark=s.place_marker((4312.5, 1823.64, 1964.8), (0.7, 0.7, 0.7), 138.363) if "particle_349 geometry" not in marker_sets: s=new_marker_set('particle_349 geometry') marker_sets["particle_349 geometry"]=s s= marker_sets["particle_349 geometry"] mark=s.place_marker((4439.73, 1576.52, 2224.02), (0.7, 0.7, 0.7), 138.49) if "particle_350 geometry" not in marker_sets: s=new_marker_set('particle_350 geometry') marker_sets["particle_350 geometry"]=s s= marker_sets["particle_350 geometry"] mark=s.place_marker((4401.24, 1840.02, 2453.79), (0.7, 0.7, 0.7), 116.325) if "particle_351 geometry" not in marker_sets: s=new_marker_set('particle_351 geometry') marker_sets["particle_351 geometry"]=s s= 
marker_sets["particle_351 geometry"] mark=s.place_marker((4193.59, 2146.13, 2172.55), (0.7, 0.7, 0.7), 106.511) if "particle_352 geometry" not in marker_sets: s=new_marker_set('particle_352 geometry') marker_sets["particle_352 geometry"]=s s= marker_sets["particle_352 geometry"] mark=s.place_marker((3936.4, 2237.43, 1682.24), (0.7, 0.7, 0.7), 151.096) if "particle_353 geometry" not in marker_sets: s=new_marker_set('particle_353 geometry') marker_sets["particle_353 geometry"]=s s= marker_sets["particle_353 geometry"] mark=s.place_marker((3801.79, 2248.47, 1006.35), (0.7, 0.7, 0.7), 240.856) if "particle_354 geometry" not in marker_sets: s=new_marker_set('particle_354 geometry') marker_sets["particle_354 geometry"]=s s= marker_sets["particle_354 geometry"] mark=s.place_marker((3725.62, 2155.26, 491.75), (0.7, 0.7, 0.7), 149.7) if "particle_355 geometry" not in marker_sets: s=new_marker_set('particle_355 geometry') marker_sets["particle_355 geometry"]=s s= marker_sets["particle_355 geometry"] mark=s.place_marker((3450.37, 2166.86, 279.392), (0.7, 0.7, 0.7), 165.943) if "particle_356 geometry" not in marker_sets: s=new_marker_set('particle_356 geometry') marker_sets["particle_356 geometry"]=s s= marker_sets["particle_356 geometry"] mark=s.place_marker((2950, 1991.16, 586.675), (0.7, 0.7, 0.7), 178.971) if "particle_357 geometry" not in marker_sets: s=new_marker_set('particle_357 geometry') marker_sets["particle_357 geometry"]=s s= marker_sets["particle_357 geometry"] mark=s.place_marker((2219.91, 1940.11, 775.343), (0.7, 0.7, 0.7), 154.945) for k in surf_sets.keys(): chimera.openModels.add([surf_sets[k]])
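# --- Editor's note (not part of the original script) ------------------------
# A sketch (not in the original) of exporting the marker table above to a
# standalone Chimera marker file (.cmm) so the markers can be reopened without
# rerunning this script; the output file name is an assumption.
def _export_cmm(markers, path="particles.cmm"):
  with open(path, "w") as f:
    f.write('<marker_set name="particles">\n')
    for num, (x, y, z), (r, g, b), radius in markers:
      f.write('<marker id="%d" x="%g" y="%g" z="%g" r="%g" g="%g" b="%g" radius="%g"/>\n'
              % (num, x, y, z, r, g, b, radius))
    f.write('</marker_set>\n')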
gpl-3.0
YangSongzhou/django
tests/template_tests/test_extends.py
143
7141
import os

from django.template import Context, Engine, TemplateDoesNotExist
from django.template.loader_tags import ExtendsError
from django.template.loaders.base import Loader
from django.test import SimpleTestCase, ignore_warnings
from django.utils.deprecation import RemovedInDjango20Warning

from .utils import ROOT

RECURSIVE = os.path.join(ROOT, 'recursive_templates')


class ExtendsBehaviorTests(SimpleTestCase):

    def test_normal_extend(self):
        engine = Engine(dirs=[os.path.join(RECURSIVE, 'fs')])
        template = engine.get_template('one.html')
        output = template.render(Context({}))
        self.assertEqual(output.strip(), 'three two one')

    def test_extend_recursive(self):
        engine = Engine(dirs=[
            os.path.join(RECURSIVE, 'fs'),
            os.path.join(RECURSIVE, 'fs2'),
            os.path.join(RECURSIVE, 'fs3'),
        ])
        template = engine.get_template('recursive.html')
        output = template.render(Context({}))
        self.assertEqual(output.strip(), 'fs3/recursive fs2/recursive fs/recursive')

    def test_extend_missing(self):
        engine = Engine(dirs=[os.path.join(RECURSIVE, 'fs')])
        template = engine.get_template('extend-missing.html')
        with self.assertRaises(TemplateDoesNotExist) as e:
            template.render(Context({}))

        tried = e.exception.tried
        self.assertEqual(len(tried), 1)
        self.assertEqual(tried[0][0].template_name, 'missing.html')

    def test_recursive_multiple_loaders(self):
        engine = Engine(
            dirs=[os.path.join(RECURSIVE, 'fs')],
            loaders=[
                ('django.template.loaders.locmem.Loader', {
                    'one.html': (
                        '{% extends "one.html" %}{% block content %}{{ block.super }} locmem-one{% endblock %}'
                    ),
                    'two.html': (
                        '{% extends "two.html" %}{% block content %}{{ block.super }} locmem-two{% endblock %}'
                    ),
                    'three.html': (
                        '{% extends "three.html" %}{% block content %}{{ block.super }} locmem-three{% endblock %}'
                    ),
                }),
                'django.template.loaders.filesystem.Loader',
            ],
        )
        template = engine.get_template('one.html')
        output = template.render(Context({}))
        self.assertEqual(output.strip(), 'three locmem-three two locmem-two one locmem-one')

    def test_extend_self_error(self):
        """
        Catch if a template extends itself and no other matching
        templates are found.
        """
        engine = Engine(dirs=[os.path.join(RECURSIVE, 'fs')])
        template = engine.get_template('self.html')
        with self.assertRaises(TemplateDoesNotExist):
            template.render(Context({}))

    def test_extend_cached(self):
        engine = Engine(
            dirs=[
                os.path.join(RECURSIVE, 'fs'),
                os.path.join(RECURSIVE, 'fs2'),
                os.path.join(RECURSIVE, 'fs3'),
            ],
            loaders=[
                ('django.template.loaders.cached.Loader', [
                    'django.template.loaders.filesystem.Loader',
                ]),
            ],
        )
        template = engine.get_template('recursive.html')
        output = template.render(Context({}))
        self.assertEqual(output.strip(), 'fs3/recursive fs2/recursive fs/recursive')

        cache = engine.template_loaders[0].get_template_cache
        self.assertEqual(len(cache), 3)
        expected_path = os.path.join('fs', 'recursive.html')
        self.assertTrue(cache['recursive.html'].origin.name.endswith(expected_path))

        # Render another path that uses the same templates from the cache.
        template = engine.get_template('other-recursive.html')
        output = template.render(Context({}))
        self.assertEqual(output.strip(), 'fs3/recursive fs2/recursive fs/recursive')

        # Template objects should not be duplicated.
        self.assertEqual(len(cache), 4)
        expected_path = os.path.join('fs', 'other-recursive.html')
        self.assertTrue(cache['other-recursive.html'].origin.name.endswith(expected_path))

    def test_unique_history_per_loader(self):
        """
        Extending should continue even if two loaders return the same
        name for a template.
        """
        engine = Engine(
            loaders=[
                ['django.template.loaders.locmem.Loader', {
                    'base.html': '{% extends "base.html" %}{% block content %}{{ block.super }} loader1{% endblock %}',
                }],
                ['django.template.loaders.locmem.Loader', {
                    'base.html': '{% block content %}loader2{% endblock %}',
                }],
            ]
        )
        template = engine.get_template('base.html')
        output = template.render(Context({}))
        self.assertEqual(output.strip(), 'loader2 loader1')


class NonRecursiveLoader(Loader):

    def __init__(self, engine, templates_dict):
        self.templates_dict = templates_dict
        super(NonRecursiveLoader, self).__init__(engine)

    def load_template_source(self, template_name, template_dirs=None):
        try:
            return self.templates_dict[template_name], template_name
        except KeyError:
            raise TemplateDoesNotExist(template_name)


@ignore_warnings(category=RemovedInDjango20Warning)
class NonRecursiveLoaderExtendsTests(SimpleTestCase):

    loaders = [
        ('template_tests.test_extends.NonRecursiveLoader', {
            'base.html': 'base',
            'index.html': '{% extends "base.html" %}',
            'recursive.html': '{% extends "recursive.html" %}',
            'other-recursive.html': '{% extends "recursive.html" %}',
            'a.html': '{% extends "b.html" %}',
            'b.html': '{% extends "a.html" %}',
        }),
    ]

    def test_extend(self):
        engine = Engine(loaders=self.loaders)
        output = engine.render_to_string('index.html')
        self.assertEqual(output, 'base')

    def test_extend_cached(self):
        engine = Engine(loaders=[
            ('django.template.loaders.cached.Loader', self.loaders),
        ])
        output = engine.render_to_string('index.html')
        self.assertEqual(output, 'base')

        cache = engine.template_loaders[0].template_cache
        self.assertTrue('base.html' in cache)
        self.assertTrue('index.html' in cache)

        # Render a second time from cache.
        output = engine.render_to_string('index.html')
        self.assertEqual(output, 'base')

    def test_extend_error(self):
        engine = Engine(loaders=self.loaders)
        msg = 'Cannot extend templates recursively when using non-recursive template loaders'
        with self.assertRaisesMessage(ExtendsError, msg):
            engine.render_to_string('recursive.html')
        with self.assertRaisesMessage(ExtendsError, msg):
            engine.render_to_string('other-recursive.html')
        with self.assertRaisesMessage(ExtendsError, msg):
            engine.render_to_string('a.html')
bsd-3-clause
timoschwarzer/blendworks
BlendWorks Server/python/Lib/encodings/big5hkscs.py
816
1039
#
# big5hkscs.py: Python Unicode Codec for BIG5HKSCS
#
# Written by Hye-Shik Chang <perky@FreeBSD.org>
#

import _codecs_hk, codecs
import _multibytecodec as mbc

codec = _codecs_hk.getcodec('big5hkscs')


class Codec(codecs.Codec):
    encode = codec.encode
    decode = codec.decode


class IncrementalEncoder(mbc.MultibyteIncrementalEncoder,
                         codecs.IncrementalEncoder):
    codec = codec


class IncrementalDecoder(mbc.MultibyteIncrementalDecoder,
                         codecs.IncrementalDecoder):
    codec = codec


class StreamReader(Codec, mbc.MultibyteStreamReader, codecs.StreamReader):
    codec = codec


class StreamWriter(Codec, mbc.MultibyteStreamWriter, codecs.StreamWriter):
    codec = codec


def getregentry():
    return codecs.CodecInfo(
        name='big5hkscs',
        encode=Codec().encode,
        decode=Codec().decode,
        incrementalencoder=IncrementalEncoder,
        incrementaldecoder=IncrementalDecoder,
        streamreader=StreamReader,
        streamwriter=StreamWriter,
    )
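# --- Editor's note (not part of the original module) ------------------------
# This codec is normally reached through the codec registry rather than by
# importing the module directly. A minimal round-trip sketch:
#
#   import codecs
#   info = codecs.lookup('big5hkscs')   # resolves to getregentry() above
#   data, _ = info.encode(u'\u5927')    # one CJK character -> BIG5-HKSCS bytes
#   text, _ = info.decode(data)
#   assert text == u'\u5927'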
gpl-2.0
sebrandon1/tempest
tempest/tests/lib/fake_http.py
8
2633
# Copyright 2013 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import copy


class fake_httplib2(object):

    def __init__(self, return_type=None, *args, **kwargs):
        self.return_type = return_type

    def request(self, uri, method="GET", body=None, headers=None,
                redirections=5, connection_type=None, chunked=False):
        if not self.return_type:
            fake_headers = fake_http_response(headers)
            return_obj = {
                'uri': uri,
                'method': method,
                'body': body,
                'headers': headers
            }
            return (fake_headers, return_obj)
        elif isinstance(self.return_type, int):
            body = body or "fake_body"
            header_info = {
                'content-type': 'text/plain',
                'content-length': len(body)
            }
            resp_header = fake_http_response(header_info,
                                             status=self.return_type)
            return (resp_header, body)
        else:
            msg = "unsupported return type %s" % self.return_type
            raise TypeError(msg)


class fake_http_response(dict):
    def __init__(self, headers, body=None,
                 version=1.0, status=200, reason="Ok"):
        """Initialization of fake HTTP Response

        :param headers: dict representing HTTP response headers
        :param body: file-like object
        :param version: HTTP Version
        :param status: Response status code
        :param reason: Status code related message.
        """
        self.body = body
        self.status = status
        self['status'] = str(self.status)
        self.reason = reason
        self.version = version
        self.headers = headers

        if headers:
            for key, value in headers.items():
                self[key.lower()] = value

    def getheaders(self):
        return copy.deepcopy(self.headers).items()

    def getheader(self, key, default):
        return self.headers.get(key, default)

    def read(self, amt):
        return self.body.read(amt)
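# --- Editor's note (not part of the original module) ------------------------
# A minimal sketch of how the fakes above are typically driven from a test
# (the URLs are illustrative, not taken from tempest):
#
#   fake = fake_httplib2(return_type=200)          # canned 200 response
#   resp, body = fake.request('http://example.test/v1/servers')
#   assert resp.status == 200 and body == 'fake_body'
#
#   echo = fake_httplib2()                         # no return_type: echo mode
#   resp, call = echo.request('http://example.test', method='POST', body='x')
#   assert call['method'] == 'POST'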
apache-2.0
znerol/spreadflow-observer-fs
setup.py
2
1478
from setuptools import setup

tests_require = [
    'SpreadFlowCore[tests]',
    'SpreadFlowDelta[tests]',
    'SpreadFlowFormatBSON',
    'mock',
    'testtools'
]

setup(
    name='SpreadFlowObserverFS',
    version='0.0.1',
    description='Filesystem observer for SpreadFlow metadata extraction and processing engine',
    author='Lorenz Schori',
    author_email='lo@znerol.ch',
    url='https://github.com/znerol/spreadflow-observer-fs',
    packages=[
        'spreadflow_observer_fs',
        'spreadflow_observer_fs.test',
        'twisted.plugins'
    ],
    package_data={
        'twisted.plugins': [
            'twisted/plugins/spreadflow_observer_fs_endpoint.py',
        ]
    },
    entry_points={
        'console_scripts': [
            'spreadflow-observer-fs-default = spreadflow_observer_fs.script:main',
        ]
    },
    install_requires=[
        'SpreadFlowCore',
        'SpreadFlowFormatBSON',
        'pathtools',
        'pymongo',
        'watchdog'
    ],
    tests_require=tests_require,
    extras_require={
        'tests': tests_require
    },
    zip_safe=False,
    classifiers=[
        'Development Status :: 3 - Alpha',
        'Framework :: Twisted',
        'Intended Audience :: Developers',
        'Intended Audience :: Information Technology',
        'License :: OSI Approved :: MIT License',
        'Operating System :: OS Independent',
        'Programming Language :: Python :: 2.7',
        'Topic :: Multimedia'
    ],
)
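# --- Editor's note (not part of the original file) ---------------------------
# Typical developer workflow against a setup.py like this one (commands are
# illustrative, not taken from the project docs):
#
#   pip install -e .[tests]          # editable install plus the test extras
#   python setup.py sdist            # build a source distribution
#   spreadflow-observer-fs-default   # console script declared in entry_points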
mit
rushiagr/keystone
keystone/tests/unit/core.py
4
24241
# Copyright 2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

from __future__ import absolute_import
import atexit
import functools
import logging
import os
import re
import shutil
import socket
import sys
import warnings

import fixtures
from oslo_config import cfg
from oslo_config import fixture as config_fixture
from oslo_log import log
import oslotest.base as oslotest
from oslotest import mockpatch
import six
from sqlalchemy import exc
from testtools import testcase
import webob

# NOTE(ayoung)
# environment.use_eventlet must run before any of the code that will
# call the eventlet monkeypatching.
from keystone.common import environment  # noqa
environment.use_eventlet()

from keystone import auth
from keystone.common import config as common_cfg
from keystone.common import dependency
from keystone.common import kvs
from keystone.common.kvs import core as kvs_core
from keystone import config
from keystone import controllers
from keystone import exception
from keystone import notifications
from keystone.policy.backends import rules
from keystone.server import common
from keystone import service
from keystone.tests.unit import ksfixtures


config.configure()

LOG = log.getLogger(__name__)
PID = six.text_type(os.getpid())
TESTSDIR = os.path.dirname(os.path.abspath(__file__))
TESTCONF = os.path.join(TESTSDIR, 'config_files')
ROOTDIR = os.path.normpath(os.path.join(TESTSDIR, '..', '..', '..'))
VENDOR = os.path.join(ROOTDIR, 'vendor')
ETCDIR = os.path.join(ROOTDIR, 'etc')


def _calc_tmpdir():
    env_val = os.environ.get('KEYSTONE_TEST_TEMP_DIR')
    if not env_val:
        return os.path.join(TESTSDIR, 'tmp', PID)
    return os.path.join(env_val, PID)


TMPDIR = _calc_tmpdir()

CONF = cfg.CONF
log.register_options(CONF)
rules.init()

IN_MEM_DB_CONN_STRING = 'sqlite://'

exception._FATAL_EXCEPTION_FORMAT_ERRORS = True
os.makedirs(TMPDIR)
atexit.register(shutil.rmtree, TMPDIR)


class dirs(object):
    @staticmethod
    def root(*p):
        return os.path.join(ROOTDIR, *p)

    @staticmethod
    def etc(*p):
        return os.path.join(ETCDIR, *p)

    @staticmethod
    def tests(*p):
        return os.path.join(TESTSDIR, *p)

    @staticmethod
    def tmp(*p):
        return os.path.join(TMPDIR, *p)

    @staticmethod
    def tests_conf(*p):
        return os.path.join(TESTCONF, *p)


# keystone.common.sql.initialize() for testing.
DEFAULT_TEST_DB_FILE = dirs.tmp('test.db')


@atexit.register
def remove_test_databases():
    db = dirs.tmp('test.db')
    if os.path.exists(db):
        os.unlink(db)
    pristine = dirs.tmp('test.db.pristine')
    if os.path.exists(pristine):
        os.unlink(pristine)


def generate_paste_config(extension_name):
    # Generate a file, based on keystone-paste.ini, that is named:
    # extension_name.ini, and includes extension_name in the pipeline
    with open(dirs.etc('keystone-paste.ini'), 'r') as f:
        contents = f.read()

    new_contents = contents.replace(' service_v3',
                                    ' %s service_v3' % (extension_name))

    new_paste_file = dirs.tmp(extension_name + '.ini')
    with open(new_paste_file, 'w') as f:
        f.write(new_contents)

    return new_paste_file


def remove_generated_paste_config(extension_name):
    # Remove the generated paste config file, named extension_name.ini
    paste_file_to_remove = dirs.tmp(extension_name + '.ini')
    os.remove(paste_file_to_remove)


def skip_if_cache_disabled(*sections):
    """This decorator is used to skip a test if caching is disabled either
    globally or for the specific section.

    In the code fragment::

        @skip_if_cache_is_disabled('assignment', 'token')
        def test_method(*args):
            ...

    The method test_method would be skipped if caching is disabled globally
    via the `enabled` option in the `cache` section of the configuration or
    if the `caching` option is set to false in either `assignment` or `token`
    sections of the configuration. This decorator can be used with no
    arguments to only check global caching.

    If a specified configuration section does not define the `caching` option,
    this decorator makes the same assumption as the `should_cache_fn` in
    keystone.common.cache that caching should be enabled.
    """
    def wrapper(f):
        @functools.wraps(f)
        def inner(*args, **kwargs):
            if not CONF.cache.enabled:
                raise testcase.TestSkipped('Cache globally disabled.')
            for s in sections:
                conf_sec = getattr(CONF, s, None)
                if conf_sec is not None:
                    if not getattr(conf_sec, 'caching', True):
                        raise testcase.TestSkipped('%s caching disabled.' % s)
            return f(*args, **kwargs)
        return inner
    return wrapper


def skip_if_no_multiple_domains_support(f):
    """This decorator is used to skip a test if an identity driver
    does not support multiple domains.
    """
    @functools.wraps(f)
    def wrapper(*args, **kwargs):
        test_obj = args[0]
        if not test_obj.identity_api.multiple_domains_supported:
            raise testcase.TestSkipped('No multiple domains support')
        return f(*args, **kwargs)
    return wrapper


class UnexpectedExit(Exception):
    pass


class BadLog(Exception):
    """Raised on invalid call to logging (parameter mismatch)."""
    pass


class TestClient(object):
    def __init__(self, app=None, token=None):
        self.app = app
        self.token = token

    def request(self, method, path, headers=None, body=None):
        if headers is None:
            headers = {}

        if self.token:
            headers.setdefault('X-Auth-Token', self.token)

        req = webob.Request.blank(path)
        req.method = method
        for k, v in six.iteritems(headers):
            req.headers[k] = v
        if body:
            req.body = body
        return req.get_response(self.app)

    def get(self, path, headers=None):
        return self.request('GET', path=path, headers=headers)

    def post(self, path, headers=None, body=None):
        return self.request('POST', path=path, headers=headers, body=body)

    def put(self, path, headers=None, body=None):
        return self.request('PUT', path=path, headers=headers, body=body)


class BaseTestCase(oslotest.BaseTestCase):
    """Light weight base test class.

    This is a placeholder that will eventually go away once the
    setup/teardown in TestCase is properly trimmed down to the bare
    essentials. This is really just a play to speed up the tests by
    eliminating unnecessary work.
    """

    def setUp(self):
        super(BaseTestCase, self).setUp()
        self.useFixture(mockpatch.PatchObject(sys, 'exit',
                                              side_effect=UnexpectedExit))

    def cleanup_instance(self, *names):
        """Create a function suitable for use with self.addCleanup.

        :returns: a callable that uses a closure to delete instance attributes
        """
        def cleanup():
            for name in names:
                # TODO(dstanek): remove this 'if' statement once
                # load_backend in test_backend_ldap is only called once
                # per test
                if hasattr(self, name):
                    delattr(self, name)
        return cleanup


@dependency.requires('revoke_api')
class TestCase(BaseTestCase):

    def config_files(self):
        return []

    def config_overrides(self):
        signing_certfile = 'examples/pki/certs/signing_cert.pem'
        signing_keyfile = 'examples/pki/private/signing_key.pem'
        self.config_fixture.config(group='oslo_policy',
                                   policy_file=dirs.etc('policy.json'))
        self.config_fixture.config(
            # TODO(morganfainberg): Make Cache Testing a separate test case
            # in tempest, and move it out of the base unit tests.
            group='cache',
            backend='dogpile.cache.memory',
            enabled=True,
            proxies=['keystone.tests.unit.test_cache.CacheIsolatingProxy'])
        self.config_fixture.config(
            group='catalog',
            driver='keystone.catalog.backends.templated.Catalog',
            template_file=dirs.tests('default_catalog.templates'))
        self.config_fixture.config(
            group='identity',
            driver='keystone.identity.backends.sql.Identity')
        self.config_fixture.config(
            group='kvs',
            backends=[
                ('keystone.tests.unit.test_kvs.'
                 'KVSBackendForcedKeyMangleFixture'),
                'keystone.tests.unit.test_kvs.KVSBackendFixture'])
        self.config_fixture.config(
            group='revoke',
            driver='keystone.contrib.revoke.backends.kvs.Revoke')
        self.config_fixture.config(
            group='signing', certfile=signing_certfile,
            keyfile=signing_keyfile,
            ca_certs='examples/pki/certs/cacert.pem')
        self.config_fixture.config(
            group='token',
            driver='keystone.token.persistence.backends.kvs.Token')
        self.config_fixture.config(
            group='trust',
            driver='keystone.trust.backends.sql.Trust')
        self.config_fixture.config(
            group='saml', certfile=signing_certfile, keyfile=signing_keyfile)
        self.config_fixture.config(
            default_log_levels=[
                'amqp=WARN',
                'amqplib=WARN',
                'boto=WARN',
                'qpid=WARN',
                'sqlalchemy=WARN',
                'suds=INFO',
                'oslo.messaging=INFO',
                'iso8601=WARN',
                'requests.packages.urllib3.connectionpool=WARN',
                'routes.middleware=INFO',
                'stevedore.extension=INFO',
                'keystone.notifications=INFO',
                'keystone.common._memcache_pool=INFO',
                'keystone.common.ldap=INFO',
            ])
        self.auth_plugin_config_override()

    def auth_plugin_config_override(self, methods=None, **method_classes):
        if methods is None:
            methods = ['external', 'password', 'token', ]
            if not method_classes:
                method_classes = dict(
                    external='keystone.auth.plugins.external.DefaultDomain',
                    password='keystone.auth.plugins.password.Password',
                    token='keystone.auth.plugins.token.Token',
                )
        self.config_fixture.config(group='auth', methods=methods)
        common_cfg.setup_authentication()
        if method_classes:
            self.config_fixture.config(group='auth', **method_classes)

    def setUp(self):
        super(TestCase, self).setUp()
        self.addCleanup(self.cleanup_instance('config_fixture', 'logger'))

        self.addCleanup(CONF.reset)

        self.useFixture(mockpatch.PatchObject(logging.Handler, 'handleError',
                                              side_effect=BadLog))
        self.config_fixture = self.useFixture(config_fixture.Config(CONF))
        self.config(self.config_files())

        # NOTE(morganfainberg): mock the auth plugin setup to use the config
        # fixture which automatically unregisters options when performing
        # cleanup.
        def mocked_register_auth_plugin_opt(conf, opt):
            self.config_fixture.register_opt(opt, group='auth')
        self.register_auth_plugin_opt_patch = self.useFixture(
            mockpatch.PatchObject(common_cfg, '_register_auth_plugin_opt',
                                  new=mocked_register_auth_plugin_opt))

        self.config_overrides()

        self.logger = self.useFixture(fixtures.FakeLogger(level=logging.DEBUG))

        # NOTE(morganfainberg): This code is a copy from the oslo-incubator
        # log module. This is not in a function or otherwise available to use
        # without having a CONF object to setup logging. This should help to
        # reduce the log size by limiting what we log (similar to how Keystone
        # would run under mod_wsgi or eventlet).
        for pair in CONF.default_log_levels:
            mod, _sep, level_name = pair.partition('=')
            logger = logging.getLogger(mod)
            logger.setLevel(level_name)

        warnings.filterwarnings('error', category=DeprecationWarning,
                                module='^keystone\\.')
        warnings.simplefilter('error', exc.SAWarning)
        self.addCleanup(warnings.resetwarnings)

        self.useFixture(ksfixtures.Cache())

        # Clear the registry of providers so that providers from previous
        # tests aren't used.
        self.addCleanup(dependency.reset)

        self.addCleanup(kvs.INMEMDB.clear)

        # Ensure Notification subscriptions and resource types are empty
        self.addCleanup(notifications.clear_subscribers)
        self.addCleanup(notifications.reset_notifier)

        # Reset the auth-plugin registry
        self.addCleanup(self.clear_auth_plugin_registry)

        self.addCleanup(setattr, controllers, '_VERSIONS', [])

    def config(self, config_files):
        CONF(args=[], project='keystone', default_config_files=config_files)

    def load_backends(self):
        """Initializes each manager and assigns them to an attribute."""

        # TODO(blk-u): Shouldn't need to clear the registry here, but some
        # tests call load_backends multiple times. These should be fixed to
        # only call load_backends once.
        dependency.reset()

        # TODO(morganfainberg): Shouldn't need to clear the registry here, but
        # some tests call load_backends multiple times. Since it is not
        # possible to re-configure a backend, we need to clear the list. This
        # should eventually be removed once testing has been cleaned up.
        kvs_core.KEY_VALUE_STORE_REGISTRY.clear()

        self.clear_auth_plugin_registry()
        drivers, _unused = common.setup_backends(
            load_extra_backends_fn=self.load_extra_backends)

        for manager_name, manager in six.iteritems(drivers):
            setattr(self, manager_name, manager)
        self.addCleanup(self.cleanup_instance(*drivers.keys()))

    def load_extra_backends(self):
        """Override to load managers that aren't loaded by default.

        This is useful to load managers initialized by extensions. No extra
        backends are loaded by default.

        :return: dict of name -> manager
        """
        return {}

    def load_fixtures(self, fixtures):
        """Hacky basic and naive fixture loading based on a python module.

        Expects that the various APIs into the various services are already
        defined on `self`.
        """
        # NOTE(dstanek): create a list of attribute names to be removed
        # from this instance during cleanup
        fixtures_to_cleanup = []

        # TODO(termie): doing something from json, probably based on Django's
        #               loaddata will be much preferred.
        if (hasattr(self, 'identity_api') and
                hasattr(self, 'assignment_api') and
                hasattr(self, 'resource_api')):
            for domain in fixtures.DOMAINS:
                try:
                    rv = self.resource_api.create_domain(domain['id'], domain)
                except exception.Conflict:
                    rv = self.resource_api.get_domain(domain['id'])
                except exception.NotImplemented:
                    rv = domain
                attrname = 'domain_%s' % domain['id']
                setattr(self, attrname, rv)
                fixtures_to_cleanup.append(attrname)

            for tenant in fixtures.TENANTS:
                if hasattr(self, 'tenant_%s' % tenant['id']):
                    try:
                        # This will clear out any roles on the project as well
                        self.resource_api.delete_project(tenant['id'])
                    except exception.ProjectNotFound:
                        pass
                rv = self.resource_api.create_project(
                    tenant['id'], tenant)

                attrname = 'tenant_%s' % tenant['id']
                setattr(self, attrname, rv)
                fixtures_to_cleanup.append(attrname)

            for role in fixtures.ROLES:
                try:
                    rv = self.role_api.create_role(role['id'], role)
                except exception.Conflict:
                    rv = self.role_api.get_role(role['id'])
                attrname = 'role_%s' % role['id']
                setattr(self, attrname, rv)
                fixtures_to_cleanup.append(attrname)

            for user in fixtures.USERS:
                user_copy = user.copy()
                tenants = user_copy.pop('tenants')
                try:
                    existing_user = getattr(self, 'user_%s' % user['id'], None)
                    if existing_user is not None:
                        self.identity_api.delete_user(existing_user['id'])
                except exception.UserNotFound:
                    pass

                # For users, the manager layer will generate the ID
                user_copy = self.identity_api.create_user(user_copy)
                # Our tests expect that the password is still in the user
                # record so that they can reference it, so put it back into
                # the dict returned.
                user_copy['password'] = user['password']

                for tenant_id in tenants:
                    try:
                        self.assignment_api.add_user_to_project(
                            tenant_id, user_copy['id'])
                    except exception.Conflict:
                        pass

                # Use the ID from the fixture as the attribute name, so
                # that our tests can easily reference each user dict, while
                # the ID in the dict will be the real public ID.
                attrname = 'user_%s' % user['id']
                setattr(self, attrname, user_copy)
                fixtures_to_cleanup.append(attrname)

            self.addCleanup(self.cleanup_instance(*fixtures_to_cleanup))

    def _paste_config(self, config):
        if not config.startswith('config:'):
            test_path = os.path.join(TESTSDIR, config)
            etc_path = os.path.join(ROOTDIR, 'etc', config)
            for path in [test_path, etc_path]:
                if os.path.exists('%s-paste.ini' % path):
                    return 'config:%s-paste.ini' % path
        return config

    def loadapp(self, config, name='main'):
        return service.loadapp(self._paste_config(config), name=name)

    def clear_auth_plugin_registry(self):
        auth.controllers.AUTH_METHODS.clear()
        auth.controllers.AUTH_PLUGINS_LOADED = False

    def assertCloseEnoughForGovernmentWork(self, a, b, delta=3):
        """Asserts that two datetimes are nearly equal within a small delta.

        :param delta: Maximum allowable time delta, defined in seconds.
        """
        msg = '%s != %s within %s delta' % (a, b, delta)

        self.assertTrue(abs(a - b).seconds <= delta, msg)

    def assertNotEmpty(self, l):
        self.assertTrue(len(l))

    def assertDictEqual(self, d1, d2, msg=None):
        self.assertIsInstance(d1, dict)
        self.assertIsInstance(d2, dict)
        self.assertEqual(d1, d2, msg)

    def assertRaisesRegexp(self, expected_exception, expected_regexp,
                           callable_obj, *args, **kwargs):
        """Asserts that the message in a raised exception matches a regexp."""
        try:
            callable_obj(*args, **kwargs)
        except expected_exception as exc_value:
            if isinstance(expected_regexp, six.string_types):
                expected_regexp = re.compile(expected_regexp)

            if isinstance(exc_value.args[0], unicode):
                if not expected_regexp.search(unicode(exc_value)):
                    raise self.failureException(
                        '"%s" does not match "%s"' %
                        (expected_regexp.pattern, unicode(exc_value)))
            else:
                if not expected_regexp.search(str(exc_value)):
                    raise self.failureException(
                        '"%s" does not match "%s"' %
                        (expected_regexp.pattern, str(exc_value)))
        else:
            if hasattr(expected_exception, '__name__'):
                excName = expected_exception.__name__
            else:
                excName = str(expected_exception)
            raise self.failureException("%s not raised" % excName)

    def assertDictContainsSubset(self, expected, actual, msg=None):
        """Checks whether actual is a superset of expected."""

        def safe_repr(obj, short=False):
            _MAX_LENGTH = 80
            try:
                result = repr(obj)
            except Exception:
                result = object.__repr__(obj)
            if not short or len(result) < _MAX_LENGTH:
                return result
            return result[:_MAX_LENGTH] + ' [truncated]...'

        missing = []
        mismatched = []
        for key, value in six.iteritems(expected):
            if key not in actual:
                missing.append(key)
            elif value != actual[key]:
                mismatched.append('%s, expected: %s, actual: %s' %
                                  (safe_repr(key), safe_repr(value),
                                   safe_repr(actual[key])))

        if not (missing or mismatched):
            return

        standardMsg = ''
        if missing:
            standardMsg = 'Missing: %s' % ','.join(safe_repr(m) for m in
                                                   missing)
        if mismatched:
            if standardMsg:
                standardMsg += '; '
            standardMsg += 'Mismatched values: %s' % ','.join(mismatched)

        self.fail(self._formatMessage(msg, standardMsg))

    @property
    def ipv6_enabled(self):
        if socket.has_ipv6:
            sock = None
            try:
                sock = socket.socket(socket.AF_INET6)
                # NOTE(Mouad): Try to bind to IPv6 loopback ip address.
                sock.bind(("::1", 0))
                return True
            except socket.error:
                pass
            finally:
                if sock:
                    sock.close()
        return False

    def skip_if_no_ipv6(self):
        if not self.ipv6_enabled:
            raise self.skipTest("IPv6 is not enabled in the system")

    def skip_if_env_not_set(self, env_var):
        if not os.environ.get(env_var):
            self.skipTest('Env variable %s is not set.' % env_var)


class SQLDriverOverrides(object):
    """A mixin for consolidating sql-specific test overrides."""
    def config_overrides(self):
        super(SQLDriverOverrides, self).config_overrides()
        # SQL specific driver overrides
        self.config_fixture.config(
            group='catalog',
            driver='keystone.catalog.backends.sql.Catalog')
        self.config_fixture.config(
            group='identity',
            driver='keystone.identity.backends.sql.Identity')
        self.config_fixture.config(
            group='policy',
            driver='keystone.policy.backends.sql.Policy')
        self.config_fixture.config(
            group='revoke',
            driver='keystone.contrib.revoke.backends.sql.Revoke')
        self.config_fixture.config(
            group='token',
            driver='keystone.token.persistence.backends.sql.Token')
        self.config_fixture.config(
            group='trust',
            driver='keystone.trust.backends.sql.Trust')
apache-2.0
sdarji/lpthw
Lib/site-packages/pip/_vendor/requests/packages/urllib3/poolmanager.py
548
8977
# urllib3/poolmanager.py
# Copyright 2008-2014 Andrey Petrov and contributors (see CONTRIBUTORS.txt)
#
# This module is part of urllib3 and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php

import logging

try:  # Python 3
    from urllib.parse import urljoin
except ImportError:
    from urlparse import urljoin

from ._collections import RecentlyUsedContainer
from .connectionpool import HTTPConnectionPool, HTTPSConnectionPool
from .connectionpool import port_by_scheme
from .request import RequestMethods
from .util import parse_url


__all__ = ['PoolManager', 'ProxyManager', 'proxy_from_url']


pool_classes_by_scheme = {
    'http': HTTPConnectionPool,
    'https': HTTPSConnectionPool,
}

log = logging.getLogger(__name__)

SSL_KEYWORDS = ('key_file', 'cert_file', 'cert_reqs', 'ca_certs',
                'ssl_version')


class PoolManager(RequestMethods):
    """
    Allows for arbitrary requests while transparently keeping track of
    necessary connection pools for you.

    :param num_pools:
        Number of connection pools to cache before discarding the least
        recently used pool.

    :param headers:
        Headers to include with all requests, unless other headers are given
        explicitly.

    :param \**connection_pool_kw:
        Additional parameters are used to create fresh
        :class:`urllib3.connectionpool.ConnectionPool` instances.

    Example: ::

        >>> manager = PoolManager(num_pools=2)
        >>> r = manager.request('GET', 'http://google.com/')
        >>> r = manager.request('GET', 'http://google.com/mail')
        >>> r = manager.request('GET', 'http://yahoo.com/')
        >>> len(manager.pools)
        2

    """

    proxy = None

    def __init__(self, num_pools=10, headers=None, **connection_pool_kw):
        RequestMethods.__init__(self, headers)
        self.connection_pool_kw = connection_pool_kw
        self.pools = RecentlyUsedContainer(num_pools,
                                           dispose_func=lambda p: p.close())

    def _new_pool(self, scheme, host, port):
        """
        Create a new :class:`ConnectionPool` based on host, port and scheme.

        This method is used to actually create the connection pools handed out
        by :meth:`connection_from_url` and companion methods. It is intended
        to be overridden for customization.
        """
        pool_cls = pool_classes_by_scheme[scheme]
        kwargs = self.connection_pool_kw
        if scheme == 'http':
            kwargs = self.connection_pool_kw.copy()
            for kw in SSL_KEYWORDS:
                kwargs.pop(kw, None)

        return pool_cls(host, port, **kwargs)

    def clear(self):
        """
        Empty our store of pools and direct them all to close.

        This will not affect in-flight connections, but they will not be
        re-used after completion.
        """
        self.pools.clear()

    def connection_from_host(self, host, port=None, scheme='http'):
        """
        Get a :class:`ConnectionPool` based on the host, port, and scheme.

        If ``port`` isn't given, it will be derived from the ``scheme`` using
        ``urllib3.connectionpool.port_by_scheme``.
        """
        scheme = scheme or 'http'
        port = port or port_by_scheme.get(scheme, 80)

        pool_key = (scheme, host, port)

        with self.pools.lock:
            # If the scheme, host, or port doesn't match existing open
            # connections, open a new ConnectionPool.
            pool = self.pools.get(pool_key)
            if pool:
                return pool

            # Make a fresh ConnectionPool of the desired type
            pool = self._new_pool(scheme, host, port)
            self.pools[pool_key] = pool
        return pool

    def connection_from_url(self, url):
        """
        Similar to :func:`urllib3.connectionpool.connection_from_url` but
        doesn't pass any additional parameters to the
        :class:`urllib3.connectionpool.ConnectionPool` constructor.

        Additional parameters are taken from the :class:`.PoolManager`
        constructor.
        """
        u = parse_url(url)
        return self.connection_from_host(u.host, port=u.port, scheme=u.scheme)

    def urlopen(self, method, url, redirect=True, **kw):
        """
        Same as :meth:`urllib3.connectionpool.HTTPConnectionPool.urlopen`
        with custom cross-host redirect logic and only sends the request-uri
        portion of the ``url``.

        The given ``url`` parameter must be absolute, such that an appropriate
        :class:`urllib3.connectionpool.ConnectionPool` can be chosen for it.
        """
        u = parse_url(url)
        conn = self.connection_from_host(u.host, port=u.port, scheme=u.scheme)

        kw['assert_same_host'] = False
        kw['redirect'] = False
        if 'headers' not in kw:
            kw['headers'] = self.headers

        if self.proxy is not None and u.scheme == "http":
            response = conn.urlopen(method, url, **kw)
        else:
            response = conn.urlopen(method, u.request_uri, **kw)

        redirect_location = redirect and response.get_redirect_location()
        if not redirect_location:
            return response

        # Support relative URLs for redirecting.
        redirect_location = urljoin(url, redirect_location)

        # RFC 2616, Section 10.3.4
        if response.status == 303:
            method = 'GET'

        log.info("Redirecting %s -> %s" % (url, redirect_location))
        kw['retries'] = kw.get('retries', 3) - 1  # Persist retries countdown
        kw['redirect'] = redirect
        return self.urlopen(method, redirect_location, **kw)


class ProxyManager(PoolManager):
    """
    Behaves just like :class:`PoolManager`, but sends all requests through
    the defined proxy, using the CONNECT method for HTTPS URLs.

    :param proxy_url:
        The URL of the proxy to be used.

    :param proxy_headers:
        A dictionary containing headers that will be sent to the proxy. In
        case of HTTP they are being sent with each request, while in the
        HTTPS/CONNECT case they are sent only once. Could be used for proxy
        authentication.

    Example:
        >>> proxy = urllib3.ProxyManager('http://localhost:3128/')
        >>> r1 = proxy.request('GET', 'http://google.com/')
        >>> r2 = proxy.request('GET', 'http://httpbin.org/')
        >>> len(proxy.pools)
        1
        >>> r3 = proxy.request('GET', 'https://httpbin.org/')
        >>> r4 = proxy.request('GET', 'https://twitter.com/')
        >>> len(proxy.pools)
        3

    """

    def __init__(self, proxy_url, num_pools=10, headers=None,
                 proxy_headers=None, **connection_pool_kw):

        if isinstance(proxy_url, HTTPConnectionPool):
            proxy_url = '%s://%s:%i' % (proxy_url.scheme, proxy_url.host,
                                        proxy_url.port)
        proxy = parse_url(proxy_url)
        if not proxy.port:
            port = port_by_scheme.get(proxy.scheme, 80)
            proxy = proxy._replace(port=port)
        self.proxy = proxy
        self.proxy_headers = proxy_headers or {}
        assert self.proxy.scheme in ("http", "https"), \
            'Not supported proxy scheme %s' % self.proxy.scheme
        connection_pool_kw['_proxy'] = self.proxy
        connection_pool_kw['_proxy_headers'] = self.proxy_headers
        super(ProxyManager, self).__init__(
            num_pools, headers, **connection_pool_kw)

    def connection_from_host(self, host, port=None, scheme='http'):
        if scheme == "https":
            return super(ProxyManager, self).connection_from_host(
                host, port, scheme)

        return super(ProxyManager, self).connection_from_host(
            self.proxy.host, self.proxy.port, self.proxy.scheme)

    def _set_proxy_headers(self, url, headers=None):
        """
        Sets headers needed by proxies: specifically, the Accept and Host
        headers. Only sets headers not provided by the user.
        """
        headers_ = {'Accept': '*/*'}

        netloc = parse_url(url).netloc
        if netloc:
            headers_['Host'] = netloc

        if headers:
            headers_.update(headers)
        return headers_

    def urlopen(self, method, url, redirect=True, **kw):
        "Same as HTTP(S)ConnectionPool.urlopen, ``url`` must be absolute."
        u = parse_url(url)

        if u.scheme == "http":
            # For proxied HTTPS requests, httplib sets the necessary headers
            # on the CONNECT to the proxy. For HTTP, we'll definitely
            # need to set 'Host' at the very least.
            kw['headers'] = self._set_proxy_headers(url, kw.get('headers',
                                                                self.headers))

        return super(ProxyManager, self).urlopen(method, url, redirect, **kw)


def proxy_from_url(url, **kw):
    return ProxyManager(proxy_url=url, **kw)
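A short offline usage sketch of the pool caching documented above (assumes a urllib3 of roughly this vintage is importable as urllib3; the host names are placeholders). connection_from_host only instantiates pools, so no sockets are opened.

from urllib3 import PoolManager  # assumes urllib3 is installed

# Keep at most two pools; the least recently used one is disposed first.
manager = PoolManager(num_pools=2)

# connection_from_host only instantiates pools; no connections yet.
manager.connection_from_host('example.com', scheme='http')
manager.connection_from_host('example.org', scheme='http')
print(len(manager.pools))  # 2

# A third distinct (scheme, host, port) key evicts the LRU pool.
manager.connection_from_host('example.net', scheme='http')
print(len(manager.pools))  # still 2; the 'example.com' pool was disposed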
unlicense
sdarji/lpthw
Lib/site-packages/pip/_vendor/requests/structures.py
277
3541
# -*- coding: utf-8 -*-

"""
requests.structures
~~~~~~~~~~~~~~~~~~~

Data structures that power Requests.

"""

import os
import collections
from itertools import islice


class IteratorProxy(object):
    """docstring for IteratorProxy"""
    def __init__(self, i):
        self.i = i
        # self.i = chain.from_iterable(i)

    def __iter__(self):
        return self.i

    def __len__(self):
        if hasattr(self.i, '__len__'):
            return len(self.i)
        if hasattr(self.i, 'len'):
            return self.i.len
        if hasattr(self.i, 'fileno'):
            return os.fstat(self.i.fileno()).st_size

    def read(self, n):
        return "".join(islice(self.i, None, n))


class CaseInsensitiveDict(collections.MutableMapping):
    """
    A case-insensitive ``dict``-like object.

    Implements all methods and operations of
    ``collections.MutableMapping`` as well as dict's ``copy``. Also
    provides ``lower_items``.

    All keys are expected to be strings. The structure remembers the
    case of the last key to be set, and ``iter(instance)``,
    ``keys()``, ``items()``, ``iterkeys()``, and ``iteritems()``
    will contain case-sensitive keys. However, querying and contains
    testing is case insensitive:

        cid = CaseInsensitiveDict()
        cid['Accept'] = 'application/json'
        cid['aCCEPT'] == 'application/json'  # True
        list(cid) == ['Accept']  # True

    For example, ``headers['content-encoding']`` will return the
    value of a ``'Content-Encoding'`` response header, regardless
    of how the header name was originally stored.

    If the constructor, ``.update``, or equality comparison
    operations are given keys that have equal ``.lower()``s, the
    behavior is undefined.

    """
    def __init__(self, data=None, **kwargs):
        self._store = dict()
        if data is None:
            data = {}
        self.update(data, **kwargs)

    def __setitem__(self, key, value):
        # Use the lowercased key for lookups, but store the actual
        # key alongside the value.
        self._store[key.lower()] = (key, value)

    def __getitem__(self, key):
        return self._store[key.lower()][1]

    def __delitem__(self, key):
        del self._store[key.lower()]

    def __iter__(self):
        return (casedkey for casedkey, mappedvalue in self._store.values())

    def __len__(self):
        return len(self._store)

    def lower_items(self):
        """Like iteritems(), but with all lowercase keys."""
        return (
            (lowerkey, keyval[1])
            for (lowerkey, keyval)
            in self._store.items()
        )

    def __eq__(self, other):
        if isinstance(other, collections.Mapping):
            other = CaseInsensitiveDict(other)
        else:
            return NotImplemented
        # Compare insensitively
        return dict(self.lower_items()) == dict(other.lower_items())

    # Copy is required
    def copy(self):
        return CaseInsensitiveDict(self._store.values())

    def __repr__(self):
        return str(dict(self.items()))


class LookupDict(dict):
    """Dictionary lookup object."""

    def __init__(self, name=None):
        self.name = name
        super(LookupDict, self).__init__()

    def __repr__(self):
        return '<lookup \'%s\'>' % (self.name)

    def __getitem__(self, key):
        # We allow fall-through here, so values default to None
        return self.__dict__.get(key, None)

    def get(self, key, default=None):
        return self.__dict__.get(key, default)
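A quick usage sketch of CaseInsensitiveDict matching the docstring above (assumes requests is installed, so the class is importable from requests.structures).

from requests.structures import CaseInsensitiveDict  # assumes requests is installed

cid = CaseInsensitiveDict()
cid['Accept'] = 'application/json'

# Lookup and membership tests are case-insensitive...
print(cid['aCCEPT'])    # 'application/json'
print('ACCEPT' in cid)  # True

# ...but iteration preserves the case of the last key that was set.
cid['aCCept'] = 'text/html'
print(list(cid))        # ['aCCept']

# lower_items() yields lowercased keys, handy for canonical comparison.
print(dict(cid.lower_items()))  # {'accept': 'text/html'}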
unlicense
AndreasHundt/mytv-channels-list
ElementTree.py
14
44136
# # ElementTree # $Id: ElementTree.py 3224 2007-08-27 21:23:39Z fredrik $ # # light-weight XML support for Python 1.5.2 and later. # # history: # 2001-10-20 fl created (from various sources) # 2001-11-01 fl return root from parse method # 2002-02-16 fl sort attributes in lexical order # 2002-04-06 fl TreeBuilder refactoring, added PythonDoc markup # 2002-05-01 fl finished TreeBuilder refactoring # 2002-07-14 fl added basic namespace support to ElementTree.write # 2002-07-25 fl added QName attribute support # 2002-10-20 fl fixed encoding in write # 2002-11-24 fl changed default encoding to ascii; fixed attribute encoding # 2002-11-27 fl accept file objects or file names for parse/write # 2002-12-04 fl moved XMLTreeBuilder back to this module # 2003-01-11 fl fixed entity encoding glitch for us-ascii # 2003-02-13 fl added XML literal factory # 2003-02-21 fl added ProcessingInstruction/PI factory # 2003-05-11 fl added tostring/fromstring helpers # 2003-05-26 fl added ElementPath support # 2003-07-05 fl added makeelement factory method # 2003-07-28 fl added more well-known namespace prefixes # 2003-08-15 fl fixed typo in ElementTree.findtext (Thomas Dartsch) # 2003-09-04 fl fall back on emulator if ElementPath is not installed # 2003-10-31 fl markup updates # 2003-11-15 fl fixed nested namespace bug # 2004-03-28 fl added XMLID helper # 2004-06-02 fl added default support to findtext # 2004-06-08 fl fixed encoding of non-ascii element/attribute names # 2004-08-23 fl take advantage of post-2.1 expat features # 2005-02-01 fl added iterparse implementation # 2005-03-02 fl fixed iterparse support for pre-2.2 versions # 2006-11-18 fl added parser support for IronPython (ElementIron) # 2007-08-27 fl fixed newlines in attributes # # Copyright (c) 1999-2007 by Fredrik Lundh. All rights reserved. # # fredrik@pythonware.com # http://www.pythonware.com # # -------------------------------------------------------------------- # The ElementTree toolkit is # # Copyright (c) 1999-2007 by Fredrik Lundh # # By obtaining, using, and/or copying this software and/or its # associated documentation, you agree that you have read, understood, # and will comply with the following terms and conditions: # # Permission to use, copy, modify, and distribute this software and # its associated documentation for any purpose and without fee is # hereby granted, provided that the above copyright notice appears in # all copies, and that both that copyright notice and this permission # notice appear in supporting documentation, and that the name of # Secret Labs AB or the author not be used in advertising or publicity # pertaining to distribution of the software without specific, written # prior permission. # # SECRET LABS AB AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD # TO THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANT- # ABILITY AND FITNESS. IN NO EVENT SHALL SECRET LABS AB OR THE AUTHOR # BE LIABLE FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY # DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, # WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS # ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE # OF THIS SOFTWARE. 
# -------------------------------------------------------------------- __all__ = [ # public symbols "Comment", "dump", "Element", "ElementTree", "fromstring", "iselement", "iterparse", "parse", "PI", "ProcessingInstruction", "QName", "SubElement", "tostring", "TreeBuilder", "VERSION", "XML", "XMLTreeBuilder", ] # parser api override (None = use default) parser_api = None # TODO: add support for custom namespace resolvers/default namespaces # TODO: add improved support for incremental parsing VERSION = "1.2.7" ## # The <b>Element</b> type is a flexible container object, designed to # store hierarchical data structures in memory. The type can be # described as a cross between a list and a dictionary. # <p> # Each element has a number of properties associated with it: # <ul> # <li>a <i>tag</i>. This is a string identifying what kind of data # this element represents (the element type, in other words).</li> # <li>a number of <i>attributes</i>, stored in a Python dictionary.</li> # <li>a <i>text</i> string.</li> # <li>an optional <i>tail</i> string.</li> # <li>a number of <i>child elements</i>, stored in a Python sequence</li> # </ul> # # To create an element instance, use the {@link #Element} or {@link # #SubElement} factory functions. # <p> # The {@link #ElementTree} class can be used to wrap an element # structure, and convert it from and to XML. ## import sys, re try: import string except: # emulate string module under IronPython class string(object): def join(self, seq, sep): return sep.join(seq) def replace(self, text, *args): return text.replace(*args) def split(self, text, *args): return text.split(*args) def strip(self, text, *args): return text.strip(*args) string = string() class _SimpleElementPath: # emulate pre-1.2 find/findtext/findall behaviour def find(self, element, tag): for elem in element: if elem.tag == tag: return elem return None def findtext(self, element, tag, default=None): for elem in element: if elem.tag == tag: return elem.text or "" return default def findall(self, element, tag): if tag[:3] == ".//": return element.getiterator(tag[3:]) result = [] for elem in element: if elem.tag == tag: result.append(elem) return result try: import ElementPath except ImportError: # FIXME: issue warning in this case? ElementPath = _SimpleElementPath() class DefaultParserAPI: def parse(self, source, parser=None): if not hasattr(source, "read"): source = open(source, "rb") if not parser: parser = XMLTreeBuilder() while 1: data = source.read(32768) if not data: break parser.feed(data) return parser.close() def iterparse(self, source, events): return _iterparse(source, events) def fromstring(self, text): parser = XMLTreeBuilder() parser.feed(text) return parser.close() parser_api = default_parser_api = DefaultParserAPI() ## # Internal element class. This class defines the Element interface, # and provides a reference implementation of this interface. # <p> # You should not create instances of this class directly. Use the # appropriate factory functions instead, such as {@link #Element} # and {@link #SubElement}. # # @see Element # @see SubElement # @see Comment # @see ProcessingInstruction class _ElementInterface: # <tag attrib>text<child/>...</tag>tail ## # (Attribute) Element tag. tag = None ## # (Attribute) Element attribute dictionary. Where possible, use # {@link #_ElementInterface.get}, # {@link #_ElementInterface.set}, # {@link #_ElementInterface.keys}, and # {@link #_ElementInterface.items} to access # element attributes. 
attrib = None ## # (Attribute) Text before first subelement. This is either a # string or the value None, if there was no text. text = None ## # (Attribute) Text after this element's end tag, but before the # next sibling element's start tag. This is either a string or # the value None, if there was no text. tail = None # text after end tag, if any def __init__(self, tag, attrib): self.tag = tag self.attrib = attrib self._children = [] def __repr__(self): return "<Element %s at %x>" % (self.tag, id(self)) ## # Creates a new element object of the same type as this element. # # @param tag Element tag. # @param attrib Element attributes, given as a dictionary. # @return A new element instance. def makeelement(self, tag, attrib): return Element(tag, attrib) ## # Returns the number of subelements. # # @return The number of subelements. def __len__(self): return len(self._children) ## # Returns the given subelement. # # @param index What subelement to return. # @return The given subelement. # @exception IndexError If the given element does not exist. def __getitem__(self, index): return self._children[index] ## # Replaces the given subelement. # # @param index What subelement to replace. # @param element The new element value. # @exception IndexError If the given element does not exist. # @exception AssertionError If element is not a valid object. def __setitem__(self, index, element): assert iselement(element) self._children[index] = element ## # Deletes the given subelement. # # @param index What subelement to delete. # @exception IndexError If the given element does not exist. def __delitem__(self, index): del self._children[index] ## # Returns a list containing subelements in the given range. # # @param start The first subelement to return. # @param stop The first subelement that shouldn't be returned. # @return A sequence object containing subelements. def __getslice__(self, start, stop): return self._children[start:stop] ## # Replaces a number of subelements with elements from a sequence. # # @param start The first subelement to replace. # @param stop The first subelement that shouldn't be replaced. # @param elements A sequence object with zero or more elements. # @exception AssertionError If a sequence member is not a valid object. def __setslice__(self, start, stop, elements): for element in elements: assert iselement(element) self._children[start:stop] = list(elements) ## # Deletes a number of subelements. # # @param start The first subelement to delete. # @param stop The first subelement to leave in there. def __delslice__(self, start, stop): del self._children[start:stop] ## # Adds a subelement to the end of this element. # # @param element The element to add. # @exception AssertionError If a sequence member is not a valid object. def append(self, element): assert iselement(element) self._children.append(element) ## # Inserts a subelement at the given position in this element. # # @param index Where to insert the new subelement. # @exception AssertionError If the element is not a valid object. def insert(self, index, element): assert iselement(element) self._children.insert(index, element) ## # Removes a matching subelement. Unlike the <b>find</b> methods, # this method compares elements based on identity, not on tag # value or contents. # # @param element What element to remove. # @exception ValueError If a matching element could not be found. # @exception AssertionError If the element is not a valid object. 
def remove(self, element): assert iselement(element) self._children.remove(element) ## # Returns all subelements. The elements are returned in document # order. # # @return A list of subelements. # @defreturn list of Element instances def getchildren(self): return self._children ## # Finds the first matching subelement, by tag name or path. # # @param path What element to look for. # @return The first matching element, or None if no element was found. # @defreturn Element or None def find(self, path): return ElementPath.find(self, path) ## # Finds text for the first matching subelement, by tag name or path. # # @param path What element to look for. # @param default What to return if the element was not found. # @return The text content of the first matching element, or the # default value no element was found. Note that if the element # has is found, but has no text content, this method returns an # empty string. # @defreturn string def findtext(self, path, default=None): return ElementPath.findtext(self, path, default) ## # Finds all matching subelements, by tag name or path. # # @param path What element to look for. # @return A list or iterator containing all matching elements, # in document order. # @defreturn list of Element instances def findall(self, path): return ElementPath.findall(self, path) ## # Resets an element. This function removes all subelements, clears # all attributes, and sets the text and tail attributes to None. def clear(self): self.attrib.clear() self._children = [] self.text = self.tail = None ## # Gets an element attribute. # # @param key What attribute to look for. # @param default What to return if the attribute was not found. # @return The attribute value, or the default value, if the # attribute was not found. # @defreturn string or None def get(self, key, default=None): return self.attrib.get(key, default) ## # Sets an element attribute. # # @param key What attribute to set. # @param value The attribute value. def set(self, key, value): self.attrib[key] = value ## # Gets a list of attribute names. The names are returned in an # arbitrary order (just like for an ordinary Python dictionary). # # @return A list of element attribute names. # @defreturn list of strings def keys(self): return self.attrib.keys() ## # Gets element attributes, as a sequence. The attributes are # returned in an arbitrary order. # # @return A list of (name, value) tuples for all attributes. # @defreturn list of (string, string) tuples def items(self): return self.attrib.items() ## # Creates a tree iterator. The iterator loops over this element # and all subelements, in document order, and returns all elements # with a matching tag. # <p> # If the tree structure is modified during iteration, the result # is undefined. # # @param tag What tags to look for (default is to return all elements). # @return A list or iterator containing all the matching elements. # @defreturn list or iterator def getiterator(self, tag=None): nodes = [] if tag == "*": tag = None if tag is None or self.tag == tag: nodes.append(self) for node in self._children: nodes.extend(node.getiterator(tag)) return nodes # compatibility _Element = _ElementInterface ## # Element factory. This function returns an object implementing the # standard Element interface. The exact class or type of that object # is implementation dependent, but it will always be compatible with # the {@link #_ElementInterface} class in this module. 
# <p> # The element name, attribute names, and attribute values can be # either 8-bit ASCII strings or Unicode strings. # # @param tag The element name. # @param attrib An optional dictionary, containing element attributes. # @param **extra Additional attributes, given as keyword arguments. # @return An element instance. # @defreturn Element def Element(tag, attrib={}, **extra): attrib = attrib.copy() attrib.update(extra) return _ElementInterface(tag, attrib) ## # Subelement factory. This function creates an element instance, and # appends it to an existing element. # <p> # The element name, attribute names, and attribute values can be # either 8-bit ASCII strings or Unicode strings. # # @param parent The parent element. # @param tag The subelement name. # @param attrib An optional dictionary, containing element attributes. # @param **extra Additional attributes, given as keyword arguments. # @return An element instance. # @defreturn Element def SubElement(parent, tag, attrib={}, **extra): attrib = attrib.copy() attrib.update(extra) element = parent.makeelement(tag, attrib) parent.append(element) return element ## # Comment element factory. This factory function creates a special # element that will be serialized as an XML comment. # <p> # The comment string can be either an 8-bit ASCII string or a Unicode # string. # # @param text A string containing the comment string. # @return An element instance, representing a comment. # @defreturn Element def Comment(text=None): element = Element(Comment) element.text = text return element ## # PI element factory. This factory function creates a special element # that will be serialized as an XML processing instruction. # # @param target A string containing the PI target. # @param text A string containing the PI contents, if any. # @return An element instance, representing a PI. # @defreturn Element def ProcessingInstruction(target, text=None): element = Element(ProcessingInstruction) element.text = target if text: element.text = element.text + " " + text return element PI = ProcessingInstruction ## # QName wrapper. This can be used to wrap a QName attribute value, in # order to get proper namespace handling on output. # # @param text A string containing the QName value, in the form {uri}local, # or, if the tag argument is given, the URI part of a QName. # @param tag Optional tag. If given, the first argument is interpreted as # an URI, and this argument is interpreted as a local name. # @return An opaque object, representing the QName. class QName: def __init__(self, text_or_uri, tag=None): if tag: text_or_uri = "{%s}%s" % (text_or_uri, tag) self.text = text_or_uri def __str__(self): return self.text def __hash__(self): return hash(self.text) def __cmp__(self, other): if isinstance(other, QName): return cmp(self.text, other.text) return cmp(self.text, other) ## # ElementTree wrapper class. This class represents an entire element # hierarchy, and adds some extra support for serialization to and from # standard XML. # # @param element Optional root element. # @keyparam file Optional file handle or name. If given, the # tree is initialized with the contents of this XML file. class ElementTree: def __init__(self, element=None, file=None): assert element is None or iselement(element) self._root = element # first node if file: self.parse(file) ## # Gets the root element for this tree. # # @return An element instance. # @defreturn Element def getroot(self): return self._root ## # Replaces the root element for this tree. 
This discards the # current contents of the tree, and replaces it with the given # element. Use with care. # # @param element An element instance. def _setroot(self, element): assert iselement(element) self._root = element ## # Loads an external XML document into this element tree. # # @param source A file name or file object. # @param parser An optional parser instance. If not given, the # standard {@link XMLTreeBuilder} parser is used. # @return The document root element. # @defreturn Element def parse(self, source, parser=None): if parser: tree = default_parser_api.parse(source, parser) else: tree = parser_api.parse(source) self._root = tree return tree ## # Creates a tree iterator for the root element. The iterator loops # over all elements in this tree, in document order. # # @param tag What tags to look for (default is to return all elements) # @return An iterator. # @defreturn iterator def getiterator(self, tag=None): assert self._root is not None return self._root.getiterator(tag) ## # Finds the first toplevel element with given tag. # Same as getroot().find(path). # # @param path What element to look for. # @return The first matching element, or None if no element was found. # @defreturn Element or None def find(self, path): assert self._root is not None if path[:1] == "/": path = "." + path return self._root.find(path) ## # Finds the element text for the first toplevel element with given # tag. Same as getroot().findtext(path). # # @param path What toplevel element to look for. # @param default What to return if the element was not found. # @return The text content of the first matching element, or the # default value no element was found. Note that if the element # has is found, but has no text content, this method returns an # empty string. # @defreturn string def findtext(self, path, default=None): assert self._root is not None if path[:1] == "/": path = "." + path return self._root.findtext(path, default) ## # Finds all toplevel elements with the given tag. # Same as getroot().findall(path). # # @param path What element to look for. # @return A list or iterator containing all matching elements, # in document order. # @defreturn list of Element instances def findall(self, path): assert self._root is not None if path[:1] == "/": path = "." + path return self._root.findall(path) ## # Writes the element tree to a file, as XML. # # @param file A file name, or a file object opened for writing. # @param encoding Optional output encoding (default is US-ASCII). 
def write(self, file, encoding="us-ascii"): assert self._root is not None if not hasattr(file, "write"): file = open(file, "wb") if not encoding: encoding = "us-ascii" elif encoding != "utf-8" and encoding != "us-ascii": file.write("<?xml version='1.0' encoding='%s'?>\n" % encoding) self._write(file, self._root, encoding, {}) def _write(self, file, node, encoding, namespaces): # write XML to file tag = node.tag if tag is Comment: file.write("<!-- %s -->" % _escape_cdata(node.text, encoding)) elif tag is ProcessingInstruction: file.write("<?%s?>" % _escape_cdata(node.text, encoding)) else: items = node.items() xmlns_items = [] # new namespaces in this scope try: if isinstance(tag, QName) or tag[:1] == "{": tag, xmlns = fixtag(tag, namespaces) if xmlns: xmlns_items.append(xmlns) except TypeError: _raise_serialization_error(tag) file.write("<" + _encode(tag, encoding)) if items or xmlns_items: items.sort() # lexical order for k, v in items: try: if isinstance(k, QName) or k[:1] == "{": k, xmlns = fixtag(k, namespaces) if xmlns: xmlns_items.append(xmlns) except TypeError: _raise_serialization_error(k) try: if isinstance(v, QName): v, xmlns = fixtag(v, namespaces) if xmlns: xmlns_items.append(xmlns) except TypeError: _raise_serialization_error(v) file.write(" %s=\"%s\"" % (_encode(k, encoding), _escape_attrib(v, encoding))) for k, v in xmlns_items: file.write(" %s=\"%s\"" % (_encode(k, encoding), _escape_attrib(v, encoding))) if node.text or len(node): file.write(">") if node.text: file.write(_escape_cdata(node.text, encoding)) for n in node: self._write(file, n, encoding, namespaces) file.write("</" + _encode(tag, encoding) + ">") else: file.write(" />") for k, v in xmlns_items: del namespaces[v] if node.tail: file.write(_escape_cdata(node.tail, encoding)) # -------------------------------------------------------------------- # helpers ## # Checks if an object appears to be a valid element object. # # @param An element instance. # @return A true value if this is an element object. # @defreturn flag def iselement(element): # FIXME: not sure about this; might be a better idea to look # for tag/attrib/text attributes return isinstance(element, _ElementInterface) or hasattr(element, "tag") ## # Writes an element tree or element structure to sys.stdout. This # function should be used for debugging only. # <p> # The exact output format is implementation dependent. In this # version, it's written as an ordinary XML file. # # @param elem An element tree or an individual element. 
def dump(elem): # debugging if not isinstance(elem, ElementTree): elem = ElementTree(elem) elem.write(sys.stdout) tail = elem.getroot().tail if not tail or tail[-1] != "\n": sys.stdout.write("\n") def _encode(s, encoding): try: return s.encode(encoding) except AttributeError: return s # 1.5.2: assume the string uses the right encoding if sys.version[:3] == "1.5": _escape = re.compile(r"[&<>\"\x80-\xff]+") # 1.5.2 else: _escape = re.compile(eval(r'u"[&<>\"\u0080-\uffff]+"')) _escape_map = { "&": "&amp;", "<": "&lt;", ">": "&gt;", '"': "&quot;", } _namespace_map = { # "well-known" namespace prefixes "http://www.w3.org/XML/1998/namespace": "xml", "http://www.w3.org/1999/xhtml": "html", "http://www.w3.org/1999/02/22-rdf-syntax-ns#": "rdf", "http://schemas.xmlsoap.org/wsdl/": "wsdl", } def _raise_serialization_error(text): raise TypeError( "cannot serialize %r (type %s)" % (text, type(text).__name__) ) def _encode_entity(text, pattern=_escape): # map reserved and non-ascii characters to numerical entities def escape_entities(m, map=_escape_map): out = [] append = out.append for char in m.group(): text = map.get(char) if text is None: text = "&#%d;" % ord(char) append(text) return string.join(out, "") try: return _encode(pattern.sub(escape_entities, text), "ascii") except TypeError: _raise_serialization_error(text) # # the following functions assume an ascii-compatible encoding # (or "utf-16") def _escape_cdata(text, encoding=None, replace=string.replace): # escape character data try: if encoding: try: text = _encode(text, encoding) except UnicodeError: return _encode_entity(text) if "&" in text: text = replace(text, "&", "&amp;") if "<" in text: text = replace(text, "<", "&lt;") if ">" in text: text = replace(text, ">", "&gt;") return text except (TypeError, AttributeError): _raise_serialization_error(text) def _escape_attrib(text, encoding=None, replace=string.replace): # escape attribute value try: if encoding: try: text = _encode(text, encoding) except UnicodeError: return _encode_entity(text) if "&" in text: text = replace(text, "&", "&amp;") if "\"" in text: text = replace(text, "\"", "&quot;") if "<" in text: text = replace(text, "<", "&lt;") if ">" in text: text = replace(text, ">", "&gt;") if "\n" in text: text = replace(text, "\n", "&#10;") return text except (TypeError, AttributeError): _raise_serialization_error(text) def fixtag(tag, namespaces): # given a decorated tag (of the form {uri}tag), return prefixed # tag and namespace declaration, if any if isinstance(tag, QName): tag = tag.text namespace_uri, tag = string.split(tag[1:], "}", 1) prefix = namespaces.get(namespace_uri) if prefix is None: prefix = _namespace_map.get(namespace_uri) if prefix is None: prefix = "ns%d" % len(namespaces) namespaces[namespace_uri] = prefix if prefix == "xml": xmlns = None else: xmlns = ("xmlns:%s" % prefix, namespace_uri) else: xmlns = None return "%s:%s" % (prefix, tag), xmlns ## # Parses an XML document into an element tree. # # @param source A filename or file object containing XML data. # @param parser An optional parser instance. If not given, the # standard {@link XMLTreeBuilder} parser is used. # @return An ElementTree instance def parse(source, parser=None): if parser: tree = default_parser_api.parse(source, parser) else: tree = parser_api.parse(source) return ElementTree(tree) ## # Parses an XML document into an element tree incrementally, and reports # what's going on to the user. # # @param source A filename or file object containing XML data. 
# @param events A list of events to report back. If omitted, only "end" # events are reported. # @return A (event, elem) iterator. def iterparse(source, events=None): return parser_api.iterparse(source, events) class _iterparse: def __init__(self, source, events): if not hasattr(source, "read"): source = open(source, "rb") self._file = source self._events = [] self._index = 0 self.root = self._root = None self._parser = XMLTreeBuilder() # wire up the parser for event reporting parser = self._parser._parser append = self._events.append if events is None: events = ["end"] for event in events: if event == "start": try: parser.ordered_attributes = 1 parser.specified_attributes = 1 def handler(tag, attrib_in, event=event, append=append, start=self._parser._start_list): append((event, start(tag, attrib_in))) parser.StartElementHandler = handler except AttributeError: def handler(tag, attrib_in, event=event, append=append, start=self._parser._start): append((event, start(tag, attrib_in))) parser.StartElementHandler = handler elif event == "end": def handler(tag, event=event, append=append, end=self._parser._end): append((event, end(tag))) parser.EndElementHandler = handler elif event == "start-ns": def handler(prefix, uri, event=event, append=append): try: uri = _encode(uri, "ascii") except UnicodeError: pass append((event, (prefix or "", uri))) parser.StartNamespaceDeclHandler = handler elif event == "end-ns": def handler(prefix, event=event, append=append): append((event, None)) parser.EndNamespaceDeclHandler = handler def next(self): while 1: try: item = self._events[self._index] except IndexError: if self._parser is None: self.root = self._root try: raise StopIteration except NameError: raise IndexError # load event buffer del self._events[:] self._index = 0 data = self._file.read(16384) if data: self._parser.feed(data) else: self._root = self._parser.close() self._parser = None else: self._index = self._index + 1 return item try: iter def __iter__(self): return self except NameError: def __getitem__(self, index): return self.next() ## # Parses an XML document from a string constant. This function can # be used to embed "XML literals" in Python code. # # @param source A string containing XML data. # @return An Element instance. # @defreturn Element def XML(text): api = parser_api or default_parser_api return api.fromstring(text) ## # Parses an XML document from a string constant, and also returns # a dictionary which maps from element id:s to elements. # # @param source A string containing XML data. # @return A tuple containing an Element instance and a dictionary. # @defreturn (Element, dictionary) def XMLID(text): api = parser_api or default_parser_api tree = api.fromstring(text) ids = {} for elem in tree.getiterator(): id = elem.get("id") if id: ids[id] = elem return tree, ids ## # Parses an XML document from a string constant. Same as {@link #XML}. # # @def fromstring(text) # @param source A string containing XML data. # @return An Element instance. # @defreturn Element fromstring = XML ## # Generates a string representation of an XML element, including all # subelements. # # @param element An Element instance. # @return An encoded string containing the XML data. # @defreturn string def tostring(element, encoding=None): class dummy: pass data = [] file = dummy() file.write = data.append ElementTree(element).write(file, encoding) return string.join(data, "") ## # Generic element structure builder. 
This builder converts a sequence # of {@link #TreeBuilder.start}, {@link #TreeBuilder.data}, and {@link # #TreeBuilder.end} method calls to a well-formed element structure. # <p> # You can use this class to build an element structure using a custom XML # parser, or a parser for some other XML-like format. # # @param element_factory Optional element factory. This factory # is called to create new Element instances, as necessary. class TreeBuilder: def __init__(self, element_factory=None): self._data = [] # data collector self._elem = [] # element stack self._last = None # last element self._tail = None # true if we're after an end tag if element_factory is None: element_factory = _ElementInterface self._factory = element_factory ## # Flushes the parser buffers, and returns the toplevel documen # element. # # @return An Element instance. # @defreturn Element def close(self): assert len(self._elem) == 0, "missing end tags" assert self._last != None, "missing toplevel element" return self._last def _flush(self): if self._data: if self._last is not None: text = string.join(self._data, "") if self._tail: assert self._last.tail is None, "internal error (tail)" self._last.tail = text else: assert self._last.text is None, "internal error (text)" self._last.text = text self._data = [] ## # Adds text to the current element. # # @param data A string. This should be either an 8-bit string # containing ASCII text, or a Unicode string. def data(self, data): self._data.append(data) ## # Opens a new element. # # @param tag The element name. # @param attrib A dictionary containing element attributes. # @return The opened element. # @defreturn Element def start(self, tag, attrs): self._flush() self._last = elem = self._factory(tag, attrs) if self._elem: self._elem[-1].append(elem) self._elem.append(elem) self._tail = 0 return elem ## # Closes the current element. # # @param tag The element name. # @return The closed element. # @defreturn Element def end(self, tag): self._flush() self._last = self._elem.pop() assert self._last.tag == tag,\ "end tag mismatch (expected %s, got %s)" % ( self._last.tag, tag) self._tail = 1 return self._last ## # Element structure builder for XML source data, based on the # <b>expat</b> parser. # # @keyparam target Target object. If omitted, the builder uses an # instance of the standard {@link #TreeBuilder} class. # @keyparam html Predefine HTML entities. This flag is not supported # by the current implementation. 
# @see #ElementTree # @see #TreeBuilder class XMLTreeBuilder: def __init__(self, html=0, target=None): try: from xml.parsers import expat except ImportError: raise ImportError( "No module named expat; use SimpleXMLTreeBuilder instead" ) self._parser = parser = expat.ParserCreate(None, "}") if target is None: target = TreeBuilder() self._target = target self._names = {} # name memo cache # callbacks parser.DefaultHandlerExpand = self._default parser.StartElementHandler = self._start parser.EndElementHandler = self._end parser.CharacterDataHandler = self._data # let expat do the buffering, if supported try: self._parser.buffer_text = 1 except AttributeError: pass # use new-style attribute handling, if supported try: self._parser.ordered_attributes = 1 self._parser.specified_attributes = 1 parser.StartElementHandler = self._start_list except AttributeError: pass encoding = None if not parser.returns_unicode: encoding = "utf-8" # target.xml(encoding, None) self._doctype = None self.entity = {} def _fixtext(self, text): # convert text string to ascii, if possible try: return _encode(text, "ascii") except UnicodeError: return text def _fixname(self, key): # expand qname, and convert name string to ascii, if possible try: name = self._names[key] except KeyError: name = key if "}" in name: name = "{" + name self._names[key] = name = self._fixtext(name) return name def _start(self, tag, attrib_in): fixname = self._fixname tag = fixname(tag) attrib = {} for key, value in attrib_in.items(): attrib[fixname(key)] = self._fixtext(value) return self._target.start(tag, attrib) def _start_list(self, tag, attrib_in): fixname = self._fixname tag = fixname(tag) attrib = {} if attrib_in: for i in range(0, len(attrib_in), 2): attrib[fixname(attrib_in[i])] = self._fixtext(attrib_in[i+1]) return self._target.start(tag, attrib) def _data(self, text): return self._target.data(self._fixtext(text)) def _end(self, tag): return self._target.end(self._fixname(tag)) def _default(self, text): prefix = text[:1] if prefix == "&": # deal with undefined entities try: self._target.data(self.entity[text[1:-1]]) except KeyError: from xml.parsers import expat raise expat.error( "undefined entity %s: line %d, column %d" % (text, self._parser.ErrorLineNumber, self._parser.ErrorColumnNumber) ) elif prefix == "<" and text[:9] == "<!DOCTYPE": self._doctype = [] # inside a doctype declaration elif self._doctype is not None: # parse doctype contents if prefix == ">": self._doctype = None return text = string.strip(text) if not text: return self._doctype.append(text) n = len(self._doctype) if n > 2: type = self._doctype[1] if type == "PUBLIC" and n == 4: name, type, pubid, system = self._doctype elif type == "SYSTEM" and n == 3: name, type, system = self._doctype pubid = None else: return if pubid: pubid = pubid[1:-1] self.doctype(name, pubid, system[1:-1]) self._doctype = None ## # Handles a doctype declaration. # # @param name Doctype name. # @param pubid Public identifier. # @param system System identifier. def doctype(self, name, pubid, system): pass ## # Feeds data to the parser. # # @param data Encoded data. def feed(self, data): self._parser.Parse(data, 0) ## # Finishes feeding data to the parser. # # @return An element structure. 
# @defreturn Element def close(self): self._parser.Parse("", 1) # end of data tree = self._target.close() del self._target, self._parser # get rid of circular references return tree # -------------------------------------------------------------------- # load platform specific extensions if sys.platform == "cli": try: import ElementIron except ImportError: pass # fall back on optional pyexpat emulation else: parser_api = ElementIron.ParserAPI(TreeBuilder) elif sys.platform.startswith("java"): try: import ElementJava except ImportError: pass else: parser_api = ElementJava.ParserAPI(TreeBuilder)
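A hedged usage sketch of the Element/SubElement/fromstring/tostring API this module defines; to stay runnable it imports the API-compatible stdlib xml.etree.ElementTree rather than this standalone 1.2.7 copy.

from xml.etree.ElementTree import Element, SubElement, fromstring, tostring

root = Element('channels')
chan = SubElement(root, 'channel', id='1')
chan.text = 'News'

# Serialise the tree back to a string (bytes on Python 3).
print(tostring(root))  # <channels><channel id="1">News</channel></channels>

# Parse an XML literal and walk it with find/findtext.
doc = fromstring('<tv><channel id="2">Sports</channel></tv>')
print(doc.find('channel').get('id'))  # 2
print(doc.findtext('channel'))        # Sports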
gpl-2.0
mromanello/CitationExtractor
citation_extractor/Utils/IO.py
1
1302
# -*- coding: utf-8 -*-
# author: Matteo Romanello, matteo.romanello@gmail.com

from __future__ import print_function
import pdb
import logging
import os
import codecs
#import knowledge_base
import glob
import sys, pprint, re, string
import pandas as pd
from random import *
from pyCTS import CTS_URN
from .strmatching import StringUtils
#import citation_extractor
#import xml.dom.minidom as mdom

global logger
logger = logging.getLogger(__name__)

NIL_ENTITY = "urn:cts:GreekLatinLit:NIL"


def init_logger(log_file=None, loglevel=logging.DEBUG):
    """Initialise the logger."""
    # Configure file-based logging only when a non-empty log file path
    # is given; otherwise fall back to a console handler.
    if log_file is not None and log_file != "":
        logging.basicConfig(
            filename=log_file,
            level=loglevel,
            format='%(asctime)s - %(name)s - [%(levelname)s] %(message)s',
            filemode='w',
            datefmt='%a, %d %b %Y %H:%M:%S'
        )
        logger = logging.getLogger(__name__)
        logger.info("Logger initialised")
    else:
        logger = logging.getLogger(__name__)
        logger.setLevel(loglevel)
        ch = logging.StreamHandler()
        ch.setLevel(loglevel)
        formatter = logging.Formatter(
            "%(asctime)s - %(name)s - %(levelname)s - %(message)s")
        ch.setFormatter(formatter)
        logger.addHandler(ch)
        logger.info("Logger initialised")
    return logger
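A stdlib-only sketch mirroring the intent of init_logger above (the logger name is hypothetical): file-based configuration only when a non-empty path is supplied, a console handler otherwise.

import logging


def demo_init_logger(log_file=None, loglevel=logging.DEBUG):
    """Stdlib-only mirror of init_logger: log to a file when a non-empty
    path is given, to the console otherwise."""
    logger = logging.getLogger('citation_extractor.demo')  # hypothetical name
    if log_file:  # truthy test covers both None and the empty string
        logging.basicConfig(
            filename=log_file, filemode='w', level=loglevel,
            format='%(asctime)s - %(name)s - [%(levelname)s] %(message)s')
    else:
        ch = logging.StreamHandler()
        ch.setLevel(loglevel)
        ch.setFormatter(logging.Formatter(
            '%(asctime)s - %(name)s - %(levelname)s - %(message)s'))
        logger.setLevel(loglevel)
        logger.addHandler(ch)
    logger.info('Logger initialised')
    return logger


demo_init_logger().debug('console logging at DEBUG level')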
gpl-3.0
meduz/scikit-learn
sklearn/model_selection/tests/test_validation.py
2
38986
"""Test the validation module""" from __future__ import division import sys import warnings import tempfile import os from time import sleep import numpy as np from scipy.sparse import coo_matrix, csr_matrix from sklearn.utils.testing import assert_true from sklearn.utils.testing import assert_false from sklearn.utils.testing import assert_equal from sklearn.utils.testing import assert_almost_equal from sklearn.utils.testing import assert_raises from sklearn.utils.testing import assert_raise_message from sklearn.utils.testing import assert_greater from sklearn.utils.testing import assert_less from sklearn.utils.testing import assert_array_almost_equal from sklearn.utils.testing import assert_array_equal from sklearn.utils.testing import assert_warns from sklearn.utils.mocking import CheckingClassifier, MockDataFrame from sklearn.model_selection import cross_val_score from sklearn.model_selection import cross_val_predict from sklearn.model_selection import permutation_test_score from sklearn.model_selection import KFold from sklearn.model_selection import StratifiedKFold from sklearn.model_selection import LeaveOneOut from sklearn.model_selection import LeaveOneGroupOut from sklearn.model_selection import LeavePGroupsOut from sklearn.model_selection import GroupKFold from sklearn.model_selection import GroupShuffleSplit from sklearn.model_selection import learning_curve from sklearn.model_selection import validation_curve from sklearn.model_selection._validation import _check_is_permutation from sklearn.datasets import make_regression from sklearn.datasets import load_boston from sklearn.datasets import load_iris from sklearn.metrics import explained_variance_score from sklearn.metrics import make_scorer from sklearn.metrics import precision_score from sklearn.linear_model import Ridge, LogisticRegression from sklearn.linear_model import PassiveAggressiveClassifier from sklearn.neighbors import KNeighborsClassifier from sklearn.svm import SVC from sklearn.cluster import KMeans from sklearn.preprocessing import Imputer from sklearn.pipeline import Pipeline from sklearn.externals.six.moves import cStringIO as StringIO from sklearn.base import BaseEstimator from sklearn.multiclass import OneVsRestClassifier from sklearn.utils import shuffle from sklearn.datasets import make_classification from sklearn.datasets import make_multilabel_classification from sklearn.model_selection.tests.common import OneTimeSplitter try: WindowsError except NameError: WindowsError = None class MockImprovingEstimator(BaseEstimator): """Dummy classifier to test the learning curve""" def __init__(self, n_max_train_sizes): self.n_max_train_sizes = n_max_train_sizes self.train_sizes = 0 self.X_subset = None def fit(self, X_subset, y_subset=None): self.X_subset = X_subset self.train_sizes = X_subset.shape[0] return self def predict(self, X): raise NotImplementedError def score(self, X=None, Y=None): # training score becomes worse (2 -> 1), test error better (0 -> 1) if self._is_training_data(X): return 2. 
- float(self.train_sizes) / self.n_max_train_sizes else: return float(self.train_sizes) / self.n_max_train_sizes def _is_training_data(self, X): return X is self.X_subset class MockIncrementalImprovingEstimator(MockImprovingEstimator): """Dummy classifier that provides partial_fit""" def __init__(self, n_max_train_sizes): super(MockIncrementalImprovingEstimator, self).__init__(n_max_train_sizes) self.x = None def _is_training_data(self, X): return self.x in X def partial_fit(self, X, y=None, **params): self.train_sizes += X.shape[0] self.x = X[0] class MockEstimatorWithParameter(BaseEstimator): """Dummy classifier to test the validation curve""" def __init__(self, param=0.5): self.X_subset = None self.param = param def fit(self, X_subset, y_subset): self.X_subset = X_subset self.train_sizes = X_subset.shape[0] return self def predict(self, X): raise NotImplementedError def score(self, X=None, y=None): return self.param if self._is_training_data(X) else 1 - self.param def _is_training_data(self, X): return X is self.X_subset class MockClassifier(object): """Dummy classifier to test the cross-validation""" def __init__(self, a=0, allow_nd=False): self.a = a self.allow_nd = allow_nd def fit(self, X, Y=None, sample_weight=None, class_prior=None, sparse_sample_weight=None, sparse_param=None, dummy_int=None, dummy_str=None, dummy_obj=None, callback=None): """The dummy arguments are to test that this fit function can accept non-array arguments through cross-validation, such as: - int - str (this is actually array-like) - object - function """ self.dummy_int = dummy_int self.dummy_str = dummy_str self.dummy_obj = dummy_obj if callback is not None: callback(self) if self.allow_nd: X = X.reshape(len(X), -1) if X.ndim >= 3 and not self.allow_nd: raise ValueError('X cannot be d') if sample_weight is not None: assert_true(sample_weight.shape[0] == X.shape[0], 'MockClassifier extra fit_param sample_weight.shape[0]' ' is {0}, should be {1}'.format(sample_weight.shape[0], X.shape[0])) if class_prior is not None: assert_true(class_prior.shape[0] == len(np.unique(y)), 'MockClassifier extra fit_param class_prior.shape[0]' ' is {0}, should be {1}'.format(class_prior.shape[0], len(np.unique(y)))) if sparse_sample_weight is not None: fmt = ('MockClassifier extra fit_param sparse_sample_weight' '.shape[0] is {0}, should be {1}') assert_true(sparse_sample_weight.shape[0] == X.shape[0], fmt.format(sparse_sample_weight.shape[0], X.shape[0])) if sparse_param is not None: fmt = ('MockClassifier extra fit_param sparse_param.shape ' 'is ({0}, {1}), should be ({2}, {3})') assert_true(sparse_param.shape == P_sparse.shape, fmt.format(sparse_param.shape[0], sparse_param.shape[1], P_sparse.shape[0], P_sparse.shape[1])) return self def predict(self, T): if self.allow_nd: T = T.reshape(len(T), -1) return T[:, 0] def score(self, X=None, Y=None): return 1. 
            / (1 + np.abs(self.a))

    def get_params(self, deep=False):
        return {'a': self.a, 'allow_nd': self.allow_nd}


# XXX: use 2D array, since 1D X is being detected as a single sample in
# check_consistent_length
X = np.ones((10, 2))
X_sparse = coo_matrix(X)
y = np.array([0, 0, 1, 1, 2, 2, 3, 3, 4, 4])
# The number of samples per class needs to be > n_splits,
# for StratifiedKFold(n_splits=3)
y2 = np.array([1, 1, 1, 2, 2, 2, 3, 3, 3, 3])
P_sparse = coo_matrix(np.eye(5))


def test_cross_val_score():
    clf = MockClassifier()

    for a in range(-10, 10):
        clf.a = a
        # Smoke test
        scores = cross_val_score(clf, X, y2)
        assert_array_equal(scores, clf.score(X, y2))

        # test with multioutput y
        multioutput_y = np.column_stack([y2, y2[::-1]])
        scores = cross_val_score(clf, X_sparse, multioutput_y)
        assert_array_equal(scores, clf.score(X_sparse, multioutput_y))

        scores = cross_val_score(clf, X_sparse, y2)
        assert_array_equal(scores, clf.score(X_sparse, y2))

        # test with multioutput y
        scores = cross_val_score(clf, X_sparse, multioutput_y)
        assert_array_equal(scores, clf.score(X_sparse, multioutput_y))

    # test with X and y as list
    list_check = lambda x: isinstance(x, list)
    clf = CheckingClassifier(check_X=list_check)
    scores = cross_val_score(clf, X.tolist(), y2.tolist())

    clf = CheckingClassifier(check_y=list_check)
    scores = cross_val_score(clf, X, y2.tolist())

    assert_raises(ValueError, cross_val_score, clf, X, y2, scoring="sklearn")

    # test with 3d X
    X_3d = X[:, :, np.newaxis]
    clf = MockClassifier(allow_nd=True)
    scores = cross_val_score(clf, X_3d, y2)

    clf = MockClassifier(allow_nd=False)
    assert_raises(ValueError, cross_val_score, clf, X_3d, y2)


def test_cross_val_score_predict_groups():
    # Check if ValueError (when groups is None) propagates to
    # cross_val_score and cross_val_predict, and check that groups
    # is correctly passed to the cv object
    X, y = make_classification(n_samples=20, n_classes=2, random_state=0)

    clf = SVC(kernel="linear")

    group_cvs = [LeaveOneGroupOut(), LeavePGroupsOut(2), GroupKFold(),
                 GroupShuffleSplit()]
    for cv in group_cvs:
        assert_raise_message(ValueError,
                             "The groups parameter should not be None",
                             cross_val_score, estimator=clf, X=X, y=y, cv=cv)
        assert_raise_message(ValueError,
                             "The groups parameter should not be None",
                             cross_val_predict, estimator=clf, X=X, y=y,
                             cv=cv)


def test_cross_val_score_pandas():
    # check cross_val_score doesn't destroy pandas dataframe
    types = [(MockDataFrame, MockDataFrame)]
    try:
        from pandas import Series, DataFrame
        types.append((Series, DataFrame))
    except ImportError:
        pass
    for TargetType, InputFeatureType in types:
        # X dataframe, y series
        # 3-fold cross-validation is used, so we need at least 3 samples
        # per class
        X_df, y_ser = InputFeatureType(X), TargetType(y2)
        check_df = lambda x: isinstance(x, InputFeatureType)
        check_series = lambda x: isinstance(x, TargetType)
        clf = CheckingClassifier(check_X=check_df, check_y=check_series)
        cross_val_score(clf, X_df, y_ser)


def test_cross_val_score_mask():
    # test that cross_val_score works with boolean masks
    svm = SVC(kernel="linear")
    iris = load_iris()
    X, y = iris.data, iris.target
    kfold = KFold(5)
    scores_indices = cross_val_score(svm, X, y, cv=kfold)
    kfold = KFold(5)
    cv_masks = []
    for train, test in kfold.split(X, y):
        mask_train = np.zeros(len(y), dtype=np.bool)
        mask_test = np.zeros(len(y), dtype=np.bool)
        mask_train[train] = 1
        mask_test[test] = 1
        # append the boolean masks (not the index arrays), since masks are
        # what this test is about
        cv_masks.append((mask_train, mask_test))
    scores_masks = cross_val_score(svm, X, y, cv=cv_masks)
    assert_array_equal(scores_indices, scores_masks)


def test_cross_val_score_precomputed():
    # test for svm with precomputed kernel
    svm = SVC(kernel="precomputed")
    iris = load_iris()
    X, y = iris.data, iris.target
    linear_kernel = np.dot(X, X.T)
    score_precomputed = cross_val_score(svm, linear_kernel, y)
    svm = SVC(kernel="linear")
    score_linear = cross_val_score(svm, X, y)
    assert_array_almost_equal(score_precomputed, score_linear)

    # test with callable
    svm = SVC(kernel=lambda x, y: np.dot(x, y.T))
    score_callable = cross_val_score(svm, X, y)
    assert_array_almost_equal(score_precomputed, score_callable)

    # Error raised for non-square X
    svm = SVC(kernel="precomputed")
    assert_raises(ValueError, cross_val_score, svm, X, y)

    # test error is raised when the precomputed kernel is not array-like
    # or sparse
    assert_raises(ValueError, cross_val_score, svm,
                  linear_kernel.tolist(), y)


def test_cross_val_score_fit_params():
    clf = MockClassifier()
    n_samples = X.shape[0]
    n_classes = len(np.unique(y))

    W_sparse = coo_matrix((np.array([1]), (np.array([1]), np.array([0]))),
                          shape=(10, 1))
    P_sparse = coo_matrix(np.eye(5))

    DUMMY_INT = 42
    DUMMY_STR = '42'
    DUMMY_OBJ = object()

    def assert_fit_params(clf):
        # Function to test that the values are passed correctly to the
        # classifier arguments for non-array type
        assert_equal(clf.dummy_int, DUMMY_INT)
        assert_equal(clf.dummy_str, DUMMY_STR)
        assert_equal(clf.dummy_obj, DUMMY_OBJ)

    fit_params = {'sample_weight': np.ones(n_samples),
                  'class_prior': np.ones(n_classes) / n_classes,
                  'sparse_sample_weight': W_sparse,
                  'sparse_param': P_sparse,
                  'dummy_int': DUMMY_INT,
                  'dummy_str': DUMMY_STR,
                  'dummy_obj': DUMMY_OBJ,
                  'callback': assert_fit_params}
    cross_val_score(clf, X, y, fit_params=fit_params)


def test_cross_val_score_score_func():
    clf = MockClassifier()
    _score_func_args = []

    def score_func(y_test, y_predict):
        _score_func_args.append((y_test, y_predict))
        return 1.0

    with warnings.catch_warnings(record=True):
        scoring = make_scorer(score_func)
        score = cross_val_score(clf, X, y, scoring=scoring)
    assert_array_equal(score, [1.0, 1.0, 1.0])
    assert len(_score_func_args) == 3


def test_cross_val_score_errors():
    class BrokenEstimator:
        pass

    assert_raises(TypeError, cross_val_score, BrokenEstimator(), X)


def test_cross_val_score_with_score_func_classification():
    iris = load_iris()
    clf = SVC(kernel='linear')

    # Default score (should be the accuracy score)
    scores = cross_val_score(clf, iris.data, iris.target, cv=5)
    assert_array_almost_equal(scores, [0.97, 1., 0.97, 0.97, 1.], 2)

    # Correct classification score (aka. zero / one score) - should be the
    # same as the default estimator score
    zo_scores = cross_val_score(clf, iris.data, iris.target,
                                scoring="accuracy", cv=5)
    assert_array_almost_equal(zo_scores, [0.97, 1., 0.97, 0.97, 1.], 2)

    # F1 score (classes are balanced, so f1_score should be equal to the
    # zero/one score)
    f1_scores = cross_val_score(clf, iris.data, iris.target,
                                scoring="f1_weighted", cv=5)
    assert_array_almost_equal(f1_scores, [0.97, 1., 0.97, 0.97, 1.], 2)


def test_cross_val_score_with_score_func_regression():
    X, y = make_regression(n_samples=30, n_features=20, n_informative=5,
                           random_state=0)
    reg = Ridge()

    # Default score of the Ridge regression estimator
    scores = cross_val_score(reg, X, y, cv=5)
    assert_array_almost_equal(scores, [0.94, 0.97, 0.97, 0.99, 0.92], 2)

    # R2 score (aka. coefficient of determination) - should be the
    # same as the default estimator score
    r2_scores = cross_val_score(reg, X, y, scoring="r2", cv=5)
    assert_array_almost_equal(r2_scores, [0.94, 0.97, 0.97, 0.99, 0.92], 2)

    # Mean squared error; this is a loss function, so "scores" are negative
    neg_mse_scores = cross_val_score(reg, X, y, cv=5,
                                     scoring="neg_mean_squared_error")
    expected_neg_mse = np.array([-763.07, -553.16, -274.38, -273.26,
                                 -1681.99])
    assert_array_almost_equal(neg_mse_scores, expected_neg_mse, 2)

    # Explained variance
    scoring = make_scorer(explained_variance_score)
    ev_scores = cross_val_score(reg, X, y, cv=5, scoring=scoring)
    assert_array_almost_equal(ev_scores, [0.94, 0.97, 0.97, 0.99, 0.92], 2)


def test_permutation_score():
    iris = load_iris()
    X = iris.data
    X_sparse = coo_matrix(X)
    y = iris.target
    svm = SVC(kernel='linear')
    cv = StratifiedKFold(2)

    score, scores, pvalue = permutation_test_score(
        svm, X, y, n_permutations=30, cv=cv, scoring="accuracy")
    assert_greater(score, 0.9)
    assert_almost_equal(pvalue, 0.0, 1)

    score_group, _, pvalue_group = permutation_test_score(
        svm, X, y, n_permutations=30, cv=cv, scoring="accuracy",
        groups=np.ones(y.size), random_state=0)
    assert_true(score_group == score)
    assert_true(pvalue_group == pvalue)

    # check that we obtain the same results with a sparse representation
    svm_sparse = SVC(kernel='linear')
    cv_sparse = StratifiedKFold(2)
    score_group, _, pvalue_group = permutation_test_score(
        svm_sparse, X_sparse, y, n_permutations=30, cv=cv_sparse,
        scoring="accuracy", groups=np.ones(y.size), random_state=0)

    assert_true(score_group == score)
    assert_true(pvalue_group == pvalue)

    # test with custom scoring object
    def custom_score(y_true, y_pred):
        return (((y_true == y_pred).sum() - (y_true != y_pred).sum()) /
                y_true.shape[0])

    scorer = make_scorer(custom_score)
    score, _, pvalue = permutation_test_score(
        svm, X, y, n_permutations=100, scoring=scorer, cv=cv,
        random_state=0)
    assert_almost_equal(score, .93, 2)
    assert_almost_equal(pvalue, 0.01, 3)

    # set random y
    y = np.mod(np.arange(len(y)), 3)

    score, scores, pvalue = permutation_test_score(
        svm, X, y, n_permutations=30, cv=cv, scoring="accuracy")

    assert_less(score, 0.5)
    assert_greater(pvalue, 0.2)


def test_permutation_test_score_allow_nans():
    # Check that permutation_test_score allows input data with NaNs
    X = np.arange(200, dtype=np.float64).reshape(10, -1)
    X[2, :] = np.nan
    y = np.repeat([0, 1], X.shape[0] / 2)
    p = Pipeline([
        ('imputer', Imputer(strategy='mean', missing_values='NaN')),
        ('classifier', MockClassifier()),
    ])
    permutation_test_score(p, X, y, cv=5)


def test_cross_val_score_allow_nans():
    # Check that cross_val_score allows input data with NaNs
    X = np.arange(200, dtype=np.float64).reshape(10, -1)
    X[2, :] = np.nan
    y = np.repeat([0, 1], X.shape[0] / 2)
    p = Pipeline([
        ('imputer', Imputer(strategy='mean', missing_values='NaN')),
        ('classifier', MockClassifier()),
    ])
    cross_val_score(p, X, y, cv=5)


def test_cross_val_score_multilabel():
    X = np.array([[-3, 4], [2, 4], [3, 3], [0, 2], [-3, 1],
                  [-2, 1], [0, 0], [-2, -1], [-1, -2], [1, -2]])
    y = np.array([[1, 1], [0, 1], [0, 1], [0, 1], [1, 1],
                  [0, 1], [1, 0], [1, 1], [1, 0], [0, 0]])
    clf = KNeighborsClassifier(n_neighbors=1)
    scoring_micro = make_scorer(precision_score, average='micro')
    scoring_macro = make_scorer(precision_score, average='macro')
    scoring_samples = make_scorer(precision_score, average='samples')
    score_micro = cross_val_score(clf, X, y, scoring=scoring_micro, cv=5)
    score_macro = cross_val_score(clf, X, y, scoring=scoring_macro, cv=5)
    score_samples = cross_val_score(clf, X, y, scoring=scoring_samples,
                                    cv=5)
    assert_almost_equal(score_micro, [1, 1 / 2, 3 / 4, 1 / 2, 1 / 3])
    assert_almost_equal(score_macro, [1, 1 / 2, 3 / 4, 1 / 2, 1 / 4])
    assert_almost_equal(score_samples, [1, 1 / 2, 3 / 4, 1 / 2, 1 / 4])


def test_cross_val_predict():
    boston = load_boston()
    X, y = boston.data, boston.target
    cv = KFold()

    est = Ridge()

    # Naive loop (should be same as cross_val_predict):
    preds2 = np.zeros_like(y)
    for train, test in cv.split(X, y):
        est.fit(X[train], y[train])
        preds2[test] = est.predict(X[test])

    preds = cross_val_predict(est, X, y, cv=cv)
    assert_array_almost_equal(preds, preds2)

    preds = cross_val_predict(est, X, y)
    assert_equal(len(preds), len(y))

    cv = LeaveOneOut()
    preds = cross_val_predict(est, X, y, cv=cv)
    assert_equal(len(preds), len(y))

    Xsp = X.copy()
    Xsp *= (Xsp > np.median(Xsp))
    Xsp = coo_matrix(Xsp)
    preds = cross_val_predict(est, Xsp, y)
    assert_array_almost_equal(len(preds), len(y))

    preds = cross_val_predict(KMeans(), X)
    assert_equal(len(preds), len(y))

    class BadCV():
        def split(self, X, y=None, groups=None):
            for i in range(4):
                yield np.array([0, 1, 2, 3]), np.array([4, 5, 6, 7, 8])

    assert_raises(ValueError, cross_val_predict, est, X, y, cv=BadCV())


def test_cross_val_predict_input_types():
    iris = load_iris()
    X, y = iris.data, iris.target
    X_sparse = coo_matrix(X)
    multioutput_y = np.column_stack([y, y[::-1]])

    clf = Ridge(fit_intercept=False, random_state=0)
    # 3-fold cv is used --> at least 3 samples per class
    # Smoke test
    predictions = cross_val_predict(clf, X, y)
    assert_equal(predictions.shape, (150,))

    # test with multioutput y
    predictions = cross_val_predict(clf, X_sparse, multioutput_y)
    assert_equal(predictions.shape, (150, 2))

    predictions = cross_val_predict(clf, X_sparse, y)
    assert_array_equal(predictions.shape, (150,))

    # test with multioutput y
    predictions = cross_val_predict(clf, X_sparse, multioutput_y)
    assert_array_equal(predictions.shape, (150, 2))

    # test with X and y as list
    list_check = lambda x: isinstance(x, list)
    clf = CheckingClassifier(check_X=list_check)
    predictions = cross_val_predict(clf, X.tolist(), y.tolist())

    clf = CheckingClassifier(check_y=list_check)
    predictions = cross_val_predict(clf, X, y.tolist())

    # test with 3d X
    X_3d = X[:, :, np.newaxis]
    check_3d = lambda x: x.ndim == 3
    clf = CheckingClassifier(check_X=check_3d)
    predictions = cross_val_predict(clf, X_3d, y)
    assert_array_equal(predictions.shape, (150,))


def test_cross_val_predict_pandas():
    # check cross_val_predict doesn't destroy pandas dataframe
    types = [(MockDataFrame, MockDataFrame)]
    try:
        from pandas import Series, DataFrame
        types.append((Series, DataFrame))
    except ImportError:
        pass
    for TargetType, InputFeatureType in types:
        # X dataframe, y series
        X_df, y_ser = InputFeatureType(X), TargetType(y2)
        check_df = lambda x: isinstance(x, InputFeatureType)
        check_series = lambda x: isinstance(x, TargetType)
        clf = CheckingClassifier(check_X=check_df, check_y=check_series)
        cross_val_predict(clf, X_df, y_ser)


def test_cross_val_score_sparse_fit_params():
    iris = load_iris()
    X, y = iris.data, iris.target
    clf = MockClassifier()
    fit_params = {'sparse_sample_weight': coo_matrix(np.eye(X.shape[0]))}
    a = cross_val_score(clf, X, y, fit_params=fit_params)
    assert_array_equal(a, np.ones(3))


def test_learning_curve():
    n_samples = 30
    n_splits = 3
    X, y = make_classification(n_samples=n_samples, n_features=1,
                               n_informative=1, n_redundant=0, n_classes=2,
                               n_clusters_per_class=1, random_state=0)
    estimator = MockImprovingEstimator(n_samples * ((n_splits - 1) /
                                                    n_splits))
    for shuffle_train in [False, True]:
        with warnings.catch_warnings(record=True) as w:
            train_sizes, train_scores, test_scores = learning_curve(
                estimator, X, y, cv=KFold(n_splits=n_splits),
                train_sizes=np.linspace(0.1, 1.0, 10),
                shuffle=shuffle_train)
        if len(w) > 0:
            raise RuntimeError("Unexpected warning: %r" % w[0].message)
        assert_equal(train_scores.shape, (10, 3))
        assert_equal(test_scores.shape, (10, 3))
        assert_array_equal(train_sizes, np.linspace(2, 20, 10))
        assert_array_almost_equal(train_scores.mean(axis=1),
                                  np.linspace(1.9, 1.0, 10))
        assert_array_almost_equal(test_scores.mean(axis=1),
                                  np.linspace(0.1, 1.0, 10))

        # Test a custom cv splitter that can iterate only once
        with warnings.catch_warnings(record=True) as w:
            train_sizes2, train_scores2, test_scores2 = learning_curve(
                estimator, X, y,
                cv=OneTimeSplitter(n_splits=n_splits, n_samples=n_samples),
                train_sizes=np.linspace(0.1, 1.0, 10),
                shuffle=shuffle_train)
        if len(w) > 0:
            raise RuntimeError("Unexpected warning: %r" % w[0].message)
        assert_array_almost_equal(train_scores2, train_scores)
        assert_array_almost_equal(test_scores2, test_scores)


def test_learning_curve_unsupervised():
    X, _ = make_classification(n_samples=30, n_features=1, n_informative=1,
                               n_redundant=0, n_classes=2,
                               n_clusters_per_class=1, random_state=0)
    estimator = MockImprovingEstimator(20)
    train_sizes, train_scores, test_scores = learning_curve(
        estimator, X, y=None, cv=3, train_sizes=np.linspace(0.1, 1.0, 10))
    assert_array_equal(train_sizes, np.linspace(2, 20, 10))
    assert_array_almost_equal(train_scores.mean(axis=1),
                              np.linspace(1.9, 1.0, 10))
    assert_array_almost_equal(test_scores.mean(axis=1),
                              np.linspace(0.1, 1.0, 10))


def test_learning_curve_verbose():
    X, y = make_classification(n_samples=30, n_features=1, n_informative=1,
                               n_redundant=0, n_classes=2,
                               n_clusters_per_class=1, random_state=0)
    estimator = MockImprovingEstimator(20)

    old_stdout = sys.stdout
    sys.stdout = StringIO()
    try:
        train_sizes, train_scores, test_scores = \
            learning_curve(estimator, X, y, cv=3, verbose=1)
    finally:
        out = sys.stdout.getvalue()
        sys.stdout.close()
        sys.stdout = old_stdout

    assert("[learning_curve]" in out)


def test_learning_curve_incremental_learning_not_possible():
    X, y = make_classification(n_samples=2, n_features=1, n_informative=1,
                               n_redundant=0, n_classes=2,
                               n_clusters_per_class=1, random_state=0)
    # The mockup does not have partial_fit()
    estimator = MockImprovingEstimator(1)
    assert_raises(ValueError, learning_curve, estimator, X, y,
                  exploit_incremental_learning=True)


def test_learning_curve_incremental_learning():
    X, y = make_classification(n_samples=30, n_features=1, n_informative=1,
                               n_redundant=0, n_classes=2,
                               n_clusters_per_class=1, random_state=0)
    estimator = MockIncrementalImprovingEstimator(20)
    for shuffle_train in [False, True]:
        train_sizes, train_scores, test_scores = learning_curve(
            estimator, X, y, cv=3, exploit_incremental_learning=True,
            train_sizes=np.linspace(0.1, 1.0, 10), shuffle=shuffle_train)
        assert_array_equal(train_sizes, np.linspace(2, 20, 10))
        assert_array_almost_equal(train_scores.mean(axis=1),
                                  np.linspace(1.9, 1.0, 10))
        assert_array_almost_equal(test_scores.mean(axis=1),
                                  np.linspace(0.1, 1.0, 10))


def test_learning_curve_incremental_learning_unsupervised():
    X, _ = make_classification(n_samples=30, n_features=1, n_informative=1,
                               n_redundant=0, n_classes=2,
                               n_clusters_per_class=1, random_state=0)
    estimator = MockIncrementalImprovingEstimator(20)
    train_sizes, train_scores, test_scores = learning_curve(
        estimator, X, y=None, cv=3, exploit_incremental_learning=True,
        train_sizes=np.linspace(0.1, 1.0, 10))
    assert_array_equal(train_sizes, np.linspace(2, 20, 10))
    assert_array_almost_equal(train_scores.mean(axis=1),
                              np.linspace(1.9, 1.0, 10))
    assert_array_almost_equal(test_scores.mean(axis=1),
                              np.linspace(0.1, 1.0, 10))


def test_learning_curve_batch_and_incremental_learning_are_equal():
    X, y = make_classification(n_samples=30, n_features=1, n_informative=1,
                               n_redundant=0, n_classes=2,
                               n_clusters_per_class=1, random_state=0)
    train_sizes = np.linspace(0.2, 1.0, 5)
    estimator = PassiveAggressiveClassifier(n_iter=1, shuffle=False)

    train_sizes_inc, train_scores_inc, test_scores_inc = \
        learning_curve(
            estimator, X, y, train_sizes=train_sizes,
            cv=3, exploit_incremental_learning=True)
    train_sizes_batch, train_scores_batch, test_scores_batch = \
        learning_curve(
            estimator, X, y, cv=3, train_sizes=train_sizes,
            exploit_incremental_learning=False)

    assert_array_equal(train_sizes_inc, train_sizes_batch)
    assert_array_almost_equal(train_scores_inc.mean(axis=1),
                              train_scores_batch.mean(axis=1))
    assert_array_almost_equal(test_scores_inc.mean(axis=1),
                              test_scores_batch.mean(axis=1))


def test_learning_curve_n_sample_range_out_of_bounds():
    X, y = make_classification(n_samples=30, n_features=1, n_informative=1,
                               n_redundant=0, n_classes=2,
                               n_clusters_per_class=1, random_state=0)
    estimator = MockImprovingEstimator(20)
    assert_raises(ValueError, learning_curve, estimator, X, y, cv=3,
                  train_sizes=[0, 1])
    assert_raises(ValueError, learning_curve, estimator, X, y, cv=3,
                  train_sizes=[0.0, 1.0])
    assert_raises(ValueError, learning_curve, estimator, X, y, cv=3,
                  train_sizes=[0.1, 1.1])
    assert_raises(ValueError, learning_curve, estimator, X, y, cv=3,
                  train_sizes=[0, 20])
    assert_raises(ValueError, learning_curve, estimator, X, y, cv=3,
                  train_sizes=[1, 21])


def test_learning_curve_remove_duplicate_sample_sizes():
    X, y = make_classification(n_samples=3, n_features=1, n_informative=1,
                               n_redundant=0, n_classes=2,
                               n_clusters_per_class=1, random_state=0)
    estimator = MockImprovingEstimator(2)
    train_sizes, _, _ = assert_warns(
        RuntimeWarning, learning_curve, estimator, X, y, cv=3,
        train_sizes=np.linspace(0.33, 1.0, 3))
    assert_array_equal(train_sizes, [1, 2])


def test_learning_curve_with_boolean_indices():
    X, y = make_classification(n_samples=30, n_features=1, n_informative=1,
                               n_redundant=0, n_classes=2,
                               n_clusters_per_class=1, random_state=0)
    estimator = MockImprovingEstimator(20)
    cv = KFold(n_splits=3)
    train_sizes, train_scores, test_scores = learning_curve(
        estimator, X, y, cv=cv, train_sizes=np.linspace(0.1, 1.0, 10))
    assert_array_equal(train_sizes, np.linspace(2, 20, 10))
    assert_array_almost_equal(train_scores.mean(axis=1),
                              np.linspace(1.9, 1.0, 10))
    assert_array_almost_equal(test_scores.mean(axis=1),
                              np.linspace(0.1, 1.0, 10))


def test_learning_curve_with_shuffle():
    # Following test case was designed this way to verify the code
    # changes made in pull request: #7506.
    X = np.array([[1, 2], [3, 4], [5, 6], [7, 8], [11, 12], [13, 14],
                  [15, 16], [17, 18], [19, 20], [7, 8], [9, 10], [11, 12],
                  [13, 14], [15, 16], [17, 18]])
    y = np.array([1, 1, 1, 2, 3, 4, 1, 1, 2, 3, 4, 1, 2, 3, 4])
    groups = np.array([1, 1, 1, 1, 1, 1, 3, 3, 3, 3, 3, 4, 4, 4, 4])
    # Splits on these groups fail without shuffle as the first iteration
    # of the learning curve doesn't contain label 4 in the training set.
    estimator = PassiveAggressiveClassifier(shuffle=False)

    cv = GroupKFold(n_splits=2)
    train_sizes_batch, train_scores_batch, test_scores_batch = learning_curve(
        estimator, X, y, cv=cv, n_jobs=1,
        train_sizes=np.linspace(0.3, 1.0, 3), groups=groups,
        shuffle=True, random_state=2)
    assert_array_almost_equal(train_scores_batch.mean(axis=1),
                              np.array([0.75, 0.3, 0.36111111]))
    assert_array_almost_equal(test_scores_batch.mean(axis=1),
                              np.array([0.36111111, 0.25, 0.25]))
    assert_raises(ValueError, learning_curve, estimator, X, y, cv=cv,
                  n_jobs=1, train_sizes=np.linspace(0.3, 1.0, 3),
                  groups=groups)

    train_sizes_inc, train_scores_inc, test_scores_inc = learning_curve(
        estimator, X, y, cv=cv, n_jobs=1,
        train_sizes=np.linspace(0.3, 1.0, 3), groups=groups,
        shuffle=True, random_state=2,
        exploit_incremental_learning=True)
    assert_array_almost_equal(train_scores_inc.mean(axis=1),
                              train_scores_batch.mean(axis=1))
    assert_array_almost_equal(test_scores_inc.mean(axis=1),
                              test_scores_batch.mean(axis=1))


def test_validation_curve():
    X, y = make_classification(n_samples=2, n_features=1, n_informative=1,
                               n_redundant=0, n_classes=2,
                               n_clusters_per_class=1, random_state=0)
    param_range = np.linspace(0, 1, 10)
    with warnings.catch_warnings(record=True) as w:
        train_scores, test_scores = validation_curve(
            MockEstimatorWithParameter(), X, y, param_name="param",
            param_range=param_range, cv=2)
    if len(w) > 0:
        raise RuntimeError("Unexpected warning: %r" % w[0].message)

    assert_array_almost_equal(train_scores.mean(axis=1), param_range)
    assert_array_almost_equal(test_scores.mean(axis=1), 1 - param_range)


def test_validation_curve_cv_splits_consistency():
    n_samples = 100
    n_splits = 5
    X, y = make_classification(n_samples=100, random_state=0)

    scores1 = validation_curve(SVC(kernel='linear', random_state=0), X, y,
                               'C', [0.1, 0.1, 0.2, 0.2],
                               cv=OneTimeSplitter(n_splits=n_splits,
                                                  n_samples=n_samples))
    # The OneTimeSplitter is a non-re-entrant cv splitter. Unless the
    # `split` is called for each parameter, the following should produce
    # identical results for param setting 1 and param setting 2, as both
    # have the same C value.
    assert_array_almost_equal(*np.vsplit(np.hstack(scores1)[(0, 2, 1, 3), :],
                                         2))

    scores2 = validation_curve(SVC(kernel='linear', random_state=0), X, y,
                               'C', [0.1, 0.1, 0.2, 0.2],
                               cv=KFold(n_splits=n_splits, shuffle=True))

    # For scores2, compare the 1st and 2nd parameter's scores
    # (Since the C value for the first two param settings is 0.1, they must
    # be consistent unless the train/test folds differ between the param
    # settings)
    assert_array_almost_equal(*np.vsplit(np.hstack(scores2)[(0, 2, 1, 3), :],
                                         2))

    scores3 = validation_curve(SVC(kernel='linear', random_state=0), X, y,
                               'C', [0.1, 0.1, 0.2, 0.2],
                               cv=KFold(n_splits=n_splits))

    # OneTimeSplitter is basically unshuffled KFold(n_splits=5). Sanity check.
    assert_array_almost_equal(np.array(scores3), np.array(scores1))


def test_check_is_permutation():
    rng = np.random.RandomState(0)
    p = np.arange(100)
    rng.shuffle(p)
    assert_true(_check_is_permutation(p, 100))
    assert_false(_check_is_permutation(np.delete(p, 23), 100))
    p[0] = 23
    assert_false(_check_is_permutation(p, 100))

    # Check if the additional duplicate indices are caught
    assert_false(_check_is_permutation(np.hstack((p, 0)), 100))


def test_cross_val_predict_sparse_prediction():
    # check that cross_val_predict gives same result for sparse and dense
    # input
    X, y = make_multilabel_classification(n_classes=2, n_labels=1,
                                          allow_unlabeled=False,
                                          return_indicator=True,
                                          random_state=1)
    X_sparse = csr_matrix(X)
    y_sparse = csr_matrix(y)
    classif = OneVsRestClassifier(SVC(kernel='linear'))
    preds = cross_val_predict(classif, X, y, cv=10)
    preds_sparse = cross_val_predict(classif, X_sparse, y_sparse, cv=10)
    preds_sparse = preds_sparse.toarray()
    assert_array_almost_equal(preds_sparse, preds)


def test_cross_val_predict_with_method():
    iris = load_iris()
    X, y = iris.data, iris.target
    X, y = shuffle(X, y, random_state=0)
    classes = len(set(y))

    kfold = KFold(len(iris.target))

    methods = ['decision_function', 'predict_proba', 'predict_log_proba']
    for method in methods:
        est = LogisticRegression()

        predictions = cross_val_predict(est, X, y, method=method)
        assert_equal(len(predictions), len(y))

        expected_predictions = np.zeros([len(y), classes])
        func = getattr(est, method)

        # Naive loop (should be same as cross_val_predict):
        for train, test in kfold.split(X, y):
            est.fit(X[train], y[train])
            expected_predictions[test] = func(X[test])

        predictions = cross_val_predict(est, X, y, method=method,
                                        cv=kfold)
        assert_array_almost_equal(expected_predictions, predictions)


def test_score_memmap():
    # Ensure a scalar score of memmap type is accepted
    iris = load_iris()
    X, y = iris.data, iris.target
    clf = MockClassifier()
    tf = tempfile.NamedTemporaryFile(mode='wb', delete=False)
    tf.write(b'Hello world!!!!!')
    tf.close()
    scores = np.memmap(tf.name, dtype=np.float64)
    score = np.memmap(tf.name, shape=(), mode='r', dtype=np.float64)
    try:
        cross_val_score(clf, X, y, scoring=lambda est, X, y: score)
        # non-scalar should still fail
        assert_raises(ValueError, cross_val_score, clf, X, y,
                      scoring=lambda est, X, y: scores)
    finally:
        # Best effort to release the mmap file handles before deleting the
        # backing file under Windows
        scores, score = None, None
        for _ in range(3):
            try:
                os.unlink(tf.name)
                break
            except WindowsError:
                sleep(1.)


def test_permutation_test_score_pandas():
    # check permutation_test_score doesn't destroy pandas dataframe
    types = [(MockDataFrame, MockDataFrame)]
    try:
        from pandas import Series, DataFrame
        types.append((Series, DataFrame))
    except ImportError:
        pass
    for TargetType, InputFeatureType in types:
        # X dataframe, y series
        iris = load_iris()
        X, y = iris.data, iris.target
        X_df, y_ser = InputFeatureType(X), TargetType(y)
        check_df = lambda x: isinstance(x, InputFeatureType)
        check_series = lambda x: isinstance(x, TargetType)
        clf = CheckingClassifier(check_X=check_df, check_y=check_series)
        permutation_test_score(clf, X_df, y_ser)
bsd-3-clause
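A minimal, self-contained sketch of the naive split/fit/predict loop that several tests in the record above compare against cross_val_predict; it assumes scikit-learn 0.18+ (the model_selection API used in that file), and the estimator and splitter are arbitrary illustrative choices.

import numpy as np
from sklearn.datasets import load_iris
from sklearn.linear_model import LogisticRegression
from sklearn.model_selection import KFold, cross_val_predict

X, y = load_iris(return_X_y=True)
est = LogisticRegression()
cv = KFold(n_splits=5)

# Manual out-of-fold predictions...
manual = np.zeros_like(y)
for train, test in cv.split(X, y):
    est.fit(X[train], y[train])
    manual[test] = est.predict(X[test])

# ...match what cross_val_predict returns for the same deterministic splitter.
assert np.array_equal(manual, cross_val_predict(est, X, y, cv=cv))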
JingZhou0404/phantomjs
src/qt/qtbase/src/3rdparty/freetype/src/tools/docmaker/content.py
292
17668
#  Content (c) 2002, 2004, 2006, 2007, 2008, 2009
#    David Turner <david@freetype.org>
#
#  This file contains routines used to parse the content of documentation
#  comment blocks and build more structured objects out of them.
#

from sources import *
from utils import *

import string, re, sys  # sys is needed for the warning output in ContentProcessor.finish()


# this regular expression is used to detect code sequences. these
# are simply code fragments embedded in '{' and '}' like in:
#
#  {
#    x = y + z;
#    if ( zookoo == 2 )
#    {
#      foobar();
#    }
#  }
#
# note that indentation of the starting and ending accolades must be
# exactly the same. the code sequence can contain accolades at greater
# indentation
#
re_code_start = re.compile( r"(\s*){\s*$" )
re_code_end   = re.compile( r"(\s*)}\s*$" )


# this regular expression is used to isolate identifiers from
# other text
#
re_identifier = re.compile( r'(\w*)' )


# we collect macros ending in `_H'; while outputting the object data, we use
# this info together with the object's file location to emit the appropriate
# header file macro and name before the object itself
#
re_header_macro = re.compile( r'^#define\s{1,}(\w{1,}_H)\s{1,}<(.*)>' )


#############################################################################
#
# The DocCode class is used to store source code lines.
#
#   'self.lines' contains a set of source code lines that will be dumped as
#   HTML in a <PRE> tag.
#
#   The object is filled line by line by the parser; it strips the leading
#   "margin" space from each input line before storing it in 'self.lines'.
#
class DocCode:

    def __init__( self, margin, lines ):
        self.lines = []
        self.words = None

        # remove margin spaces
        for l in lines:
            if string.strip( l[:margin] ) == "":
                l = l[margin:]
            self.lines.append( l )

    def dump( self, prefix = "", width = 60 ):
        lines = self.dump_lines( 0, width )
        for l in lines:
            print prefix + l

    def dump_lines( self, margin = 0, width = 60 ):
        result = []
        for l in self.lines:
            result.append( " " * margin + l )
        return result


#############################################################################
#
# The DocPara class is used to store a "normal" text paragraph.
#
#   'self.words' contains the list of words that make up the paragraph
#
class DocPara:

    def __init__( self, lines ):
        self.lines = None
        self.words = []
        for l in lines:
            l = string.strip( l )
            self.words.extend( string.split( l ) )

    def dump( self, prefix = "", width = 60 ):
        lines = self.dump_lines( 0, width )
        for l in lines:
            print prefix + l

    def dump_lines( self, margin = 0, width = 60 ):
        cur    = ""  # current line
        col    = 0   # current width
        result = []

        for word in self.words:
            ln = len( word )
            if col > 0:
                ln = ln + 1

            if col + ln > width:
                result.append( " " * margin + cur )
                cur = word
                col = len( word )
            else:
                if col > 0:
                    cur = cur + " "
                cur = cur + word
                col = col + ln

        if col > 0:
            result.append( " " * margin + cur )

        return result


#############################################################################
#
# The DocField class is used to store a list containing either DocPara or
# DocCode objects.  Each DocField also has an optional "name" which is used
# when the object corresponds to a field or value definition
#
class DocField:

    def __init__( self, name, lines ):
        self.name  = name   # can be None for normal paragraphs/sources
        self.items = []     # list of items

        mode_none  = 0      # start parsing mode
        mode_code  = 1      # parsing code sequences
        mode_para  = 3      # parsing normal paragraph

        margin     = -1     # current code sequence indentation
        cur_lines  = []

        # now analyze the markup lines to see if they contain paragraphs,
        # code sequences or fields definitions
        #
        start = 0
        mode  = mode_none

        for l in lines:
            # are we parsing a code sequence ?
            if mode == mode_code:
                m = re_code_end.match( l )
                if m and len( m.group( 1 ) ) <= margin:
                    # that's it, we finished the code sequence
                    code = DocCode( 0, cur_lines )
                    self.items.append( code )
                    margin    = -1
                    cur_lines = []
                    mode      = mode_none
                else:
                    # nope, continue the code sequence
                    cur_lines.append( l[margin:] )
            else:
                # start of code sequence ?
                m = re_code_start.match( l )
                if m:
                    # save current lines
                    if cur_lines:
                        para = DocPara( cur_lines )
                        self.items.append( para )
                        cur_lines = []

                    # switch to code extraction mode
                    margin = len( m.group( 1 ) )
                    mode   = mode_code
                else:
                    if not string.split( l ) and cur_lines:
                        # if the line is empty, we end the current paragraph,
                        # if any
                        para = DocPara( cur_lines )
                        self.items.append( para )
                        cur_lines = []
                    else:
                        # otherwise, simply add the line to the current
                        # paragraph
                        cur_lines.append( l )

        if mode == mode_code:
            # unexpected end of code sequence
            code = DocCode( margin, cur_lines )
            self.items.append( code )
        elif cur_lines:
            para = DocPara( cur_lines )
            self.items.append( para )

    def dump( self, prefix = "" ):
        # note: the attribute is `name` (the original referenced a
        # non-existent `self.field` here)
        if self.name:
            print prefix + self.name + " ::"
            prefix = prefix + "----"

        first = 1
        for p in self.items:
            if not first:
                print ""
            p.dump( prefix )
            first = 0

    def dump_lines( self, margin = 0, width = 60 ):
        result = []
        nl     = None

        for p in self.items:
            if nl:
                result.append( "" )

            result.extend( p.dump_lines( margin, width ) )
            nl = 1

        return result


# this regular expression is used to detect field definitions
#
re_field = re.compile( r"\s*(\w*|\w(\w|\.)*\w)\s*::" )


class DocMarkup:

    def __init__( self, tag, lines ):
        self.tag    = string.lower( tag )
        self.fields = []

        cur_lines = []
        field     = None
        mode      = 0

        for l in lines:
            m = re_field.match( l )
            if m:
                # we detected the start of a new field definition

                # first, save the current one
                if cur_lines:
                    f = DocField( field, cur_lines )
                    self.fields.append( f )
                    cur_lines = []
                    field     = None

                field = m.group( 1 )   # record field name
                ln    = len( m.group( 0 ) )
                l     = " " * ln + l[ln:]
                cur_lines = [l]
            else:
                cur_lines.append( l )

        if field or cur_lines:
            f = DocField( field, cur_lines )
            self.fields.append( f )

    def get_name( self ):
        try:
            return self.fields[0].items[0].words[0]
        except:
            return None

    def get_start( self ):
        try:
            result = ""
            for word in self.fields[0].items[0].words:
                result = result + " " + word
            return result[1:]
        except:
            return "ERROR"

    def dump( self, margin ):
        print " " * margin + "<" + self.tag + ">"
        for f in self.fields:
            f.dump( " " )
        print " " * margin + "</" + self.tag + ">"


class DocChapter:

    def __init__( self, block ):
        self.block    = block
        self.sections = []
        if block:
            self.name  = block.name
            self.title = block.get_markup_words( "title" )
            self.order = block.get_markup_words( "sections" )
        else:
            self.name  = "Other"
            self.title = string.split( "Miscellaneous" )
            self.order = []


class DocSection:

    def __init__( self, name = "Other" ):
        self.name        = name
        self.blocks      = {}
        self.block_names = []  # ordered block names in section
        self.defs        = []
        self.abstract    = ""
        self.description = ""
        self.order       = []
        self.title       = "ERROR"
        self.chapter     = None

    def add_def( self, block ):
        self.defs.append( block )

    def add_block( self, block ):
        self.block_names.append( block.name )
        self.blocks[block.name] = block

    def process( self ):
        # look up one block that contains a valid section description
        for block in self.defs:
            title = block.get_markup_text( "title" )
            if title:
                self.title       = title
                self.abstract    = block.get_markup_words( "abstract" )
                self.description = block.get_markup_items( "description" )
                self.order       = block.get_markup_words( "order" )
                return

    def reorder( self ):
        self.block_names = sort_order_list( self.block_names, self.order )


class ContentProcessor:

    def __init__( self ):
        """initialize a block content processor"""
        self.reset()

        self.sections = {}    # dictionary of documentation sections
        self.section  = None  # current documentation section

        self.chapters = []    # list of chapters

        self.headers  = {}    # dictionary of header macros

    def set_section( self, section_name ):
        """set current section during parsing"""
        if not self.sections.has_key( section_name ):
            section = DocSection( section_name )
            self.sections[section_name] = section
            self.section                = section
        else:
            self.section = self.sections[section_name]

    def add_chapter( self, block ):
        chapter = DocChapter( block )
        self.chapters.append( chapter )

    def reset( self ):
        """reset the content processor for a new block"""
        self.markups      = []
        self.markup       = None
        self.markup_lines = []

    def add_markup( self ):
        """add a new markup section"""
        if self.markup and self.markup_lines:

            # get rid of last line of markup if it's empty
            marks = self.markup_lines
            if len( marks ) > 0 and not string.strip( marks[-1] ):
                self.markup_lines = marks[:-1]

            m = DocMarkup( self.markup, self.markup_lines )

            self.markups.append( m )

            self.markup       = None
            self.markup_lines = []

    def process_content( self, content ):
        """process a block content and return a list of DocMarkup objects
           corresponding to it"""
        markup       = None
        markup_lines = []
        first        = 1

        for line in content:
            found = None
            for t in re_markup_tags:
                m = t.match( line )
                if m:
                    found  = string.lower( m.group( 1 ) )
                    prefix = len( m.group( 0 ) )
                    line   = " " * prefix + line[prefix:]  # remove markup from line
                    break

            # is it the start of a new markup section ?
            if found:
                first = 0
                self.add_markup()  # add current markup content
                self.markup = found
                if len( string.strip( line ) ) > 0:
                    self.markup_lines.append( line )
            elif first == 0:
                self.markup_lines.append( line )

        self.add_markup()

        return self.markups

    def parse_sources( self, source_processor ):
        blocks = source_processor.blocks
        count  = len( blocks )

        for n in range( count ):
            source = blocks[n]
            if source.content:
                # this is a documentation comment, we need to catch
                # all following normal blocks in the "follow" list
                #
                follow = []
                m      = n + 1
                while m < count and not blocks[m].content:
                    follow.append( blocks[m] )
                    m = m + 1

                doc_block = DocBlock( source, follow, self )

    def finish( self ):
        # process all sections to extract their abstract, description
        # and ordered list of items
        #
        for sec in self.sections.values():
            sec.process()

        # process chapters to check that all sections are correctly
        # listed there
        for chap in self.chapters:
            for sec in chap.order:
                if self.sections.has_key( sec ):
                    section         = self.sections[sec]
                    section.chapter = chap
                    section.reorder()
                    chap.sections.append( section )
                else:
                    sys.stderr.write( "WARNING: chapter '" +
                                      chap.name + "' in " +
                                      chap.block.location() +
                                      " lists unknown section '" +
                                      sec + "'\n" )

        # check that all sections are in a chapter
        #
        others = []
        for sec in self.sections.values():
            if not sec.chapter:
                others.append( sec )

        # create a new special chapter for all remaining sections
        # when necessary
        #
        if others:
            chap = DocChapter( None )
            chap.sections = others
            self.chapters.append( chap )


class DocBlock:

    def __init__( self, source, follow, processor ):
        processor.reset()

        self.source  = source
        self.code    = []
        self.type    = "ERRTYPE"
        self.name    = "ERRNAME"
        self.section = processor.section
        self.markups = processor.process_content( source.content )

        # compute block type from first markup tag
        try:
            self.type = self.markups[0].tag
        except:
            pass

        # compute block name from first markup paragraph
        try:
            markup = self.markups[0]
            para   = markup.fields[0].items[0]
            name   = para.words[0]
            m = re_identifier.match( name )
            if m:
                name = m.group( 1 )
            self.name = name
        except:
            pass

        if self.type == "section":
            # detect new section starts
            processor.set_section( self.name )
            processor.section.add_def( self )
        elif self.type == "chapter":
            # detect new chapter
            processor.add_chapter( self )
        else:
            processor.section.add_block( self )

        # now, compute the source lines relevant to this documentation
        # block. We keep normal comments in for obvious reasons (??)
        source = []
        for b in follow:
            if b.format:
                break
            for l in b.lines:
                # collect header macro definitions
                m = re_header_macro.match( l )
                if m:
                    processor.headers[m.group( 2 )] = m.group( 1 )

                # we use "/* */" as a separator
                if re_source_sep.match( l ):
                    break
                source.append( l )

        # now strip the leading and trailing empty lines from the sources
        start = 0
        end   = len( source ) - 1

        while start < end and not string.strip( source[start] ):
            start = start + 1

        while start < end and not string.strip( source[end] ):
            end = end - 1

        if start == end and not string.strip( source[start] ):
            self.code = []
        else:
            self.code = source[start:end + 1]

    def location( self ):
        return self.source.location()

    def get_markup( self, tag_name ):
        """return the DocMarkup corresponding to a given tag in a block"""
        for m in self.markups:
            if m.tag == string.lower( tag_name ):
                return m
        return None

    def get_markup_name( self, tag_name ):
        """return the name of a given primary markup in a block"""
        try:
            m = self.get_markup( tag_name )
            return m.get_name()
        except:
            return None

    def get_markup_words( self, tag_name ):
        try:
            m = self.get_markup( tag_name )
            return m.fields[0].items[0].words
        except:
            return []

    def get_markup_text( self, tag_name ):
        result = self.get_markup_words( tag_name )
        return string.join( result )

    def get_markup_items( self, tag_name ):
        try:
            m = self.get_markup( tag_name )
            return m.fields[0].items
        except:
            return None

# eof
bsd-3-clause
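As a quick illustration of the field-definition syntax that the record's `re_field` pattern recognizes, here is a sketch reusing the regex verbatim on a made-up docmaker line:

import re

re_field = re.compile(r"\s*(\w*|\w(\w|\.)*\w)\s*::")

m = re_field.match("  num_glyphs :: The number of glyphs in the face.")
print(m.group(1))  # -> 'num_glyphs' (the field name captured before '::')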
frictionlessdata/dpr-api
app/auth/authorization.py
2
4137
# -*- coding: utf-8 -*-
from __future__ import division
from __future__ import print_function
from __future__ import absolute_import
from __future__ import unicode_literals

from app.package.models import Package
from app.profile.models import User, Publisher, PublisherUser, UserRoleEnum

roles_action_mappings = {
    "Package": {
        "Owner": ["Package::Read", "Package::Create", "Package::Delete",
                  "Package::Undelete", "Package::Purge", "Package::Update",
                  "Package::Tag"],
        "Editor": ["Package::Read", "Package::Create", "Package::Delete",
                   "Package::Undelete", "Package::Update", "Package::Tag"],
        "Viewer": ["Package::Read"]
    },
    "Publisher": {
        "Owner": ["Publisher::AddMember", "Publisher::RemoveMember",
                  "Publisher::Create", "Publisher::Read",
                  "Publisher::Delete", "Publisher::Update",
                  "Publisher::ViewMemberList"],
        "Editor": ["Publisher::ViewMemberList", "Publisher::AddMember",
                   "Publisher::RemoveMember", "Publisher::Read"],
        "Viewer": ["Publisher::Read"]
    },
    "System": {
        "LoggedIn": ["Package::Create", "Publisher::Create"],
        "Anonymous": ["Package::Read", "Publisher::Read"],
        "Sysadmin": ["Package::Read", "Package::Create", "Package::Delete",
                     "Package::Undelete", "Package::Purge",
                     "Package::Update", "Package::Tag",
                     "Publisher::AddMember", "Publisher::RemoveMember",
                     "Publisher::Create", "Publisher::Read",
                     "Publisher::Delete", "Publisher::Update",
                     "Publisher::ViewMemberList"]
    }
}


def is_authorize(user_id, entity, action):
    actions = get_user_actions(user_id, entity)
    return action in actions


def get_user_actions(user_id, entity):
    local_roles = []
    user = None
    if user_id is not None:
        user = User.query.get(user_id)
    if user is None:
        if entity is not None and entity.private is False:
            local_roles.extend(roles_action_mappings['System']['Anonymous'])
    else:
        if user.sysadmin is True:
            local_roles.extend(roles_action_mappings['System']['Sysadmin'])
        else:
            if isinstance(entity, Publisher):
                local_roles.extend(get_publisher_roles(user_id=user_id,
                                                       entity=entity))
            if isinstance(entity, Package):
                local_roles.extend(get_package_roles(user_id=user_id,
                                                     entity=entity))
            elif entity is None:
                local_roles.extend(
                    roles_action_mappings['System']['LoggedIn'])
    return local_roles


def get_publisher_roles(user_id, entity):
    role_parent = 'Publisher'
    publisher_roles = []
    try:
        user_role = PublisherUser.query.join(User).join(Publisher)\
            .filter(User.id == user_id, Publisher.name == entity.name).one()
        if user_role.role == UserRoleEnum.owner:
            publisher_roles.extend(
                roles_action_mappings[role_parent]['Owner'])
        elif user_role.role == UserRoleEnum.member:
            publisher_roles.extend(
                roles_action_mappings[role_parent]['Editor'])
    except:
        publisher_roles.extend(roles_action_mappings['System']['LoggedIn'])
        if entity.private is not True:
            publisher_roles.extend(
                roles_action_mappings[role_parent]['Viewer'])
    return publisher_roles


def get_package_roles(user_id, entity):
    role_parent = 'Package'
    package_roles = []
    try:
        user_role = PublisherUser.query.join(User).join(Publisher)\
            .filter(User.id == user_id,
                    Publisher.name == entity.publisher.name)\
            .one()
        if user_role.role == UserRoleEnum.owner:
            package_roles.extend(roles_action_mappings[role_parent]['Owner'])
        elif user_role.role == UserRoleEnum.member:
            package_roles.extend(
                roles_action_mappings[role_parent]['Editor'])
    except:
        package_roles.extend(roles_action_mappings['System']['LoggedIn'])
        if entity.private is not True:
            package_roles.extend(roles_action_mappings[role_parent]['Viewer'])
    return package_roles
mit
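A self-contained sketch of the lookup that `is_authorize()` performs against `roles_action_mappings` in the record above; the two-role mapping here is a trimmed stand-in for the full table, not the dpr-api code itself.

mappings = {"Package": {"Viewer": ["Package::Read"],
                        "Editor": ["Package::Read", "Package::Update"]}}

def is_allowed(role, action, parent="Package"):
    # Mirrors the final `action in actions` membership test above.
    return action in mappings[parent][role]

print(is_allowed("Viewer", "Package::Read"))    # True
print(is_allowed("Viewer", "Package::Update"))  # False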
jiangangwu/517shangke
app/api_1_0/authentication.py
1
1239
from flask import g, jsonify
from flask_httpauth import HTTPBasicAuth
from ..models import User, AnonymousUser
from . import api
from .errors import unauthorized, forbidden

auth = HTTPBasicAuth()


@auth.verify_password
def verify_password(email_or_token, password):
    if email_or_token == '':
        g.current_user = AnonymousUser()
        return True
    if password == '':
        g.current_user = User.verify_auth_token(email_or_token)
        g.token_used = True
        return g.current_user is not None
    user = User.query.filter_by(email=email_or_token).first()
    if not user:
        return False
    g.current_user = user
    g.token_used = False
    return user.verify_password(password)


@auth.error_handler
def auth_error():
    return unauthorized('Not authorized')


@api.before_request
@auth.login_required
def before_request():
    if not g.current_user.is_anonymous and \
            not g.current_user.confirmed:
        return forbidden('Unconfirmed account')


@api.route('/token')
def get_token():
    if g.current_user.is_anonymous or g.token_used:
        return unauthorized('Invalid credentials')
    return jsonify({'token': g.current_user.generate_auth_token(
        expiration=3600), 'expiration': 3600})
gpl-3.0
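A hypothetical client-side walk-through of the flow above: log in once with email and password to obtain a token, then send the token as the username with an empty password. The base URL and the `/posts/` endpoint are assumptions; only `/token` appears in the file itself.

import requests

BASE = "http://localhost:5000/api"  # assumed prefix for the `api` blueprint

r = requests.get(BASE + "/token", auth=("user@example.com", "secret"))
token = r.json()["token"]

# The token replaces the email; the password is left empty, which is the
# branch verify_password() above treats as token authentication.
r = requests.get(BASE + "/posts/", auth=(token, ""))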
joshuajan/odoo
openerp/addons/base/tests/test_res_lang.py
377
2104
import unittest2

import openerp.tests.common as common


class test_res_lang(common.TransactionCase):

    def test_00_intersperse(self):
        from openerp.addons.base.res.res_lang import intersperse

        assert intersperse("", []) == ("", 0)
        assert intersperse("0", []) == ("0", 0)
        assert intersperse("012", []) == ("012", 0)
        assert intersperse("1", []) == ("1", 0)
        assert intersperse("12", []) == ("12", 0)
        assert intersperse("123", []) == ("123", 0)
        assert intersperse("1234", []) == ("1234", 0)
        assert intersperse("123456789", []) == ("123456789", 0)
        assert intersperse("&ab%#@1", []) == ("&ab%#@1", 0)

        assert intersperse("0", []) == ("0", 0)
        assert intersperse("0", [1]) == ("0", 0)
        assert intersperse("0", [2]) == ("0", 0)
        assert intersperse("0", [200]) == ("0", 0)

        assert intersperse("12345678", [1], '.') == ('1234567.8', 1)
        assert intersperse("12345678", [1], '.') == ('1234567.8', 1)
        assert intersperse("12345678", [2], '.') == ('123456.78', 1)
        assert intersperse("12345678", [2, 1], '.') == ('12345.6.78', 2)
        assert intersperse("12345678", [2, 0], '.') == ('12.34.56.78', 3)
        assert intersperse("12345678", [-1, 2], '.') == ('12345678', 0)
        assert intersperse("12345678", [2, -1], '.') == ('123456.78', 1)
        assert intersperse("12345678", [2, 0, 1], '.') == ('12.34.56.78', 3)
        assert intersperse("12345678", [2, 0, 0], '.') == ('12.34.56.78', 3)
        assert intersperse("12345678", [2, 0, -1], '.') == ('12.34.56.78', 3)
        assert intersperse("12345678", [3, 3, 3, 3], '.') == ('12.345.678', 2)

        assert intersperse("abc1234567xy", [2], '.') == ('abc1234567.xy', 1)
        assert intersperse("abc1234567xy8", [2], '.') == ('abc1234567x.y8', 1)  # ... w.r.t. here.
        assert intersperse("abc12", [3], '.') == ('abc12', 0)
        assert intersperse("abc12", [2], '.') == ('abc12', 0)
        assert intersperse("abc12", [1], '.') == ('abc1.2', 1)

# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
agpl-3.0
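Illustrative only: a tiny right-to-left splitter that reproduces the standard thousands grouping pinned down by the `[3, 3, 3, 3]` case above. It sketches the behaviour being tested, not the openerp `intersperse` implementation.

def group_thousands(digits, sep='.'):
    # Split the digit string into groups of three, counting from the right.
    groups = []
    while len(digits) > 3:
        digits, tail = digits[:-3], digits[-3:]
        groups.insert(0, tail)
    groups.insert(0, digits)
    return sep.join(groups)

assert group_thousands("12345678") == "12.345.678"  # matches the [3, 3, 3, 3] case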
CrankWheel/grit-i18n
grit/format/policy_templates/writers/admx_writer_unittest.py
41
21095
#!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""Unittests for grit.format.policy_templates.writers.admx_writer."""

import os
import sys
import unittest

if __name__ == '__main__':
  sys.path.append(os.path.join(os.path.dirname(__file__), '../../../..'))

from grit.format.policy_templates.writers import admx_writer
from grit.format.policy_templates.writers import xml_writer_base_unittest
from xml.dom import minidom


class AdmxWriterUnittest(xml_writer_base_unittest.XmlWriterBaseTest):

  def _CreateDocumentElement(self):
    dom_impl = minidom.getDOMImplementation('')
    doc = dom_impl.createDocument(None, 'root', None)
    return doc.documentElement

  def setUp(self):
    # Writer configuration. This dictionary contains parameters used by the
    # ADMX Writer.
    config = {
        'win_group_policy_class': 'TestClass',
        'win_supported_os': 'SUPPORTED_TESTOS',
        'win_reg_mandatory_key_name': 'Software\\Policies\\Test',
        'win_reg_recommended_key_name':
            'Software\\Policies\\Test\\Recommended',
        'win_mandatory_category_path': ['test_category'],
        'win_recommended_category_path': ['test_recommended_category'],
        'admx_namespace': 'ADMXWriter.Test.Namespace',
        'admx_prefix': 'test_prefix',
        'build': 'test_product',
    }
    self.writer = admx_writer.GetWriter(config)
    self.writer.Init()

  def _GetPoliciesElement(self, doc):
    node_list = doc.getElementsByTagName('policies')
    self.assertTrue(node_list.length == 1)
    return node_list.item(0)

  def _GetCategoriesElement(self, doc):
    node_list = doc.getElementsByTagName('categories')
    self.assertTrue(node_list.length == 1)
    return node_list.item(0)

  def testEmpty(self):
    self.writer.BeginTemplate()
    self.writer.EndTemplate()

    output = self.writer.GetTemplateText()
    expected_output = (
        '<?xml version="1.0" ?>\n'
        '<policyDefinitions revision="1.0" schemaVersion="1.0">\n'
        '  <policyNamespaces>\n'
        '    <target namespace="ADMXWriter.Test.Namespace"'
        ' prefix="test_prefix"/>\n'
        '    <using namespace="Microsoft.Policies.Windows" prefix="windows"/>\n'
        '  </policyNamespaces>\n'
        '  <resources minRequiredRevision="1.0"/>\n'
        '  <supportedOn>\n'
        '    <definitions>\n'
        '      <definition displayName="'
        '$(string.SUPPORTED_TESTOS)" name="SUPPORTED_TESTOS"/>\n'
        '    </definitions>\n'
        '  </supportedOn>\n'
        '  <categories>\n'
        '    <category displayName="$(string.test_category)"'
        ' name="test_category"/>\n'
        '    <category displayName="$(string.test_recommended_category)"'
        ' name="test_recommended_category"/>\n'
        '  </categories>\n'
        '  <policies/>\n'
        '</policyDefinitions>')
    self.AssertXMLEquals(output, expected_output)

  def testEmptyVersion(self):
    self.writer.config['version'] = '39.0.0.0'
    self.writer.BeginTemplate()
    self.writer.EndTemplate()

    output = self.writer.GetTemplateText()
    expected_output = (
        '<?xml version="1.0" ?>\n'
        '<policyDefinitions revision="1.0" schemaVersion="1.0">\n'
        '  <!--test_product version: 39.0.0.0-->\n'
        '  <policyNamespaces>\n'
        '    <target namespace="ADMXWriter.Test.Namespace"'
        ' prefix="test_prefix"/>\n'
        '    <using namespace="Microsoft.Policies.Windows" prefix="windows"/>\n'
        '  </policyNamespaces>\n'
        '  <resources minRequiredRevision="1.0"/>\n'
        '  <supportedOn>\n'
        '    <definitions>\n'
        '      <definition displayName="'
        '$(string.SUPPORTED_TESTOS)" name="SUPPORTED_TESTOS"/>\n'
        '    </definitions>\n'
        '  </supportedOn>\n'
        '  <categories>\n'
        '    <category displayName="$(string.test_category)"'
        ' name="test_category"/>\n'
        '    <category displayName="$(string.test_recommended_category)"'
        ' name="test_recommended_category"/>\n'
        '  </categories>\n'
        '  <policies/>\n'
        '</policyDefinitions>')
    self.AssertXMLEquals(output, expected_output)

  def testEmptyPolicyGroup(self):
    empty_policy_group = {
        'name': 'PolicyGroup',
        'policies': []
    }
    # Initialize writer to write a policy group.
    self.writer.BeginTemplate()
    # Write policy group
    self.writer.BeginPolicyGroup(empty_policy_group)
    self.writer.EndPolicyGroup()

    output = self.GetXMLOfChildren(self._GetPoliciesElement(self.writer._doc))
    expected_output = ''
    self.AssertXMLEquals(output, expected_output)

    output = self.GetXMLOfChildren(
        self._GetCategoriesElement(self.writer._doc))
    expected_output = (
        '<category displayName="$(string.test_category)"'
        ' name="test_category"/>\n'
        '<category displayName="$(string.test_recommended_category)"'
        ' name="test_recommended_category"/>\n'
        '<category displayName="$(string.PolicyGroup_group)"'
        ' name="PolicyGroup">\n'
        '  <parentCategory ref="test_category"/>\n'
        '</category>')
    self.AssertXMLEquals(output, expected_output)

  def testPolicyGroup(self):
    empty_policy_group = {
        'name': 'PolicyGroup',
        'policies': [
            {'name': 'PolicyStub2', 'type': 'main'},
            {'name': 'PolicyStub1', 'type': 'main'},
        ]
    }
    # Initialize writer to write a policy group.
    self.writer.BeginTemplate()
    # Write policy group
    self.writer.BeginPolicyGroup(empty_policy_group)
    self.writer.EndPolicyGroup()

    output = self.GetXMLOfChildren(self._GetPoliciesElement(self.writer._doc))
    expected_output = ''
    self.AssertXMLEquals(output, expected_output)

    output = self.GetXMLOfChildren(
        self._GetCategoriesElement(self.writer._doc))
    expected_output = (
        '<category displayName="$(string.test_category)"'
        ' name="test_category"/>\n'
        '<category displayName="$(string.test_recommended_category)"'
        ' name="test_recommended_category"/>\n'
        '<category displayName="$(string.PolicyGroup_group)"'
        ' name="PolicyGroup">\n'
        '  <parentCategory ref="test_category"/>\n'
        '</category>')
    self.AssertXMLEquals(output, expected_output)

  def _initWriterForPolicy(self, writer, policy):
    '''Initializes the writer to write the given policy next.'''
    policy_group = {
        'name': 'PolicyGroup',
        'policies': [policy]
    }
    writer.BeginTemplate()
    writer.BeginPolicyGroup(policy_group)

  def testMainPolicy(self):
    main_policy = {
        'name': 'DummyMainPolicy',
        'type': 'main',
    }

    self._initWriterForPolicy(self.writer, main_policy)

    self.writer.WritePolicy(main_policy)

    output = self.GetXMLOfChildren(self._GetPoliciesElement(self.writer._doc))
    expected_output = (
        '<policy class="TestClass" displayName="$(string.DummyMainPolicy)"'
        ' explainText="$(string.DummyMainPolicy_Explain)"'
        ' key="Software\\Policies\\Test" name="DummyMainPolicy"'
        ' presentation="$(presentation.DummyMainPolicy)"'
        ' valueName="DummyMainPolicy">\n'
        '  <parentCategory ref="PolicyGroup"/>\n'
        '  <supportedOn ref="SUPPORTED_TESTOS"/>\n'
        '  <enabledValue>\n'
        '    <decimal value="1"/>\n'
        '  </enabledValue>\n'
        '  <disabledValue>\n'
        '    <decimal value="0"/>\n'
        '  </disabledValue>\n'
        '</policy>')
    self.AssertXMLEquals(output, expected_output)

  def testRecommendedPolicy(self):
    main_policy = {
        'name': 'DummyMainPolicy',
        'type': 'main',
    }

    policy_group = {
        'name': 'PolicyGroup',
        'policies': [main_policy],
    }
    self.writer.BeginTemplate()
    self.writer.BeginRecommendedPolicyGroup(policy_group)

    self.writer.WriteRecommendedPolicy(main_policy)

    output = self.GetXMLOfChildren(self._GetPoliciesElement(self.writer._doc))
    expected_output = (
        '<policy class="TestClass" displayName="$(string.DummyMainPolicy)"'
        ' explainText="$(string.DummyMainPolicy_Explain)"'
        ' key="Software\\Policies\\Test\\Recommended"'
        ' name="DummyMainPolicy_recommended"'
        ' presentation="$(presentation.DummyMainPolicy)"'
        ' valueName="DummyMainPolicy">\n'
        '  <parentCategory ref="PolicyGroup_recommended"/>\n'
        '  <supportedOn ref="SUPPORTED_TESTOS"/>\n'
        '  <enabledValue>\n'
        '    <decimal value="1"/>\n'
        '  </enabledValue>\n'
        '  <disabledValue>\n'
        '    <decimal value="0"/>\n'
        '  </disabledValue>\n'
        '</policy>')
    self.AssertXMLEquals(output, expected_output)

  def testRecommendedOnlyPolicy(self):
    main_policy = {
        'name': 'DummyMainPolicy',
        'type': 'main',
        'features': {
            'can_be_recommended': True,
            'can_be_mandatory': False,
        }
    }

    policy_group = {
        'name': 'PolicyGroup',
        'policies': [main_policy],
    }
    self.writer.BeginTemplate()
    self.writer.BeginRecommendedPolicyGroup(policy_group)

    self.writer.WritePolicy(main_policy)
    self.writer.WriteRecommendedPolicy(main_policy)

    output = self.GetXMLOfChildren(self._GetPoliciesElement(self.writer._doc))
    expected_output = (
        '<policy class="TestClass" displayName="$(string.DummyMainPolicy)"'
        ' explainText="$(string.DummyMainPolicy_Explain)"'
        ' key="Software\\Policies\\Test\\Recommended"'
        ' name="DummyMainPolicy_recommended"'
        ' presentation="$(presentation.DummyMainPolicy)"'
        ' valueName="DummyMainPolicy">\n'
        '  <parentCategory ref="PolicyGroup_recommended"/>\n'
        '  <supportedOn ref="SUPPORTED_TESTOS"/>\n'
        '  <enabledValue>\n'
        '    <decimal value="1"/>\n'
        '  </enabledValue>\n'
        '  <disabledValue>\n'
        '    <decimal value="0"/>\n'
        '  </disabledValue>\n'
        '</policy>')
    self.AssertXMLEquals(output, expected_output)

  def testStringPolicy(self):
    string_policy = {
        'name': 'SampleStringPolicy',
        'type': 'string',
    }
    self._initWriterForPolicy(self.writer, string_policy)

    self.writer.WritePolicy(string_policy)
    output = self.GetXMLOfChildren(self._GetPoliciesElement(self.writer._doc))
    expected_output = (
        '<policy class="TestClass" displayName="$(string.SampleStringPolicy)"'
        ' explainText="$(string.SampleStringPolicy_Explain)"'
        ' key="Software\\Policies\\Test" name="SampleStringPolicy"'
        ' presentation="$(presentation.SampleStringPolicy)">\n'
        '  <parentCategory ref="PolicyGroup"/>\n'
        '  <supportedOn ref="SUPPORTED_TESTOS"/>\n'
        '  <elements>\n'
        '    <text id="SampleStringPolicy" maxLength="1000000"'
        ' valueName="SampleStringPolicy"/>\n'
        '  </elements>\n'
        '</policy>')
    self.AssertXMLEquals(output, expected_output)

  def testIntPolicy(self):
    int_policy = {
        'name': 'SampleIntPolicy',
        'type': 'int',
    }
    self._initWriterForPolicy(self.writer, int_policy)

    self.writer.WritePolicy(int_policy)
    output = self.GetXMLOfChildren(self._GetPoliciesElement(self.writer._doc))
    expected_output = (
        '<policy class="TestClass" displayName="$(string.SampleIntPolicy)"'
        ' explainText="$(string.SampleIntPolicy_Explain)"'
        ' key="Software\\Policies\\Test" name="SampleIntPolicy"'
        ' presentation="$(presentation.SampleIntPolicy)">\n'
        '  <parentCategory ref="PolicyGroup"/>\n'
        '  <supportedOn ref="SUPPORTED_TESTOS"/>\n'
        '  <elements>\n'
        '    <decimal id="SampleIntPolicy" maxValue="2000000000" '
        'valueName="SampleIntPolicy"/>\n'
        '  </elements>\n'
        '</policy>')
    self.AssertXMLEquals(output, expected_output)

  def testIntEnumPolicy(self):
    enum_policy = {
        'name': 'SampleEnumPolicy',
        'type': 'int-enum',
        'items': [
            {'name': 'item_1', 'value': 0},
            {'name': 'item_2', 'value': 1},
        ]
    }

    self._initWriterForPolicy(self.writer, enum_policy)
    self.writer.WritePolicy(enum_policy)
    output = self.GetXMLOfChildren(self._GetPoliciesElement(self.writer._doc))
    expected_output = (
        '<policy class="TestClass" displayName="$(string.SampleEnumPolicy)"'
        ' explainText="$(string.SampleEnumPolicy_Explain)"'
        ' key="Software\\Policies\\Test" name="SampleEnumPolicy"'
        ' presentation="$(presentation.SampleEnumPolicy)">\n'
        '  <parentCategory ref="PolicyGroup"/>\n'
        '  <supportedOn ref="SUPPORTED_TESTOS"/>\n'
        '  <elements>\n'
        '    <enum id="SampleEnumPolicy" valueName="SampleEnumPolicy">\n'
        '      <item displayName="$(string.item_1)">\n'
        '        <value>\n'
        '          <decimal value="0"/>\n'
        '        </value>\n'
        '      </item>\n'
        '      <item displayName="$(string.item_2)">\n'
        '        <value>\n'
        '          <decimal value="1"/>\n'
        '        </value>\n'
        '      </item>\n'
        '    </enum>\n'
        '  </elements>\n'
        '</policy>')
    self.AssertXMLEquals(output, expected_output)

  def testStringEnumPolicy(self):
    enum_policy = {
        'name': 'SampleEnumPolicy',
        'type': 'string-enum',
        'items': [
            {'name': 'item_1', 'value': 'one'},
            {'name': 'item_2', 'value': 'two'},
        ]
    }

    # This test is different than the others because it also tests that space
    # usage inside <string> nodes is correct.
    dom_impl = minidom.getDOMImplementation('')
    self.writer._doc = dom_impl.createDocument(None, 'policyDefinitions',
                                               None)
    self.writer._active_policies_elem = self.writer._doc.documentElement
    self.writer._active_mandatory_policy_group_name = 'PolicyGroup'
    self.writer.WritePolicy(enum_policy)
    output = self.writer.GetTemplateText()
    expected_output = (
        '<?xml version="1.0" ?>\n'
        '<policyDefinitions>\n'
        '  <policy class="TestClass" displayName="$(string.SampleEnumPolicy)"'
        ' explainText="$(string.SampleEnumPolicy_Explain)"'
        ' key="Software\\Policies\\Test" name="SampleEnumPolicy"'
        ' presentation="$(presentation.SampleEnumPolicy)">\n'
        '    <parentCategory ref="PolicyGroup"/>\n'
        '    <supportedOn ref="SUPPORTED_TESTOS"/>\n'
        '    <elements>\n'
        '      <enum id="SampleEnumPolicy" valueName="SampleEnumPolicy">\n'
        '        <item displayName="$(string.item_1)">\n'
        '          <value>\n'
        '            <string>one</string>\n'
        '          </value>\n'
        '        </item>\n'
        '        <item displayName="$(string.item_2)">\n'
        '          <value>\n'
        '            <string>two</string>\n'
        '          </value>\n'
        '        </item>\n'
        '      </enum>\n'
        '    </elements>\n'
        '  </policy>\n'
        '</policyDefinitions>')
    self.AssertXMLEquals(output, expected_output)

  def testListPolicy(self):
    list_policy = {
        'name': 'SampleListPolicy',
        'type': 'list',
    }
    self._initWriterForPolicy(self.writer, list_policy)
    self.writer.WritePolicy(list_policy)

    output = self.GetXMLOfChildren(self._GetPoliciesElement(self.writer._doc))
    expected_output = (
        '<policy class="TestClass" displayName="$(string.SampleListPolicy)"'
        ' explainText="$(string.SampleListPolicy_Explain)"'
        ' key="Software\\Policies\\Test" name="SampleListPolicy"'
        ' presentation="$(presentation.SampleListPolicy)">\n'
        '  <parentCategory ref="PolicyGroup"/>\n'
        '  <supportedOn ref="SUPPORTED_TESTOS"/>\n'
        '  <elements>\n'
        '    <list id="SampleListPolicyDesc"'
        ' key="Software\Policies\Test\SampleListPolicy" valuePrefix=""/>\n'
        '  </elements>\n'
        '</policy>')
    self.AssertXMLEquals(output, expected_output)

  def testStringEnumListPolicy(self):
    list_policy = {
        'name': 'SampleListPolicy',
        'type': 'string-enum-list',
        'items': [
            {'name': 'item_1', 'value': 'one'},
            {'name': 'item_2', 'value': 'two'},
        ]
    }
    self._initWriterForPolicy(self.writer, list_policy)
    self.writer.WritePolicy(list_policy)

    output = self.GetXMLOfChildren(self._GetPoliciesElement(self.writer._doc))
    expected_output = (
        '<policy class="TestClass" displayName="$(string.SampleListPolicy)"'
        ' explainText="$(string.SampleListPolicy_Explain)"'
        ' key="Software\\Policies\\Test" name="SampleListPolicy"'
        ' presentation="$(presentation.SampleListPolicy)">\n'
        '  <parentCategory ref="PolicyGroup"/>\n'
        '  <supportedOn ref="SUPPORTED_TESTOS"/>\n'
        '  <elements>\n'
        '    <list id="SampleListPolicyDesc"'
        ' key="Software\Policies\Test\SampleListPolicy" valuePrefix=""/>\n'
        '  </elements>\n'
        '</policy>')
    self.AssertXMLEquals(output, expected_output)

  def testDictionaryPolicy(self):
    dict_policy = {
        'name': 'SampleDictionaryPolicy',
        'type': 'dict',
    }
    self._initWriterForPolicy(self.writer, dict_policy)

    self.writer.WritePolicy(dict_policy)
    output = self.GetXMLOfChildren(self._GetPoliciesElement(self.writer._doc))
    expected_output = (
        '<policy class="TestClass" displayName="$(string.'
        'SampleDictionaryPolicy)"'
        ' explainText="$(string.SampleDictionaryPolicy_Explain)"'
        ' key="Software\\Policies\\Test" name="SampleDictionaryPolicy"'
        ' presentation="$(presentation.SampleDictionaryPolicy)">\n'
        '  <parentCategory ref="PolicyGroup"/>\n'
        '  <supportedOn ref="SUPPORTED_TESTOS"/>\n'
        '  <elements>\n'
        '    <text id="SampleDictionaryPolicy" maxLength="1000000"'
        ' valueName="SampleDictionaryPolicy"/>\n'
        '  </elements>\n'
        '</policy>')
    self.AssertXMLEquals(output, expected_output)

  def testPlatform(self):
    # Test that the writer correctly chooses policies of platform Windows.
    self.assertTrue(self.writer.IsPolicySupported({
        'supported_on': [
            {'platforms': ['win', 'zzz']}, {'platforms': ['aaa']}
        ]
    }))
    self.assertFalse(self.writer.IsPolicySupported({
        'supported_on': [
            {'platforms': ['mac', 'linux']}, {'platforms': ['aaa']}
        ]
    }))

  def testStringEncodings(self):
    enum_policy_a = {
        'name': 'SampleEnumPolicy.A',
        'type': 'string-enum',
        'items': [
            {'name': 'tls1.2', 'value': 'tls1.2'}
        ]
    }
    enum_policy_b = {
        'name': 'SampleEnumPolicy.B',
        'type': 'string-enum',
        'items': [
            {'name': 'tls1.2', 'value': 'tls1.2'}
        ]
    }

    dom_impl = minidom.getDOMImplementation('')
    self.writer._doc = dom_impl.createDocument(None, 'policyDefinitions',
                                               None)
    self.writer._active_policies_elem = self.writer._doc.documentElement
    self.writer._active_mandatory_policy_group_name = 'PolicyGroup'
    self.writer.WritePolicy(enum_policy_a)
    self.writer.WritePolicy(enum_policy_b)
    output = self.writer.GetTemplateText()
    expected_output = (
        '<?xml version="1.0" ?>\n'
        '<policyDefinitions>\n'
        '  <policy class="TestClass" displayName="$(string.SampleEnumPolicy_A)"'
        ' explainText="$(string.SampleEnumPolicy_A_Explain)"'
        ' key="Software\\Policies\\Test" name="SampleEnumPolicy.A"'
        ' presentation="$(presentation.SampleEnumPolicy.A)">\n'
        '    <parentCategory ref="PolicyGroup"/>\n'
        '    <supportedOn ref="SUPPORTED_TESTOS"/>\n'
        '    <elements>\n'
        '      <enum id="SampleEnumPolicy.A" valueName="SampleEnumPolicy.A">\n'
        '        <item displayName="$(string.tls1_2)">\n'
        '          <value>\n'
        '            <string>tls1.2</string>\n'
        '          </value>\n'
        '        </item>\n'
        '      </enum>\n'
        '    </elements>\n'
        '  </policy>\n'
        '  <policy class="TestClass" displayName="$(string.SampleEnumPolicy_B)"'
        ' explainText="$(string.SampleEnumPolicy_B_Explain)"'
        ' key="Software\\Policies\\Test" name="SampleEnumPolicy.B"'
        ' presentation="$(presentation.SampleEnumPolicy.B)">\n'
        '    <parentCategory ref="PolicyGroup"/>\n'
        '    <supportedOn ref="SUPPORTED_TESTOS"/>\n'
        '    <elements>\n'
        '      <enum id="SampleEnumPolicy.B" valueName="SampleEnumPolicy.B">\n'
        '        <item displayName="$(string.tls1_2)">\n'
        '          <value>\n'
        '            <string>tls1.2</string>\n'
        '          </value>\n'
        '        </item>\n'
        '      </enum>\n'
        '    </elements>\n'
        '  </policy>\n'
        '</policyDefinitions>')
    self.AssertXMLEquals(output, expected_output)


if __name__ == '__main__':
  unittest.main()
bsd-2-clause
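For orientation, a sketch of how a single ADMX <policy> node can be assembled with xml.dom.minidom, mirroring in miniature the structure the expected-output strings above assert on; the attribute values are illustrative, and this is not the admx_writer implementation.

from xml.dom import minidom

doc = minidom.getDOMImplementation('').createDocument(
    None, 'policyDefinitions', None)

policy = doc.createElement('policy')
policy.setAttribute('class', 'TestClass')
policy.setAttribute('name', 'DummyMainPolicy')

parent = doc.createElement('parentCategory')
parent.setAttribute('ref', 'PolicyGroup')
policy.appendChild(parent)

doc.documentElement.appendChild(policy)
print(doc.documentElement.toprettyxml(indent='  '))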
YannThorimbert/Thorpy-1.4
thorpy/miscgui/application.py
5
3721
"""Module defining Application class.""" import os import time import pygame _CURRENT_MENU = None _OLD_MENUS = [_CURRENT_MENU] _CURRENT_APPLICATION = None _SCREEN = None DEBUG_MODE = False SHOW_FPS = False TICK_BUSY = False USE_IMG_DICT = True _loaded = {} class Application(object): """An Application object handles the pygame variables needed to create a graphical program, such as screen, screen's size window caption and window location. """ def __init__(self, size, caption=None, icon="thorpy", center=True, flags=0): """This object handles the pygame variables needed to create a graphical program, such as screen, screen's size window caption and window location. <size> : a 2-sequence containing the size in pixels of the window to create. <caption> : the caption of the window. None means no caption. <icon> : path to the the icon image of the window. 'thorpy' : default thorpy icon 'pygame' : default pygame icon None : no icon <center> : centers the window on the computer screen. <flags> : flags passed to the pygame display surface. They can be: pygame.FULLSCREEN create a fullscreen display pygame.DOUBLEBUF recommended for HWSURFACE or OPENGL pygame.HWSURFACE hardware accelerated, only in FULLSCREEN pygame.OPENGL create an OpenGL renderable display pygame.RESIZABLE display window should be sizeable pygame.NOFRAME display window will have no border or controls """ global _SCREEN, _CURRENT_APPLICATION _CURRENT_APPLICATION = self self.size = tuple(size) self.caption = caption pygame.init() if center: os.environ['SDL_VIDEO_CENTERED'] = '1' self.set_icon(icon) screen = pygame.display.set_mode(self.size, flags) if self.caption: pygame.display.set_caption(caption) _SCREEN = screen self.default_path = "./" def set_icon(self, icon): if icon.lower() == "pygame": pass elif icon.lower() == "thorpy": from thorpy.miscgui.style import DEFAULT_ICON icon_surf = pygame.image.load(DEFAULT_ICON) pygame.display.set_icon(icon_surf) elif icon: icon_surf = load_image(DEFAULT_ICON) pygame.display.set_icon(icon_surf) else: icon_surf = pygame.Surface((1,1)) icon_surf.set_colorkey((0,0,0)) pygame.display.set_icon(icon_surf) def update(self): pygame.display.flip() def quit(self): pygame.font.quit() pygame.quit() def pause(self, fps=20): stay = True clock = pygame.time.Clock() while stay: clock.tick(fps) for e in pygame.event.get(): if e.type == pygame.constants.QUIT: pygame.font.quit() pygame.quit() exit() elif e.type == pygame.constants.KEYUP: stay = False def save_screenshot(self, path=None, name=None, note=""): from thorpy.miscgui import functions if path is None: path = self.default_path if name is None: name = time.asctime().replace(" ", "_").replace(":", "-") + ".png" functions.debug_msg("Saving screenshot as " + path + note + name) pygame.image.save(functions.get_screen(), path+note+name) def get_statistics(self): from thorpy.elements.ghost import Ghost return {"number of elements":Ghost._current_id}
mit
nightjean/Deep-Learning
tensorflow/contrib/keras/python/keras/applications/inception_v3_test.py
51
1591
# Copyright 2016 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Tests for Inception V3 application.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function from tensorflow.contrib.keras.python import keras from tensorflow.python.platform import test class InceptionV3Test(test.TestCase): def test_with_top(self): model = keras.applications.InceptionV3(weights=None) self.assertEqual(model.output_shape, (None, 1000)) def test_no_top(self): model = keras.applications.InceptionV3(weights=None, include_top=False) self.assertEqual(model.output_shape, (None, None, None, 2048)) def test_with_pooling(self): model = keras.applications.InceptionV3(weights=None, include_top=False, pooling='avg') self.assertEqual(model.output_shape, (None, 2048)) if __name__ == '__main__': test.main()
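
# Hedged note (not part of the original tests): the constructor also accepts
# pooling='max'; a corresponding check would mirror test_with_pooling above:
#
#   model = keras.applications.InceptionV3(weights=None, include_top=False,
#                                          pooling='max')
#   self.assertEqual(model.output_shape, (None, 2048))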
apache-2.0
oVirt/ovirt-engine-cli
src/cli/executionmode.py
1
1112
# # Copyright (c) 2010 Red Hat, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # class ExecutionMode(): SHELL, SCRIPT, DEFAULT, NOPAGING = range(4) def __init__(self, Type): self.value = Type def __str__(self): if self.value == ExecutionMode.SHELL: return 'SHELL' if self.value == ExecutionMode.SCRIPT: return 'SCRIPT' if self.value == ExecutionMode.DEFAULT: return 'DEFAULT' if self.value == ExecutionMode.NOPAGING: return 'NOPAGING' def __eq__(self, y): return self.value == y.value
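
# Hedged usage sketch (not part of the original module): ExecutionMode wraps
# an int constant and compares by value.
if __name__ == '__main__':
    mode = ExecutionMode(ExecutionMode.SHELL)
    print str(mode)                                       # SHELL
    print mode == ExecutionMode(ExecutionMode.NOPAGING)   # False
    print mode == ExecutionMode(ExecutionMode.SHELL)      # True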
apache-2.0
AnotherIvan/calibre
src/calibre/customize/zipplugin.py
14
11567
#!/usr/bin/env python2 # vim:fileencoding=UTF-8:ts=4:sw=4:sta:et:sts=4:ai from __future__ import (unicode_literals, division, absolute_import, print_function) from future_builtins import map __license__ = 'GPL v3' __copyright__ = '2011, Kovid Goyal <kovid@kovidgoyal.net>' __docformat__ = 'restructuredtext en' import os, zipfile, posixpath, importlib, threading, re, imp, sys from collections import OrderedDict from functools import partial from calibre import as_unicode from calibre.customize import (Plugin, numeric_version, platform, InvalidPlugin, PluginNotFound) # PEP 302 based plugin loading mechanism, works around the bug in zipimport in # python 2.x that prevents importing from zip files in locations whose paths # have non ASCII characters def get_resources(zfp, name_or_list_of_names): ''' Load resources from the plugin zip file :param name_or_list_of_names: List of paths to resources in the zip file using / as separator, or a single path :return: A dictionary of the form ``{name : file_contents}``. Any names that were not found in the zip file will not be present in the dictionary. If a single path is passed in the return value will be just the bytes of the resource or None if it wasn't found. ''' names = name_or_list_of_names if isinstance(names, basestring): names = [names] ans = {} with zipfile.ZipFile(zfp) as zf: for name in names: try: ans[name] = zf.read(name) except: import traceback traceback.print_exc() if len(names) == 1: ans = ans.pop(names[0], None) return ans def get_icons(zfp, name_or_list_of_names): ''' Load icons from the plugin zip file :param name_or_list_of_names: List of paths to resources in the zip file using / as separator, or a single path :return: A dictionary of the form ``{name : QIcon}``. Any names that were not found in the zip file will be null QIcons. If a single path is passed in the return value will be A QIcon. 
''' from PyQt5.Qt import QIcon, QPixmap names = name_or_list_of_names ans = get_resources(zfp, names) if isinstance(names, basestring): names = [names] if ans is None: ans = {} if isinstance(ans, basestring): ans = dict([(names[0], ans)]) ians = {} for name in names: p = QPixmap() raw = ans.get(name, None) if raw: p.loadFromData(raw) ians[name] = QIcon(p) if len(names) == 1: ians = ians.pop(names[0]) return ians _translations_cache = {} def load_translations(namespace, zfp): null = object() trans = _translations_cache.get(zfp, null) if trans is None: return if trans is null: from calibre.utils.localization import get_lang lang = get_lang() if not lang or lang == 'en': # performance optimization _translations_cache[zfp] = None return with zipfile.ZipFile(zfp) as zf: try: mo = zf.read('translations/%s.mo' % lang) except KeyError: mo = None # No translations for this language present if mo is None: _translations_cache[zfp] = None return from gettext import GNUTranslations from io import BytesIO trans = _translations_cache[zfp] = GNUTranslations(BytesIO(mo)) namespace['_'] = trans.ugettext namespace['ngettext'] = trans.ungettext class PluginLoader(object): def __init__(self): self.loaded_plugins = {} self._lock = threading.RLock() self._identifier_pat = re.compile(r'[a-zA-Z][_0-9a-zA-Z]*') def _get_actual_fullname(self, fullname): parts = fullname.split('.') if parts[0] == 'calibre_plugins': if len(parts) == 1: return parts[0], None plugin_name = parts[1] with self._lock: names = self.loaded_plugins.get(plugin_name, None) if names is None: raise ImportError('No plugin named %r loaded'%plugin_name) names = names[1] fullname = '.'.join(parts[2:]) if not fullname: fullname = '__init__' if fullname in names: return fullname, plugin_name if fullname+'.__init__' in names: return fullname+'.__init__', plugin_name return None, None def find_module(self, fullname, path=None): fullname, plugin_name = self._get_actual_fullname(fullname) if fullname is None and plugin_name is None: return None return self def load_module(self, fullname): import_name, plugin_name = self._get_actual_fullname(fullname) if import_name is None and plugin_name is None: raise ImportError('No plugin named %r is loaded'%fullname) mod = sys.modules.setdefault(fullname, imp.new_module(fullname)) mod.__file__ = "<calibre Plugin Loader>" mod.__loader__ = self if import_name.endswith('.__init__') or import_name in ('__init__', 'calibre_plugins'): # We have a package mod.__path__ = [] if plugin_name is not None: # We have some actual code to load with self._lock: zfp, names = self.loaded_plugins.get(plugin_name, (None, None)) if names is None: raise ImportError('No plugin named %r loaded'%plugin_name) zinfo = names.get(import_name, None) if zinfo is None: raise ImportError('Plugin %r has no module named %r' % (plugin_name, import_name)) with zipfile.ZipFile(zfp) as zf: try: code = zf.read(zinfo) except: # Maybe the zip file changed from under us code = zf.read(zinfo.filename) compiled = compile(code, 'calibre_plugins.%s.%s'%(plugin_name, import_name), 'exec', dont_inherit=True) mod.__dict__['get_resources'] = partial(get_resources, zfp) mod.__dict__['get_icons'] = partial(get_icons, zfp) mod.__dict__['load_translations'] = partial(load_translations, mod.__dict__, zfp) exec compiled in mod.__dict__ return mod def load(self, path_to_zip_file): if not os.access(path_to_zip_file, os.R_OK): raise PluginNotFound('Cannot access %r'%path_to_zip_file) with zipfile.ZipFile(path_to_zip_file) as zf: plugin_name = self._locate_code(zf, 
path_to_zip_file) try: ans = None plugin_module = 'calibre_plugins.%s'%plugin_name m = sys.modules.get(plugin_module, None) if m is not None: reload(m) else: m = importlib.import_module(plugin_module) plugin_classes = [] for obj in m.__dict__.itervalues(): if isinstance(obj, type) and issubclass(obj, Plugin) and \ obj.name != 'Trivial Plugin': plugin_classes.append(obj) if not plugin_classes: raise InvalidPlugin('No plugin class found in %s:%s'%( as_unicode(path_to_zip_file), plugin_name)) if len(plugin_classes) > 1: plugin_classes.sort(key=lambda c:(getattr(c, '__module__', None) or '').count('.')) ans = plugin_classes[0] if ans.minimum_calibre_version > numeric_version: raise InvalidPlugin( 'The plugin at %s needs a version of calibre >= %s' % (as_unicode(path_to_zip_file), '.'.join(map(unicode, ans.minimum_calibre_version)))) if platform not in ans.supported_platforms: raise InvalidPlugin( 'The plugin at %s cannot be used on %s' % (as_unicode(path_to_zip_file), platform)) return ans except: with self._lock: del self.loaded_plugins[plugin_name] raise def _locate_code(self, zf, path_to_zip_file): names = [x if isinstance(x, unicode) else x.decode('utf-8') for x in zf.namelist()] names = [x[1:] if x[0] == '/' else x for x in names] plugin_name = None for name in names: name, ext = posixpath.splitext(name) if name.startswith('plugin-import-name-') and ext == '.txt': plugin_name = name.rpartition('-')[-1] if plugin_name is None: c = 0 while True: c += 1 plugin_name = 'dummy%d'%c if plugin_name not in self.loaded_plugins: break else: if self._identifier_pat.match(plugin_name) is None: raise InvalidPlugin(( 'The plugin at %r uses an invalid import name: %r' % (path_to_zip_file, plugin_name))) pynames = [x for x in names if x.endswith('.py')] candidates = [posixpath.dirname(x) for x in pynames if x.endswith('/__init__.py')] candidates.sort(key=lambda x: x.count('/')) valid_packages = set() for candidate in candidates: parts = candidate.split('/') parent = '.'.join(parts[:-1]) if parent and parent not in valid_packages: continue valid_packages.add('.'.join(parts)) names = OrderedDict() for candidate in pynames: parts = posixpath.splitext(candidate)[0].split('/') package = '.'.join(parts[:-1]) if package and package not in valid_packages: continue name = '.'.join(parts) names[name] = zf.getinfo(candidate) # Legacy plugins if '__init__' not in names: for name in list(names.iterkeys()): if '.' not in name and name.endswith('plugin'): names['__init__'] = names[name] break if '__init__' not in names: raise InvalidPlugin(('The plugin in %r is invalid. It does not ' 'contain a top-level __init__.py file') % path_to_zip_file) with self._lock: self.loaded_plugins[plugin_name] = (path_to_zip_file, names) return plugin_name loader = PluginLoader() sys.meta_path.insert(0, loader) if __name__ == '__main__': from tempfile import NamedTemporaryFile from calibre.customize.ui import add_plugin from calibre import CurrentDir path = sys.argv[-1] with NamedTemporaryFile(suffix='.zip') as f: with zipfile.ZipFile(f, 'w') as zf: with CurrentDir(path): for x in os.listdir('.'): if x[0] != '.': print ('Adding', x) zf.write(x) if os.path.isdir(x): for y in os.listdir(x): zf.write(os.path.join(x, y)) add_plugin(f.name) print ('Added plugin from', sys.argv[-1])
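
# Hedged usage sketch (not part of the original module): inside a loaded
# plugin, load_module() injects get_resources/get_icons with the zip path
# already bound via partial, so plugin code can call them without a path
# (the resource names below are hypothetical):
#
#   raw = get_resources('images/icon.png')     # bytes, or None if missing
#   many = get_resources(['a.txt', 'b.txt'])   # {name: bytes} for found names
#   icon = get_icons('images/icon.png')        # a QIcon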
gpl-3.0
realmarcin/transform
lib/biokbase/Transform/util.py
1
24006
#!/usr/bin/env python import argparse import sys import os import os.path import io import time import datetime import traceback import ctypes import subprocess from subprocess import Popen, PIPE import shutil from optparse import OptionParser import requests from requests_toolbelt import MultipartEncoder import json import magic import gzip import bz2 import tarfile import zipfile import glob import ftplib import re try: from cStringIO import StringIO except: from StringIO import StringIO from biokbase.workspace.client import Workspace BUF_SIZE = 8*1024 # default HTTP LIB client buffer_size # Base class for Transform service class TransformBase: def __init__(self, args): self.shock_url = args.shock_url self.inobj_id = args.inobj_id self.sdir = args.sdir self.itmp = args.itmp if(hasattr(args, 'otmp')): self.otmp = args.otmp else: self.otmp = "output" self.token = os.environ.get('KB_AUTH_TOKEN') if hasattr(args, 'ssl_verify'): self.ssl_verify = args.ssl_verify else: self.ssl_verify = True if hasattr(args, 'url_list'): self.url_list = args.url_list else: self.url_list = ["ftp://ftp.ncbi.nlm.nih.gov/genomes/refseq/bacteria/Bacillus_subtilis/reference/GCF_000009045.1_ASM904v1/GCF_000009045.1_ASM904v1_genomic.gbff.gz"] def upload_to_shock(self): if self.token is None: raise Exception("Unable to find token!") filePath = "{}/{}".format(self.sdir,self.otmp) #build the header header = dict() header["Authorization"] = "Oauth %s" % self.token dataFile = open(os.path.abspath(filePath)) m = MultipartEncoder(fields={'upload': (os.path.split(filePath)[-1], dataFile)}) header['Content-Type'] = m.content_type try: response = requests.post(self.shock_url + "/node", headers=header, data=m, allow_redirects=True, verify=self.ssl_verify) dataFile.close() if not response.ok: response.raise_for_status() result = response.json() if result['error']: raise Exception(result['error'][0]) else: return result["data"] except: dataFile.close() raise def download_shock_data(self) : if self.token is None: raise Exception("Unable to find token!") # TODO: Improve folder checking if not os.path.isdir(self.sdir): try: os.mkdir(self.sdir) except: raise src_list = self.inobj_id.split(',') header = dict() header["Authorization"] = "OAuth {0}".format(self.token) # set chunk size to 10MB chunkSize = 10 * 2**20 for sid in src_list: surl = ""; fileName = "" fileSize = 0 # TODO: let's improve shock node detection using actually trying it if (sid.startswith('http') or sid.startswith('ftp')) and not (re.search(r'^http[s]?.*/node/[a-fA-F0-9\-]+\?.*', sid)): surl = sid fileName = sid.split('/')[-1].split('#')[0].split('?')[0] #TODO: add file size estimation code here # reset header here with user id and password header = dict() else: m = re.search(r'^(http[s]?.*/node/[a-fA-F0-9\-]+)\?.*', sid) if m is not None: metadata = requests.get(m.group(1), headers=header, stream=True, verify=self.ssl_verify) md = metadata.json() fileName = md['data']['file']['name'] fileSize = md['data']['file']['size'] metadata.close() surl = sid; else: metadata = requests.get("{0}/node/{1}?verbosity=metadata".format(self.shock_url, sid), headers=header, stream=True, verify=self.ssl_verify) md = metadata.json() fileName = md['data']['file']['name'] fileSize = md['data']['file']['size'] metadata.close() surl = "{0}/node/{1}?download_raw".format(self.shock_url, sid) data = requests.get(surl, headers=header, stream=True, verify=self.ssl_verify) size = int(data.headers['content-length']) if(size > 0 and fileSize == 0): fileSize = size filePath = os.path.join(self.sdir, 
fileName) f = io.open(filePath, 'wb') try: for chunk in data.iter_content(chunkSize): f.write(chunk) finally: data.close() f.close() self.extract_data(filePath) def download_from_urls(self, url_list, chunkSize=10 * 2**20): if self.token is None: raise Exception("Unable to find token!") # TODO: Improve folder checking if not os.path.isdir(self.sdir): os.mkdir(self.sdir) for url in url_list: data = None # detect url type if url.startswith("ftp://"): threshold = 1024 # check if file or directory host = url.split("ftp://")[1].split("/")[0] path = url.split("ftp://")[1].split("/", 1)[1] ftp_connection = ftplib.FTP(host) ftp_connection.login() file_list = ftp_connection.sendcmd("MLST {0}".format(path)) if file_list.find("type=dir") > -1: file_list = ftp_connection.nlst(path) if len(file_list) > 1: if len(file_list) > threshold: raise Exception("Too many files to process, found so far {0:d}".format(len(file_list))) dirname = path.split("/")[-1] if len(dirname) == 0: dirname = path.split("/")[-2] all_files = list() check = file_list[:] while len(check) > 0: x = check.pop() new_files = ftp_connection.nlst(x) for n in new_files: details = ftp_connection.sendcmd("MLST {0}".format(n)) if "type=file" in details: all_files.append(n) elif "type=dir" in details: check.append(n) if len(all_files) > threshold: raise Exception("Too many files to process, found so far {0:d}".format(len(all_files))) os.mkdir(dirname) for x in all_files: with open(os.path.join(os.path.abspath(dirname), os.path.basename(x)), 'wb') as f: print "Downloading {0}".format(host + x) ftp_connection.retrbinary("RETR {0}".format(x), lambda s: f.write(s), 10 * 2**20) self.extract_data(os.path.join(os.path.abspath(dirname), os.path.basename(x))) else: with open(os.path.join(self.sdir, os.path.split(path)[-1]), 'wb') as f: print "Downloading {0}".format(url) ftp_connection.retrbinary("RETR {0}".format(path), lambda s: f.write(s), 10 * 2**20) self.extract_data(os.path.join(self.sdir, os.path.split(path)[-1])) ftp_connection.close() elif url.startswith("http://"): print "Downloading {0}".format(url) # check if shock data = requests.get(url, stream=True) fileName = url.split("/")[-1] filePath = os.path.join(self.sdir, fileName) f = io.open(filePath, 'wb') try: for chunk in data.iter_content(chunkSize): f.write(chunk) finally: data.close() f.close() self.extract_data(filePath) elif url.startswith("https://"): print "Downloading {0}".format(url) # check if shock data = requests.get(url, stream=True, verify=self.ssl_verify) size = int(data.headers['content-length']) fileName = url.split("/")[-1] filePath = os.path.join(self.sdir, fileName) f = io.open(filePath, 'wb') try: for chunk in data.iter_content(chunkSize): f.write(chunk) finally: data.close() f.close() self.extract_data(filePath) def extract_data(self, filePath, chunkSize=10 * 2**20): def extract_tar(tarPath): if not tarfile.is_tarfile(tarPath): raise Exception("Inavalid tar file " + tarPath) with tarfile.open(tarPath, 'r') as tarDataFile: memberlist = tarDataFile.getmembers() for member in memberlist: memberPath = os.path.join(os.path.dirname(os.path.abspath(tarPath)),os.path.basename(os.path.abspath(member.name))) if member.isfile(): print "\t\tExtracting {0:f} MB from {1} in {2}".format(int(member.size)/float(1024*1024),memberPath,tarPath) with open(memberPath, 'wb') as f: inputFile = tarDataFile.extractfile(member.name) f.write(inputFile.read(chunkSize)) os.remove(tarPath) mimeType = None with magic.Magic(flags=magic.MAGIC_MIME_TYPE) as m: mimeType = m.id_filename(filePath) print 
"Extracting {0} as {1}".format(filePath, mimeType) if mimeType == "application/x-gzip": outFile = os.path.splitext(filePath)[0] with gzip.GzipFile(filePath, 'rb') as gzipDataFile, open(outFile, 'wb') as f: for chunk in gzipDataFile: f.write(chunk) os.remove(filePath) outPath = os.path.dirname(filePath) # check for tar with magic.Magic(flags=magic.MAGIC_MIME_TYPE) as m: mimeType = m.id_filename(outFile) if mimeType == "application/x-tar": print "Extracting {0} as tar".format(outFile) extract_tar(os.path.dirname(outFile)) elif mimeType == "application/x-bzip2": outFile = os.path.splitext(filePath)[0] with bz2.BZ2File(filePath, 'r') as bz2DataFile, open(outFile, 'wb') as f: for chunk in bz2DataFile: f.write(chunk) os.remove(filePath) outPath = os.path.dirname(filePath) # check for tar with magic.Magic(flags=magic.MAGIC_MIME_TYPE) as m: mimeType = m.id_filename(outFile) if mimeType == "application/x-tar": print "Extracting {0} as tar".format(outFile) extract_tar(os.path.dirname(outFile)) elif mimeType == "application/zip": if not zipfile.is_zipfile(filePath): raise Exception("Invalid zip file!") outPath = os.path.dirname(filePath) with zipfile.ZipFile(filePath, 'r') as zipDataFile: bad = zipDataFile.testzip() if bad is not None: raise Exception("Encountered a bad file in the zip : " + str(bad)) infolist = zipDataFile.infolist() # perform sanity check on file names, extract each file individually for x in infolist: infoPath = os.path.join(outPath, os.path.basename(os.path.abspath(x.filename))) if not os.path.exists(os.path.dirname(x.filename)): os.makedirs(infoPath) if os.path.exists(os.path.join(infoPath,os.path.split(x.filename)[-1])): raise Exception("Extracting zip contents will overwrite an existing file!") with open(infoPath, 'wb') as f: f.write(zipDataFile.read(x.filename)) os.remove(filePath) elif mimeType == "application/x-gtar": if not tarfile.is_tarfile(filePath): raise Exception("Inavalid tar file " + filePath) outPath = os.path.dirname(filePath) with tarfile.open(filePath, 'r|*') as tarDataFile: memberlist = tarDataFile.getmembers() # perform sanity check on file names, extract each file individually for member in memberlist: memberPath = os.path.join(outPath, os.path.basename(os.path.abspath(member.name))) if os.path.exists(os.path.dirname(infoPath)): os.makedirs(infoPath) if os.path.exists(os.path.join(infoPath,os.path.split(member.name)[-1])): raise Exception("Extracting tar contents will overwrite an existing file!") if member.isfile(): with open(memberPath, 'wb') as f, tarDataFile.extractfile(member.name) as inputFile: f.write(inputFile.read(chunkSize)) os.remove(filePath) class Validator(TransformBase): def __init__(self, args): TransformBase.__init__(self,args) self.ws_url = args.ws_url self.cfg_name = args.cfg_name self.sws_id = args.sws_id self.etype = args.etype self.opt_args = args.opt_args # download ws object and find where the validation script is located self.wsd = Workspace(url=self.ws_url, token=self.token) self.config = self.wsd.get_object({'id' : self.cfg_name, 'workspace' : self.sws_id})['data']['config_map'] if self.config is None: raise Exception("Object {} not found in workspace {}".format(self.cfg_name, self.sws_id)) def validation_handler (self) : ### # execute validation ## TODO: Add logging if self.etype not in self.config['validator']: raise Exception("No validation script was registered for {}".format(self.etype)) fd_list = [] if os.path.exists("{}/{}".format(self.sdir,self.itmp)): fd_list.append( "{}/{}".format(self.sdir,self.itmp)) else: fd_list = 
glob.glob("{}/{}_*".format(self.sdir,self.itmp)) for fd in fd_list: vcmd_lst = [self.config['validator'][self.etype]['cmd_name'], self.config['validator'][self.etype]['cmd_args']['input'], fd] if 'validator' in self.opt_args: opt_args = self.opt_args['validator'] for k in opt_args: if k in self.config['validator'][self.etype]['opt_args'] and opt_args[k] is not None: vcmd_lst.append(self.config['validator'][self.etype]['opt_args'][k]) vcmd_lst.append(opt_args[k]) p1 = Popen(vcmd_lst, stdout=PIPE) out_str = p1.communicate() # print output message for error tracking if out_str[0] is not None : print out_str[0] if out_str[1] is not None : print >> sys.stderr, out_str[1] if p1.returncode != 0: raise Exception(out_str[1]) class Uploader(Validator): def __init__(self, args): Validator.__init__(self, args) self.kbtype = args.kbtype self.ws_id = args.ws_id self.outobj_id = args.outobj_id self.jid = args.jid self.otmp = args.otmp def transformation_handler (self) : conv_type = "{}-to-{}".format(self.etype, self.kbtype) if conv_type not in self.config['transformer']: raise Exception("No conversion script was registered for {}".format(conv_type)) vcmd_lst = [self.config['transformer'][conv_type]['cmd_name']] vcmd_lst.append(self.config['transformer'][conv_type]['cmd_args']['input']) if 'cmd_args_override' in self.config['transformer'][conv_type] and 'input' in self.config['transformer'][conv_type]['cmd_args_override']: if self.config['transformer'][conv_type]['cmd_args_override']['input'] == 'shock_node_id': # use shock node id vcmd_lst.append(self.inobj_id) else: vcmd_lst.append("{}/{}".format(self.sdir,self.itmp)) # not defined yet else: vcmd_lst.append("{}/{}".format(self.sdir,self.itmp)) # default input is the input file or folder vcmd_lst.append(self.config['transformer'][conv_type]['cmd_args']['output']) if 'cmd_args_override' in self.config['transformer'][conv_type] and 'output' in self.config['transformer'][conv_type]['cmd_args_override']: vcmd_lst.append("{}/{}".format(self.sdir,self.otmp)) # not defined yet else: vcmd_lst.append("{}/{}".format(self.sdir,self.otmp)) if 'transformer' in self.opt_args: opt_args = self.opt_args['transformer'] for k in opt_args: if k in self.config['transformer'][conv_type]['opt_args'] and opt_args[k] is not None: vcmd_lst.append(self.config['transformer'][conv_type]['opt_args'][k]) vcmd_lst.append(opt_args[k]) p1 = Popen(vcmd_lst, stdout=PIPE) out_str = p1.communicate() # print output message for error tracking if out_str[0] is not None : print out_str[0] if out_str[1] is not None : print >> sys.stderr, out_str[1] if p1.returncode != 0: raise Exception(out_str[1]) def upload_handler (self) : if self.kbtype in self.config['uploader']: # upload handler is registered vcmd_lst = [self.config['uploader'][self.kbtype]['cmd_name'], self.config['uploader'][self.kbtype]['cmd_args']['ws_url'], self.ws_url, self.config['uploader'][self.kbtype]['cmd_args']['ws_id'], self.ws_id, self.config['uploader'][self.kbtype]['cmd_args']['outobj_id'], self.outobj_id, self.config['uploader'][self.kbtype]['cmd_args']['dir'], self.sdir ] if 'uploader' in self.opt_args: opt_args = self.opt_args['uploader'] for k in opt_args: if k in self.config['uploader'][self.kbtype]['opt_args'] and opt_args[k] is not None: vcmd_lst.append(self.config['uploader'][self.kbtype]['opt_args'][k]) vcmd_lst.append(opt_args[k]) print vcmd_lst p1 = Popen(vcmd_lst, stdout=PIPE) out_str = p1.communicate() # print output message for error tracking if out_str[0] is not None : print out_str[0] if out_str[1] is not 
None : print >> sys.stderr, out_str[1]

            if p1.returncode != 0:
                raise Exception(out_str[1])
        else: # upload handler was not registered
            self.upload_to_ws() # use default WS uploader

    def upload_to_ws_args (self, sdir, otmp, ws_id, kbtype, outobj_id, inobj_id, etype, jid) :
        # the mode argument belongs to open(), not to format()
        jif = open("{}/{}".format(sdir, otmp), 'r')
        data = json.loads(jif.read())
        jif.close()

        self.wsd.save_objects({'workspace' : ws_id, 'objects' : [ {
            'type' : kbtype, 'data' : data, 'name' : outobj_id,
            'meta' : { 'source_id' : inobj_id, 'source_type' : etype,
                       'ujs_job_id' : jid} } ]})

    def upload_to_ws (self) :
        # the mode argument belongs to open(), not to format()
        jif = open("{}/{}".format(self.sdir, self.otmp), 'r')
        data = json.loads(jif.read())
        jif.close()

        self.wsd.save_objects({'workspace' : self.ws_id, 'objects' : [ {
            'type' : self.kbtype, 'data' : data, 'name' : self.outobj_id,
            'meta' : { 'source_id' : self.inobj_id, 'source_type' : self.etype,
                       'ujs_job_id' : self.jid} } ]})


class Downloader(TransformBase):
    def __init__(self, args):
        TransformBase.__init__(self, args)
        self.ws_url = args.ws_url
        self.cfg_name = args.cfg_name
        self.sws_id = args.sws_id
        self.etype = args.etype
        self.opt_args = args.opt_args
        self.kbtype = args.kbtype
        #self.otmp = args.otmp
        self.ws_id = args.ws_id
        #self.outobj_id = args.outobj_id
        self.jid = args.jid

        # download ws object and find where the validation script is located
        self.wsd = Workspace(url=self.ws_url, token=self.token)
        self.config = self.wsd.get_object({'id' : self.cfg_name, 'workspace' : self.sws_id})['data']['config_map']

        if self.config is None:
            raise Exception("Object {} not found in workspace {}".format(self.cfg_name, self.sws_id))

    def download_ws_data (self) :
        try:
            os.mkdir(self.sdir)
        except:
            pass
        dif = open("{}/{}".format(self.sdir, self.itmp), 'w')
        data = self.wsd.get_object({'id' : self.inobj_id, 'workspace' : self.ws_id})['data']
        json.dump(data, dif)
        dif.close()

    #def download_handler (ws_url, cfg_name, sws_id, ws_id, in_id, etype, kbtype, sdir, otmp, opt_args, ujs_url, ujs_jid) :
    def download_handler (self) :
        try:
            os.mkdir(self.sdir)
        except:
            pass

        conv_type = "{}-to-{}".format(self.kbtype, self.etype)
        if conv_type not in self.config['down_transformer'] or 'inobj_id' not in self.config['down_transformer'][conv_type]['cmd_args'] or 'ws_id' not in self.config['down_transformer'][conv_type]['cmd_args'] or 'output' not in self.config['down_transformer'][conv_type]['cmd_args']:
            raise Exception("{} to {} conversion was not properly defined!".format(self.kbtype, self.etype))

        vcmd_lst = [self.config['down_transformer'][conv_type]['cmd_name'],
                    self.config['down_transformer'][conv_type]['cmd_args']['ws_id'], self.ws_id,
                    self.config['down_transformer'][conv_type]['cmd_args']['inobj_id'], self.inobj_id,
                    self.config['down_transformer'][conv_type]['cmd_args']['output'], "{}/{}".format(self.sdir, self.otmp)]

        if 'down_transformer' in self.opt_args:
            opt_args = self.opt_args['down_transformer']
            for k in opt_args:
                if k in self.config['down_transformer'][conv_type]['opt_args'] and opt_args[k] is not None:
                    vcmd_lst.append(self.config['down_transformer'][conv_type]['opt_args'][k])
                    vcmd_lst.append(opt_args[k])

        p1 = Popen(vcmd_lst, stdout=PIPE)
        out_str = p1.communicate()

        # print output message for error tracking
        if out_str[0] is not None : print out_str[0]
        if out_str[1] is not None : print >> sys.stderr, out_str[1]

        if p1.returncode != 0:
            raise Exception(out_str[1])
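
# Hedged usage sketch (not part of the original module): the classes above
# take an argparse-style namespace; a minimal download-and-extract run might
# look like this (all field values below are hypothetical):
#
#   import argparse
#   args = argparse.Namespace(shock_url="https://kbase.us/services/shock-api",
#                             inobj_id="<shock-node-id>",
#                             sdir="work", itmp="input")
#   tb = TransformBase(args)
#   tb.download_shock_data()   # requires KB_AUTH_TOKEN in the environment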
mit
CuriousLearner/kivy
kivy/lang/parser.py
3
25711
''' Parser ====== Class used for the parsing of .kv files into rules. ''' import os import re import sys import traceback from re import sub, findall from types import CodeType from functools import partial from collections import OrderedDict, defaultdict import kivy.lang.builder # imported as absolute to avoid circular import from kivy.logger import Logger from kivy.cache import Cache from kivy import require from kivy.utils import rgba import kivy.metrics as Metrics __all__ = ('Parser', 'ParserException') trace = Logger.trace global_idmap = {} # register cache for creating new classtype (template) Cache.register('kv.lang') # all previously included files __KV_INCLUDES__ = [] # precompile regexp expression lang_str = re.compile( "((?:'''.*?''')|" "(?:(?:(?<!')|''')'(?:[^']|\\\\')+?'(?:(?!')|'''))|" '(?:""".*?""")|' '(?:(?:(?<!")|""")"(?:[^"]|\\\\")+?"(?:(?!")|""")))', re.DOTALL) lang_key = re.compile('([a-zA-Z_]+)') lang_keyvalue = re.compile('([a-zA-Z_][a-zA-Z0-9_.]*\.[a-zA-Z0-9_.]+)') lang_tr = re.compile('(_\()') lang_cls_split_pat = re.compile(', *') # all the widget handlers, used to correctly unbind all the callbacks then the # widget is deleted _handlers = defaultdict(partial(defaultdict, list)) class ProxyApp(object): # proxy app object # taken from http://code.activestate.com/recipes/496741-object-proxying/ __slots__ = ['_obj'] def __init__(self): object.__init__(self) object.__setattr__(self, '_obj', None) def _ensure_app(self): app = object.__getattribute__(self, '_obj') if app is None: from kivy.app import App app = App.get_running_app() object.__setattr__(self, '_obj', app) # Clear cached application instance, when it stops app.bind(on_stop=lambda instance: object.__setattr__(self, '_obj', None)) return app def __getattribute__(self, name): object.__getattribute__(self, '_ensure_app')() return getattr(object.__getattribute__(self, '_obj'), name) def __delattr__(self, name): object.__getattribute__(self, '_ensure_app')() delattr(object.__getattribute__(self, '_obj'), name) def __setattr__(self, name, value): object.__getattribute__(self, '_ensure_app')() setattr(object.__getattribute__(self, '_obj'), name, value) def __bool__(self): object.__getattribute__(self, '_ensure_app')() return bool(object.__getattribute__(self, '_obj')) def __str__(self): object.__getattribute__(self, '_ensure_app')() return str(object.__getattribute__(self, '_obj')) def __repr__(self): object.__getattribute__(self, '_ensure_app')() return repr(object.__getattribute__(self, '_obj')) global_idmap['app'] = ProxyApp() global_idmap['pt'] = Metrics.pt global_idmap['inch'] = Metrics.inch global_idmap['cm'] = Metrics.cm global_idmap['mm'] = Metrics.mm global_idmap['dp'] = Metrics.dp global_idmap['sp'] = Metrics.sp global_idmap['rgba'] = rgba class ParserException(Exception): '''Exception raised when something wrong happened in a kv file. 
''' def __init__(self, context, line, message, cause=None): self.filename = context.filename or '<inline>' self.line = line sourcecode = context.sourcecode sc_start = max(0, line - 2) sc_stop = min(len(sourcecode), line + 3) sc = ['...'] for x in range(sc_start, sc_stop): if x == line: sc += ['>> %4d:%s' % (line + 1, sourcecode[line][1])] else: sc += [' %4d:%s' % (x + 1, sourcecode[x][1])] sc += ['...'] sc = '\n'.join(sc) message = 'Parser: File "%s", line %d:\n%s\n%s' % ( self.filename, self.line + 1, sc, message) if cause: message += '\n' + ''.join(traceback.format_tb(cause)) super(ParserException, self).__init__(message) class ParserRuleProperty(object): '''Represent a property inside a rule. ''' __slots__ = ('ctx', 'line', 'name', 'value', 'co_value', 'watched_keys', 'mode', 'count', 'ignore_prev') def __init__(self, ctx, line, name, value, ignore_prev=False): super(ParserRuleProperty, self).__init__() #: Associated parser self.ctx = ctx #: Line of the rule self.line = line #: Name of the property self.name = name #: Value of the property self.value = value #: Compiled value self.co_value = None #: Compilation mode self.mode = None #: Watched keys self.watched_keys = None #: Stats self.count = 0 #: whether previous rules targeting name should be cleared self.ignore_prev = ignore_prev def precompile(self): name = self.name value = self.value # first, remove all the string from the value tmp = sub(lang_str, '', self.value) # detecting how to handle the value according to the key name mode = self.mode if self.mode is None: self.mode = mode = 'exec' if name[:3] == 'on_' else 'eval' if mode == 'eval': # if we don't detect any string/key in it, we can eval and give the # result if re.search(lang_key, tmp) is None: self.co_value = eval(value) return # ok, we can compile. value = '\n' * self.line + value self.co_value = compile(value, self.ctx.filename or '<string>', mode) # for exec mode, we don't need to watch any keys. if mode == 'exec': return # now, detect obj.prop # first, remove all the string from the value tmp = sub(lang_str, '', value) idx = tmp.find('#') if idx != -1: tmp = tmp[:idx] # detect key.value inside value, and split them wk = list(set(findall(lang_keyvalue, tmp))) if len(wk): self.watched_keys = [x.split('.') for x in wk] if findall(lang_tr, tmp): if self.watched_keys: self.watched_keys += [['_']] else: self.watched_keys = [['_']] def __repr__(self): return '<ParserRuleProperty name=%r filename=%s:%d ' \ 'value=%r watched_keys=%r>' % ( self.name, self.ctx.filename, self.line + 1, self.value, self.watched_keys) class ParserRule(object): '''Represents a rule, in terms of the Kivy internal language. 
''' __slots__ = ('ctx', 'line', 'name', 'children', 'id', 'properties', 'canvas_before', 'canvas_root', 'canvas_after', 'handlers', 'level', 'cache_marked', 'avoid_previous_rules') def __init__(self, ctx, line, name, level): super(ParserRule, self).__init__() #: Level of the rule in the kv self.level = level #: Associated parser self.ctx = ctx #: Line of the rule self.line = line #: Name of the rule self.name = name #: List of children to create self.children = [] #: Id given to the rule self.id = None #: Properties associated to the rule self.properties = OrderedDict() #: Canvas normal self.canvas_root = None #: Canvas before self.canvas_before = None #: Canvas after self.canvas_after = None #: Handlers associated to the rule self.handlers = [] #: Properties cache list: mark which class have already been checked self.cache_marked = [] #: Indicate if any previous rules should be avoided. self.avoid_previous_rules = False if level == 0: self._detect_selectors() else: self._forbid_selectors() def precompile(self): for x in self.properties.values(): x.precompile() for x in self.handlers: x.precompile() for x in self.children: x.precompile() if self.canvas_before: self.canvas_before.precompile() if self.canvas_root: self.canvas_root.precompile() if self.canvas_after: self.canvas_after.precompile() def create_missing(self, widget): # check first if the widget class already been processed by this rule cls = widget.__class__ if cls in self.cache_marked: return self.cache_marked.append(cls) for name in self.properties: if hasattr(widget, name): continue value = self.properties[name].co_value if type(value) is CodeType: value = None widget.create_property(name, value) def _forbid_selectors(self): c = self.name[0] if c == '<' or c == '[': raise ParserException( self.ctx, self.line, 'Selectors rules are allowed only at the first level') def _detect_selectors(self): c = self.name[0] if c == '<': self._build_rule() elif c == '[': self._build_template() else: if self.ctx.root is not None: raise ParserException( self.ctx, self.line, 'Only one root object is allowed by .kv') self.ctx.root = self def _build_rule(self): name = self.name if __debug__: trace('Builder: build rule for %s' % name) if name[0] != '<' or name[-1] != '>': raise ParserException(self.ctx, self.line, 'Invalid rule (must be inside <>)') # if the very first name start with a -, avoid previous rules name = name[1:-1] if name[:1] == '-': self.avoid_previous_rules = True name = name[1:] for rule in re.split(lang_cls_split_pat, name): crule = None if not len(rule): raise ParserException(self.ctx, self.line, 'Empty rule detected') if '@' in rule: # new class creation ? # ensure the name is correctly written rule, baseclasses = rule.split('@', 1) if not re.match(lang_key, rule): raise ParserException(self.ctx, self.line, 'Invalid dynamic class name') # save the name in the dynamic classes dict. self.ctx.dynamic_classes[rule] = baseclasses crule = ParserSelectorName(rule) else: # classical selectors. 
if rule[0] == '.': crule = ParserSelectorClass(rule[1:]) elif rule[0] == '#': crule = ParserSelectorId(rule[1:]) else: crule = ParserSelectorName(rule) self.ctx.rules.append((crule, self)) def _build_template(self): name = self.name if __debug__: trace('Builder: build template for %s' % name) if name[0] != '[' or name[-1] != ']': raise ParserException(self.ctx, self.line, 'Invalid template (must be inside [])') item_content = name[1:-1] if not '@' in item_content: raise ParserException(self.ctx, self.line, 'Invalid template name (missing @)') template_name, template_root_cls = item_content.split('@') self.ctx.templates.append((template_name, template_root_cls, self)) def __repr__(self): return '<ParserRule name=%r>' % (self.name, ) class Parser(object): '''Create a Parser object to parse a Kivy language file or Kivy content. ''' PROP_ALLOWED = ('canvas.before', 'canvas.after') CLASS_RANGE = list(range(ord('A'), ord('Z') + 1)) PROP_RANGE = ( list(range(ord('A'), ord('Z') + 1)) + list(range(ord('a'), ord('z') + 1)) + list(range(ord('0'), ord('9') + 1)) + [ord('_')]) __slots__ = ('rules', 'templates', 'root', 'sourcecode', 'directives', 'filename', 'dynamic_classes') def __init__(self, **kwargs): super(Parser, self).__init__() self.rules = [] self.templates = [] self.root = None self.sourcecode = [] self.directives = [] self.dynamic_classes = {} self.filename = kwargs.get('filename', None) content = kwargs.get('content', None) if content is None: raise ValueError('No content passed') self.parse(content) def execute_directives(self): global __KV_INCLUDES__ for ln, cmd in self.directives: cmd = cmd.strip() if __debug__: trace('Parser: got directive <%s>' % cmd) if cmd[:5] == 'kivy ': version = cmd[5:].strip() if len(version.split('.')) == 2: version += '.0' require(version) elif cmd[:4] == 'set ': try: name, value = cmd[4:].strip().split(' ', 1) except: Logger.exception('') raise ParserException(self, ln, 'Invalid directive syntax') try: value = eval(value) except: Logger.exception('') raise ParserException(self, ln, 'Invalid value') global_idmap[name] = value elif cmd[:8] == 'include ': ref = cmd[8:].strip() force_load = False if ref[:6] == 'force ': ref = ref[6:].strip() force_load = True if ref[-3:] != '.kv': Logger.warn('WARNING: {0} does not have a valid Kivy' 'Language extension (.kv)'.format(ref)) break if ref in __KV_INCLUDES__: if not os.path.isfile(ref): raise ParserException(self, ln, 'Invalid or unknown file: {0}'.format(ref)) if not force_load: Logger.warn('WARNING: {0} has already been included!' .format(ref)) break else: Logger.debug('Reloading {0} because include was forced.' 
.format(ref))
                    kivy.lang.builder.Builder.unload_file(ref)
                    kivy.lang.builder.Builder.load_file(ref)
                    continue
                Logger.debug('Including file: {0}'.format(ref))
                __KV_INCLUDES__.append(ref)
                kivy.lang.builder.Builder.load_file(ref)
            elif cmd[:7] == 'import ':
                package = cmd[7:].strip()
                l = package.split(' ')
                if len(l) != 2:
                    raise ParserException(self, ln, 'Invalid import syntax')
                alias, package = l
                try:
                    if package not in sys.modules:
                        try:
                            mod = __import__(package)
                        except ImportError:
                            mod = __import__('.'.join(package.split('.')[:-1]))
                        # resolve the whole thing
                        for part in package.split('.')[1:]:
                            mod = getattr(mod, part)
                    else:
                        mod = sys.modules[package]
                    global_idmap[alias] = mod
                except ImportError:
                    Logger.exception('')
                    raise ParserException(self, ln,
                                          'Unable to import package %r' % package)
            else:
                raise ParserException(self, ln, 'Unknown directive')

    def parse(self, content):
        '''Parse the contents of a Parser file and return a list
        of root objects.
        '''
        # Read and parse the lines of the file
        lines = content.splitlines()
        if not lines:
            return
        num_lines = len(lines)
        lines = list(zip(list(range(num_lines)), lines))
        self.sourcecode = lines[:]

        if __debug__:
            trace('Parser: parsing %d lines' % num_lines)

        # Strip all comments
        self.strip_comments(lines)

        # Execute directives
        self.execute_directives()

        # Get object from the first level
        objects, remaining_lines = self.parse_level(0, lines)

        # Precompile rules tree
        for rule in objects:
            rule.precompile()

        # After parsing, there should be no remaining lines
        # or there's an error we did not catch earlier.
        if remaining_lines:
            ln, content = remaining_lines[0]
            raise ParserException(self, ln, 'Invalid data (not parsed)')

    def strip_comments(self, lines):
        '''Remove all comments from all lines in-place.
           Comments need to be on a single line and not at the end of a line.
           i.e. a comment line's first non-whitespace character must be a #.
        '''
        # extract directives
        for ln, line in lines[:]:
            stripped = line.strip()
            if stripped[:2] == '#:':
                self.directives.append((ln, stripped[2:]))
            if stripped[:1] == '#':
                lines.remove((ln, line))
            if not stripped:
                lines.remove((ln, line))

    def parse_level(self, level, lines, spaces=0):
        '''Parse the current level (level * spaces) indentation.
''' indent = spaces * level if spaces > 0 else 0 objects = [] current_object = None current_property = None current_propobject = None i = 0 while i < len(lines): line = lines[i] ln, content = line # Get the number of space tmp = content.lstrip(' \t') # Replace any tab with 4 spaces tmp = content[:len(content) - len(tmp)] tmp = tmp.replace('\t', ' ') # first indent designates the indentation if spaces == 0: spaces = len(tmp) count = len(tmp) if spaces > 0 and count % spaces != 0: raise ParserException(self, ln, 'Invalid indentation, ' 'must be a multiple of ' '%s spaces' % spaces) content = content.strip() rlevel = count // spaces if spaces > 0 else 0 # Level finished if count < indent: return objects, lines[i - 1:] # Current level, create an object elif count == indent: x = content.split(':', 1) if not len(x[0]): raise ParserException(self, ln, 'Identifier missing') if (len(x) == 2 and len(x[1]) and not x[1].lstrip().startswith('#')): raise ParserException(self, ln, 'Invalid data after declaration') name = x[0] # if it's not a root rule, then we got some restriction # aka, a valid name, without point or everything else if count != 0: if False in [ord(z) in Parser.PROP_RANGE for z in name]: raise ParserException(self, ln, 'Invalid class name') current_object = ParserRule(self, ln, x[0], rlevel) current_property = None objects.append(current_object) # Next level, is it a property or an object ? elif count == indent + spaces: x = content.split(':', 1) if not len(x[0]): raise ParserException(self, ln, 'Identifier missing') # It's a class, add to the current object as a children current_property = None name = x[0] ignore_prev = name[0] == '-' if ignore_prev: name = name[1:] if ord(name[0]) in Parser.CLASS_RANGE: if ignore_prev: raise ParserException( self, ln, 'clear previous, `-`, not allowed here') _objects, _lines = self.parse_level( level + 1, lines[i:], spaces) current_object.children = _objects lines = _lines i = 0 # It's a property else: if name not in Parser.PROP_ALLOWED: if not all(ord(z) in Parser.PROP_RANGE for z in name): raise ParserException(self, ln, 'Invalid property name') if len(x) == 1: raise ParserException(self, ln, 'Syntax error') value = x[1].strip() if name == 'id': if len(value) <= 0: raise ParserException(self, ln, 'Empty id') if value in ('self', 'root'): raise ParserException( self, ln, 'Invalid id, cannot be "self" or "root"') current_object.id = value elif len(value): rule = ParserRuleProperty( self, ln, name, value, ignore_prev) if name[:3] == 'on_': current_object.handlers.append(rule) else: ignore_prev = False current_object.properties[name] = rule else: current_property = name current_propobject = None if ignore_prev: # it wasn't consumed raise ParserException( self, ln, 'clear previous, `-`, not allowed here') # Two more levels? 
            elif count == indent + 2 * spaces:
                if current_property in (
                        'canvas', 'canvas.after', 'canvas.before'):
                    _objects, _lines = self.parse_level(
                        level + 2, lines[i:], spaces)
                    rl = ParserRule(self, ln, current_property, rlevel)
                    rl.children = _objects
                    if current_property == 'canvas':
                        current_object.canvas_root = rl
                    elif current_property == 'canvas.before':
                        current_object.canvas_before = rl
                    else:
                        current_object.canvas_after = rl
                    current_property = None
                    lines = _lines
                    i = 0
                else:
                    if current_propobject is None:
                        current_propobject = ParserRuleProperty(
                            self, ln, current_property, content)
                        if current_property[:3] == 'on_':
                            current_object.handlers.append(current_propobject)
                        else:
                            current_object.properties[current_property] = \
                                current_propobject
                    else:
                        current_propobject.value += '\n' + content

            # Too much indentation, invalid
            else:
                raise ParserException(self, ln,
                                      'Invalid indentation (too many levels)')

            # Check the next line
            i += 1

        return objects, []


class ParserSelector(object):

    def __init__(self, key):
        self.key = key.lower()

    def match(self, widget):
        # raising the exception class, not the NotImplemented singleton
        raise NotImplementedError()

    def __repr__(self):
        return '<%s key=%s>' % (self.__class__.__name__, self.key)


class ParserSelectorId(ParserSelector):

    def match(self, widget):
        if widget.id:
            return widget.id.lower() == self.key


class ParserSelectorClass(ParserSelector):

    def match(self, widget):
        return self.key in widget.cls


class ParserSelectorName(ParserSelector):

    parents = {}

    def get_bases(self, cls):
        for base in cls.__bases__:
            if base.__name__ == 'object':
                break
            yield base
            if base.__name__ == 'Widget':
                break
            for cbase in self.get_bases(base):
                yield cbase

    def match(self, widget):
        parents = ParserSelectorName.parents
        cls = widget.__class__
        if cls not in parents:
            classes = [x.__name__.lower()
                       for x in [cls] + list(self.get_bases(cls))]
            parents[cls] = classes
        return self.key in parents[cls]

    def match_rule_name(self, rule_name):
        return self.key == rule_name.lower()
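
# Hedged usage sketch (not part of the original module): parsing a small kv
# snippet directly with the Parser class; assumes kivy is installed.
if __name__ == '__main__':
    kv = "<MyWidget>:\n    size_hint: None, None\n"
    parser = Parser(content=kv)
    selector, rule = parser.rules[0]   # (ParserSelector, ParserRule) pairs
    print(rule.name)                   # <MyWidget>
    print(selector.key)                # mywidget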
mit
jakevdp/scipy
scipy/integrate/__init__.py
35
2256
""" ============================================= Integration and ODEs (:mod:`scipy.integrate`) ============================================= .. currentmodule:: scipy.integrate Integrating functions, given function object ============================================ .. autosummary:: :toctree: generated/ quad -- General purpose integration dblquad -- General purpose double integration tplquad -- General purpose triple integration nquad -- General purpose n-dimensional integration fixed_quad -- Integrate func(x) using Gaussian quadrature of order n quadrature -- Integrate with given tolerance using Gaussian quadrature romberg -- Integrate func using Romberg integration quad_explain -- Print information for use of quad newton_cotes -- Weights and error coefficient for Newton-Cotes integration IntegrationWarning -- Warning on issues during integration Integrating functions, given fixed samples ========================================== .. autosummary:: :toctree: generated/ trapz -- Use trapezoidal rule to compute integral. cumtrapz -- Use trapezoidal rule to cumulatively compute integral. simps -- Use Simpson's rule to compute integral from samples. romb -- Use Romberg Integration to compute integral from -- (2**k + 1) evenly-spaced samples. .. seealso:: :mod:`scipy.special` for orthogonal polynomials (special) for Gaussian quadrature roots and weights for other weighting factors and regions. Integrators of ODE systems ========================== .. autosummary:: :toctree: generated/ odeint -- General integration of ordinary differential equations. ode -- Integrate ODE using VODE and ZVODE routines. complex_ode -- Convert a complex-valued ODE to real-valued and integrate. solve_bvp -- Solve a boundary value problem for a system of ODEs. """ from __future__ import division, print_function, absolute_import from .quadrature import * from .odepack import * from .quadpack import * from ._ode import * from ._bvp import solve_bvp __all__ = [s for s in dir() if not s.startswith('_')] from numpy.testing import Tester test = Tester().test
bsd-3-clause
javier3407/Plugin.Video.ElJavi.tv
resources/lib/chardet/big5prober.py
2920
1684
######################## BEGIN LICENSE BLOCK ######################## # The Original Code is Mozilla Communicator client code. # # The Initial Developer of the Original Code is # Netscape Communications Corporation. # Portions created by the Initial Developer are Copyright (C) 1998 # the Initial Developer. All Rights Reserved. # # Contributor(s): # Mark Pilgrim - port to Python # # This library is free software; you can redistribute it and/or # modify it under the terms of the GNU Lesser General Public # License as published by the Free Software Foundation; either # version 2.1 of the License, or (at your option) any later version. # # This library is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with this library; if not, write to the Free Software # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA # 02110-1301 USA ######################### END LICENSE BLOCK ######################### from .mbcharsetprober import MultiByteCharSetProber from .codingstatemachine import CodingStateMachine from .chardistribution import Big5DistributionAnalysis from .mbcssm import Big5SMModel class Big5Prober(MultiByteCharSetProber): def __init__(self): MultiByteCharSetProber.__init__(self) self._mCodingSM = CodingStateMachine(Big5SMModel) self._mDistributionAnalyzer = Big5DistributionAnalysis() self.reset() def get_charset_name(self): return "Big5"
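
# Hedged usage sketch (not part of the original module): probers are normally
# driven by chardet's UniversalDetector, but a prober can also be fed directly:
#
#   prober = Big5Prober()
#   prober.feed(raw_bytes)   # raw_bytes: a str suspected to be Big5-encoded
#   print prober.get_charset_name(), prober.get_confidence()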
gpl-3.0
applecool/AI
Naive Bayes/naiveBayesDensityFunction.py
1
9669
# # Naive Bayes Classifier # # _____________________________________________________________________ import math class Classifier: def __init__(self, bucketPrefix, testBucketNumber, dataFormat): """ a classifier will be built from files with the bucketPrefix excluding the file with textBucketNumber. dataFormat is a string that describes how to interpret each line of the data files. For example, for the iHealth data the format is: "attr attr attr attr class" """ total = 0 classes = {} # counts used for attributes that are not numeric counts = {} # totals used for attributes that are numereric # we will use these to compute the mean and sample standard deviation for # each attribute - class pair. totals = {} numericValues = {} # reading the data in from the file self.format = dataFormat.strip().split('\t') # self.prior = {} self.conditional = {} # for each of the buckets numbered 1 through 10: for i in range(1, 11): # if it is not the bucket we should ignore, read in the data if i != testBucketNumber: filename = "%s-%02i" % (bucketPrefix, i) f = open(filename) lines = f.readlines() f.close() for line in lines: fields = line.strip().split('\t') ignore = [] vector = [] nums = [] for i in range(len(fields)): if self.format[i] == 'num': nums.append(float(fields[i])) elif self.format[i] == 'attr': vector.append(fields[i]) elif self.format[i] == 'comment': ignore.append(fields[i]) elif self.format[i] == 'class': category = fields[i] # now process this instance total += 1 classes.setdefault(category, 0) counts.setdefault(category, {}) totals.setdefault(category, {}) numericValues.setdefault(category, {}) classes[category] += 1 # now process each non-numeric attribute of the instance col = 0 for columnValue in vector: col += 1 counts[category].setdefault(col, {}) counts[category][col].setdefault(columnValue, 0) counts[category][col][columnValue] += 1 # process numeric attributes col = 0 for columnValue in nums: col += 1 totals[category].setdefault(col, 0) #totals[category][col].setdefault(columnValue, 0) totals[category][col] += columnValue numericValues[category].setdefault(col, []) numericValues[category][col].append(columnValue) # # ok done counting. 
now compute probabilities # # first prior probabilities p(h) # for (category, count) in classes.items(): self.prior[category] = count / total # # now compute conditional probabilities p(h|D) # for (category, columns) in counts.items(): self.conditional.setdefault(category, {}) for (col, valueCounts) in columns.items(): self.conditional[category].setdefault(col, {}) for (attrValue, count) in valueCounts.items(): self.conditional[category][col][attrValue] = ( count / classes[category]) self.tmp = counts # # now compute mean and sample standard deviation # self.means = {} self.totals = totals for (category, columns) in totals.items(): self.means.setdefault(category, {}) for (col, cTotal) in columns.items(): self.means[category][col] = cTotal / classes[category] # standard deviation self.ssd = {} for (category, columns) in numericValues.items(): self.ssd.setdefault(category, {}) for (col, values) in columns.items(): SumOfSquareDifferences = 0 theMean = self.means[category][col] for value in values: SumOfSquareDifferences += (value - theMean)**2 columns[col] = 0 self.ssd[category][col] = math.sqrt(SumOfSquareDifferences / (classes[category] - 1)) def testBucket(self, bucketPrefix, bucketNumber): """Evaluate the classifier with data from the file bucketPrefix-bucketNumber""" filename = "%s-%02i" % (bucketPrefix, bucketNumber) f = open(filename) lines = f.readlines() totals = {} f.close() loc = 1 for line in lines: loc += 1 data = line.strip().split('\t') vector = [] numV = [] classInColumn = -1 for i in range(len(self.format)): if self.format[i] == 'num': numV.append(float(data[i])) elif self.format[i] == 'attr': vector.append(data[i]) elif self.format[i] == 'class': classInColumn = i theRealClass = data[classInColumn] classifiedAs = self.classify(vector, numV) totals.setdefault(theRealClass, {}) totals[theRealClass].setdefault(classifiedAs, 0) totals[theRealClass][classifiedAs] += 1 return totals def classify(self, itemVector, numVector): """Return class we think item Vector is in""" results = [] sqrt2pi = math.sqrt(2 * math.pi) for (category, prior) in self.prior.items(): prob = prior col = 1 for attrValue in itemVector: if not attrValue in self.conditional[category][col]: # we did not find any instances of this attribute value # occurring with this category so prob = 0 prob = 0 else: prob = prob * self.conditional[category][col][attrValue] col += 1 col = 1 for x in numVector: mean = self.means[category][col] ssd = self.ssd[category][col] ePart = math.pow(math.e, -(x - mean)**2/(2*ssd**2)) prob = prob * ((1.0 / (sqrt2pi*ssd)) * ePart) col += 1 results.append((prob, category)) # return the category with the highest probability #print(results) return(max(results)[1]) def tenfold(bucketPrefix, dataFormat): results = {} for i in range(1, 11): c = Classifier(bucketPrefix, i, dataFormat) t = c.testBucket(bucketPrefix, i) for (key, value) in t.items(): results.setdefault(key, {}) for (ckey, cvalue) in value.items(): results[key].setdefault(ckey, 0) results[key][ckey] += cvalue # now print results categories = list(results.keys()) categories.sort() print( "\n Classified as: ") header = " " subheader = " +" for category in categories: header += "% 10s " % category subheader += "-------+" print (header) print (subheader) total = 0.0 correct = 0.0 for category in categories: row = " %10s |" % category for c2 in categories: if c2 in results[category]: count = results[category][c2] else: count = 0 row += " %5i |" % count total += count if c2 == category: correct += count print(row) print(subheader) 
print("\n%5.3f percent correct" %((correct * 100) / total)) print("total of %i instances" % total) def pdf(mean, ssd, x): """Probability Density Function computing P(x|y) input is the mean, sample standard deviation for all the items in y, and x.""" ePart = math.pow(math.e, -(x-mean)**2/(2*ssd**2)) print (ePart) return (1.0 / (math.sqrt(2*math.pi)*ssd)) * ePart #tenfold("house-votes/hv", "class\tattr\tattr\tattr\tattr\tattr\tattr\tattr\tattr\tattr\tattr\tattr\tattr\tattr\tattr\tattr\tattr") #c = Classifier("house-votes/hv", 0, # "class\tattr\tattr\tattr\tattr\tattr\tattr\tattr\tattr\tattr\tattr\tattr\tattr\tattr\tattr\tattr\tattr") tenfold("pimaSmall/pimaSmall", "num num num num num num num num class") tenfold("pima/pima", "num num num num num num num num class") #c = Classifier("iHealth/i", 10, # "attr\tattr\tattr\tattr\tclass") #print(c.classify([], [3, 78, 50, 32, 88, 31.0, 0.248, 26])) #c = Classifier("house-votes-filtered/hv", 5, "class\tattr\tattr\tattr\tattr\tattr\tattr\tattr\tattr\tattr\tattr\tattr\tattr\tattr\tattr\tattr\tattr") #t = c.testBucket("house-votes-filtered/hv", 5) #print(t)
mit
pabloborrego93/edx-platform
common/lib/xmodule/xmodule/validation.py
202
5154
""" Extension of XBlock Validation class to include information for presentation in Studio. """ from xblock.validation import Validation, ValidationMessage class StudioValidationMessage(ValidationMessage): """ A message containing validation information about an xblock, extended to provide Studio-specific fields. """ # A special message type indicating that the xblock is not yet configured. This message may be rendered # in a different way within Studio. NOT_CONFIGURED = "not-configured" TYPES = [ValidationMessage.WARNING, ValidationMessage.ERROR, NOT_CONFIGURED] def __init__(self, message_type, message_text, action_label=None, action_class=None, action_runtime_event=None): """ Create a new message. Args: message_type (str): The type associated with this message. Most be `WARNING` or `ERROR`. message_text (unicode): The textual message. action_label (unicode): Text to show on a "fix-up" action (optional). If present, either `action_class` or `action_runtime_event` should be specified. action_class (str): A class to link to the "fix-up" action (optional). A click handler must be added for this class, unless it is "edit-button", "duplicate-button", or "delete-button" (which are all handled in general for xblock instances. action_runtime_event (str): An event name to be triggered on the xblock client-side runtime when the "fix-up" action is clicked (optional). """ super(StudioValidationMessage, self).__init__(message_type, message_text) if action_label is not None: if not isinstance(action_label, unicode): raise TypeError("Action label must be unicode.") self.action_label = action_label if action_class is not None: if not isinstance(action_class, basestring): raise TypeError("Action class must be a string.") self.action_class = action_class if action_runtime_event is not None: if not isinstance(action_runtime_event, basestring): raise TypeError("Action runtime event must be a string.") self.action_runtime_event = action_runtime_event def to_json(self): """ Convert to a json-serializable representation. Returns: dict: A dict representation that is json-serializable. """ serialized = super(StudioValidationMessage, self).to_json() if hasattr(self, "action_label"): serialized["action_label"] = self.action_label if hasattr(self, "action_class"): serialized["action_class"] = self.action_class if hasattr(self, "action_runtime_event"): serialized["action_runtime_event"] = self.action_runtime_event return serialized class StudioValidation(Validation): """ Extends `Validation` to add Studio-specific summary message. """ @classmethod def copy(cls, validation): """ Copies the `Validation` object to a `StudioValidation` object. This is a shallow copy. Args: validation (Validation): A `Validation` object to be converted to a `StudioValidation` instance. Returns: StudioValidation: A `StudioValidation` instance populated with the messages from supplied `Validation` object """ if not isinstance(validation, Validation): raise TypeError("Copy must be called with a Validation instance") studio_validation = cls(validation.xblock_id) studio_validation.messages = validation.messages return studio_validation def __init__(self, xblock_id): """ Create a `StudioValidation` instance. Args: xblock_id (object): An identification object that must support conversion to unicode. """ super(StudioValidation, self).__init__(xblock_id) self.summary = None def set_summary(self, message): """ Sets a summary message on this instance. The summary is optional. 
Args: message (ValidationMessage): A validation message to set as this instance's summary. """ if not isinstance(message, ValidationMessage): raise TypeError("Argument must of type ValidationMessage") self.summary = message @property def empty(self): """ Is this object empty (contains no messages and no summary)? Returns: bool: True iff this instance has no validation issues and therefore has no messages or summary. """ return super(StudioValidation, self).empty and not self.summary def to_json(self): """ Convert to a json-serializable representation. Returns: dict: A dict representation that is json-serializable. """ serialized = super(StudioValidation, self).to_json() if self.summary: serialized["summary"] = self.summary.to_json() return serialized
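
# A minimal usage sketch (illustrative only; the exact keys of the serialized
# dict come from the base xblock Validation class, and the message type is
# assumed to be validated against this subclass's TYPES list):
#
#     validation = StudioValidation("dummy-xblock-id")
#     validation.set_summary(
#         StudioValidationMessage(
#             StudioValidationMessage.NOT_CONFIGURED,
#             u"This component is not yet configured.",
#             action_label=u"Configure",
#             action_class="edit-button",
#         )
#     )
#     assert not validation.empty
#     serialized = validation.to_json()   # includes a "summary" entry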
agpl-3.0
bbondy/brianbondy.gae
libs/sx/pisa3/pisa_default.py
15
15340
# -*- coding: ISO-8859-1 -*-

# Copyright 2010 Dirk Holtwick, holtwick.it
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

__version__ = "$Revision: 20 $"
__author__ = "$Author: holtwick $"
__date__ = "$Date: 2007-10-09 12:58:24 +0200 (Di, 09 Okt 2007) $"

from reportlab.lib.pagesizes import *

PML_WARNING = "warning"
PML_ERROR = "error"
PML_EXCEPTION = "PML Exception"
PML_PREFIX = "pdf:"

#CLASS = 1
BOOL = 2
FONT = 3
COLOR = 4
FILE = 5
SIZE = 6
INT = 7
STRING = 8
BOX = 9
POS = 10
#STYLE = 11
MUST = 23

"""
Definition of all known tags. Also used for building the reference
"""

TAGS = {

    # FORMAT

    #"document": (1, {
    #    "format": (["a0", "a1", "a2", "a3", "a4", "a5", "a6",
    #                "b0", "b1", "b2", "b3", "b4", "b5", "b6",
    #                "letter", "legal", "elevenseventeen"], "a4"),
    #    "orientation": ["portrait", "landscape"],
    #    "fullscreen": (BOOL, "0"),
    #    "author": (STRING, ""),
    #    "subject": (STRING, ""),
    #    "title": (STRING, ""),
    #    "duration": INT,
    #    "showoutline": (BOOL, "0"),
    #    "outline": INT,
    #    }),

    "pdftemplate": (1, {
        "name": (STRING, "body"),
        "format": (["a0", "a1", "a2", "a3", "a4", "a5", "a6",
                    "b0", "b1", "b2", "b3", "b4", "b5", "b6",
                    "letter", "legal", "elevenseventeen"], "a4"),
        "orientation": ["portrait", "landscape"],
        "background": FILE,
        }),

    "pdfframe": (0, {
        "name": (STRING, ""),
        "box": (BOX, MUST),
        "border": (BOOL, "0"),
        "static": (BOOL, "0"),
        }),

    #"static": (1, {
    #    "name": STRING,
    #    "box": (BOX, MUST),
    #    "border": (BOOL, "0"),
    #    }),

    "pdfnexttemplate": (0, {
        "name": (STRING, "body"),
        }),

    "pdfnextpage": (0, {
        "name": (STRING, ""),
        # "background": FILE,
        }),

    "pdfnextframe": (0, {}),

    "pdffont": (0, {
        "src": (FILE, MUST),
        "name": (STRING, MUST),
        # "print": (BOOL, "0"),
        "encoding": (STRING, "WinAnsiEncoding"),
        }),

    "pdfdrawline": (0, {
        "from": (POS, MUST),
        "to": (POS, MUST),
        "color": (COLOR, "#000000"),
        "width": (SIZE, 1),
        }),

    "drawpoint": (0, {
        "pos": (POS, MUST),
        "color": (COLOR, "#000000"),
        "width": (SIZE, 1),
        }),

    "pdfdrawlines": (0, {
        "coords": (STRING, MUST),
        "color": (COLOR, "#000000"),
        "width": (SIZE, 1),
        }),

    "pdfdrawstring": (0, {
        "pos": (POS, MUST),
        "text": (STRING, MUST),
        "color": (COLOR, "#000000"),
        "align": (["left", "center", "right"], "right"),
        "valign": (["top", "middle", "bottom"], "bottom"),
        # "class": CLASS,
        "rotate": (INT, "0"),
        }),

    "pdfdrawimg": (0, {
        "pos": (POS, MUST),
        "src": (FILE, MUST),
        "width": SIZE,
        "height": SIZE,
        "align": (["left", "center", "right"], "right"),
        "valign": (["top", "middle", "bottom"], "bottom"),
        }),

    "pdfspacer": (0, {
        "height": (SIZE, MUST),
        }),

    "pdfpagenumber": (0, {
        "example": (STRING, "0"),
        }),

    "pdftoc": (0, {
        }),

    "pdfversion": (0, {
        }),

    "pdfkeeptogether": (1, {
        }),

    "pdfkeepinframe": (1, {
        "maxwidth": SIZE,
        "maxheight": SIZE,
        "mergespace": (INT, 1),
        "mode": (["error", "overflow", "shrink", "truncate"], "shrink"),
        "name": (STRING, "")
        }),

    # The chart example, see pml_charts
    "pdfchart": (1, {
        "type": (["spider", "bar"], "bar"),
        "strokecolor": (COLOR, "#000000"),
        "width": (SIZE, MUST),
        "height": (SIZE, MUST),
        }),

    "pdfchartdata": (0, {
        "set": (STRING, MUST),
        "value": (STRING),
        "label": (STRING),
        "strokecolor": (COLOR),
        "fillcolor": (COLOR),
        "strokewidth": (SIZE),
        }),

    "pdfchartlabel": (0, {
        "value": (STRING, MUST),
        }),

    "pdfbarcode": (0, {
        "value": (STRING, MUST),
        "align": (["left", "center", "right"], "left"),
        }),

    # ========================================================

    "link": (0, {
        "href": (STRING, MUST),
        "rel": (STRING, ""),
        "type": (STRING, ""),
        "media": (STRING, "all"),
        "charset": (STRING, "latin1"),  # XXX Must be something else...
        }),

    "meta": (0, {
        "name": (STRING, ""),
        "content": (STRING, ""),
        }),

    "style": (0, {
        "type": (STRING, ""),
        "media": (STRING, "all"),
        }),

    "img": (0, {
        "src": (FILE, MUST),
        "width": SIZE,
        "height": SIZE,
        "align": ["top", "middle", "bottom", "left", "right",
                  "texttop", "absmiddle", "absbottom", "baseline"],
        }),

    "table": (1, {
        "align": (["left", "center", "right"], "left"),
        "valign": (["top", "bottom", "middle"], "middle"),
        "border": (SIZE, "0"),
        "bordercolor": (COLOR, "#000000"),
        "bgcolor": COLOR,
        "cellpadding": (SIZE, "0"),
        "cellspacing": (SIZE, "0"),
        "repeat": (INT, "0"),  # XXX Remove this! Set to 0
        "width": STRING,
        #"keepmaxwidth": SIZE,
        #"keepmaxheight": SIZE,
        #"keepmergespace": (INT, 1),
        #"keepmode": (["error", "overflow", "shrink", "truncate"], "shrink"),
        }),

    "tr": (1, {
        "bgcolor": COLOR,
        "valign": ["top", "bottom", "middle"],
        "border": SIZE,
        "bordercolor": (COLOR, "#000000"),
        }),

    "td": (1, {
        "align": ["left", "center", "right", "justify"],
        "valign": ["top", "bottom", "middle"],
        "width": STRING,
        "bgcolor": COLOR,
        "border": SIZE,
        "bordercolor": (COLOR, "#000000"),
        "colspan": INT,
        "rowspan": INT,
        }),

    "th": (1, {
        "align": ["left", "center", "right", "justify"],
        "valign": ["top", "bottom", "middle"],
        "width": STRING,
        "bgcolor": COLOR,
        "border": SIZE,
        "bordercolor": (COLOR, "#000000"),
        "colspan": INT,
        "rowspan": INT,
        }),

    "dl": (1, {
        }),

    "dd": (1, {
        }),

    "dt": (1, {
        }),

    "ol": (1, {
        "type": (["1", "a", "A", "i", "I"], "1"),
        }),

    "ul": (1, {
        "type": (["circle", "disk", "square"], "disk"),
        }),

    "li": (1, {
        }),

    "hr": (0, {
        "color": (COLOR, "#000000"),
        "size": (SIZE, "1"),
        "align": ["left", "center", "right", "justify"],
        }),

    "div": (1, {
        "align": ["left", "center", "right", "justify"],
        }),

    "p": (1, {
        "align": ["left", "center", "right", "justify"],
        }),

    "br": (0, {
        }),

    "h1": (1, {
        "outline": STRING,
        "closed": (INT, 0),
        "align": ["left", "center", "right", "justify"],
        }),

    "h2": (1, {
        "outline": STRING,
        "closed": (INT, 0),
        "align": ["left", "center", "right", "justify"],
        }),

    "h3": (1, {
        "outline": STRING,
        "closed": (INT, 0),
        "align": ["left", "center", "right", "justify"],
        }),

    "h4": (1, {
        "outline": STRING,
        "closed": (INT, 0),
        "align": ["left", "center", "right", "justify"],
        }),

    "h5": (1, {
        "outline": STRING,
        "closed": (INT, 0),
        "align": ["left", "center", "right", "justify"],
        }),

    "h6": (1, {
        "outline": STRING,
        "closed": (INT, 0),
        "align": ["left", "center", "right", "justify"],
        }),

    "font": (1, {
        "face": FONT,
        "color": COLOR,
        "size": STRING,
        }),

    "a": (1, {
        "href": STRING,
        "name": STRING,
        }),

    "input": (0, {
        "name": STRING,
        "value": STRING,
        "type": (["text", "hidden", "checkbox"], "text"),
        }),

    "textarea": (1, {
        "name": STRING,
        }),

    "select": (1, {
        "name": STRING,
        "value": STRING,
        }),

    "option": (0, {
        "value": STRING,
        }),

    }

# XXX use "html" not "*" as default!
DEFAULT_CSS = """
html {
    font-family: Helvetica;
    font-size: 10px;
    font-weight: normal;
    color: #000000;
    background-color: transparent;
    margin: 0;
    padding: 0;
    line-height: 150%;
    border: 1px none;
    display: inline;
    width: auto;
    height: auto;
    white-space: normal;
}

b, strong { font-weight: bold; }

i, em { font-style: italic; }

u { text-decoration: underline; }

s, strike { text-decoration: line-through; }

a { text-decoration: underline; color: blue; }

ins { color: green; text-decoration: underline; }

del { color: red; text-decoration: line-through; }

pre, code, kbd, samp, tt { font-family: "Courier New"; }

h1, h2, h3, h4, h5, h6 {
    font-weight: bold;
    -pdf-outline: true;
    -pdf-outline-open: false;
}

h1 {
    /* 18px via YUI Fonts CSS foundation */
    font-size: 138.5%;
    -pdf-outline-level: 0;
}

h2 {
    /* 16px via YUI Fonts CSS foundation */
    font-size: 123.1%;
    -pdf-outline-level: 1;
}

h3 {
    /* 14px via YUI Fonts CSS foundation */
    font-size: 108%;
    -pdf-outline-level: 2;
}

h4 { -pdf-outline-level: 3; }

h5 { -pdf-outline-level: 4; }

h6 { -pdf-outline-level: 5; }

h1, h2, h3, h4, h5, h6, p, pre, hr { margin: 1em 0; }

address, blockquote, body, center, dl, dir, div, fieldset, form,
h1, h2, h3, h4, h5, h6, hr, isindex, menu, noframes, noscript,
ol, p, pre, table, th, tr, td, ul, li, dd, dt, pdftoc {
    display: block;
}

table { -pdf-keep-in-frame-mode: shrink; }

tr, th, td { vertical-align: middle; width: auto; }

th { text-align: center; font-weight: bold; }

center { text-align: center; }

big { font-size: 125%; }

small { font-size: 75%; }

ul { margin-left: 1.5em; list-style-type: disc; }

ul ul { list-style-type: circle; }

ul ul ul { list-style-type: square; }

ol { list-style-type: decimal; margin-left: 1.5em; }

pre { white-space: pre; }

blockquote { margin-left: 1.5em; margin-right: 1.5em; }

noscript { display: none; }
"""

DEFAULT_FONT = {
    "courier": "Courier",
    "courier-bold": "Courier-Bold",
    "courier-boldoblique": "Courier-BoldOblique",
    "courier-oblique": "Courier-Oblique",
    "helvetica": "Helvetica",
    "helvetica-bold": "Helvetica-Bold",
    "helvetica-boldoblique": "Helvetica-BoldOblique",
    "helvetica-oblique": "Helvetica-Oblique",
    "times": "Times-Roman",
    "times-roman": "Times-Roman",
    "times-bold": "Times-Bold",
    "times-boldoblique": "Times-BoldOblique",
    "times-oblique": "Times-Oblique",
    "symbol": "Symbol",
    "zapfdingbats": "ZapfDingbats",
    "zapf-dingbats": "ZapfDingbats",

    # Alias
    "arial": "Helvetica",
    "times new roman": "Times-Roman",
    "georgia": "Times-Roman",
    "serif": "Times-Roman",
    "sansserif": "Helvetica",
    "sans": "Helvetica",
    "monospaced": "Courier",
    "monospace": "Courier",
    "mono": "Courier",
    "courier new": "Courier",
    "verdana": "Helvetica",
    "geneva": "Helvetica",
}

PML_PAGESIZES = {
    "a0": A0,
    "a1": A1,
    "a2": A2,
    "a3": A3,
    "a4": A4,
    "a5": A5,
    "a6": A6,
    "b0": B0,
    "b1": B1,
    "b2": B2,
    "b3": B3,
    "b4": B4,
    "b5": B5,
    "b6": B6,
    "letter": LETTER,
    "legal": LEGAL,
    "ledger": ELEVENSEVENTEEN,
    "elevenseventeen": ELEVENSEVENTEEN,
}
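
# A small sketch of how these lookup tables are meant to be consumed; the
# values come straight from the dicts above (page sizes are reportlab
# (width, height) tuples measured in points).
#
#     DEFAULT_FONT["arial"]      # 'Helvetica' -- alias resolution
#     PML_PAGESIZES["letter"]    # LETTER == (612.0, 792.0)
#     "pdftemplate" in TAGS      # True -- entry in the tag reference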
mit
AunShiLord/sympy
sympy/core/tests/test_subs.py
24
20537
from __future__ import division from sympy import (Symbol, Wild, sin, cos, exp, sqrt, pi, Function, Derivative, abc, Integer, Eq, symbols, Add, I, Float, log, Rational, Lambda, atan2, cse, cot, tan, S, Tuple, Basic, Dict, Piecewise, oo, Mul, factor, nsimplify, zoo, Subs) from sympy.core.basic import _aresame from sympy.utilities.pytest import XFAIL from sympy.abc import x, y, z def test_subs(): n3 = Rational(3) e = x e = e.subs(x, n3) assert e == Rational(3) e = 2*x assert e == 2*x e = e.subs(x, n3) assert e == Rational(6) def test_trigonometric(): n3 = Rational(3) e = (sin(x)**2).diff(x) assert e == 2*sin(x)*cos(x) e = e.subs(x, n3) assert e == 2*cos(n3)*sin(n3) e = (sin(x)**2).diff(x) assert e == 2*sin(x)*cos(x) e = e.subs(sin(x), cos(x)) assert e == 2*cos(x)**2 assert exp(pi).subs(exp, sin) == 0 assert cos(exp(pi)).subs(exp, sin) == 1 i = Symbol('i', integer=True) zoo = S.ComplexInfinity assert tan(x).subs(x, pi/2) is zoo assert cot(x).subs(x, pi) is zoo assert cot(i*x).subs(x, pi) is zoo assert tan(i*x).subs(x, pi/2) == tan(i*pi/2) assert tan(i*x).subs(x, pi/2).subs(i, 1) is zoo o = Symbol('o', odd=True) assert tan(o*x).subs(x, pi/2) == tan(o*pi/2) def test_powers(): assert sqrt(1 - sqrt(x)).subs(x, 4) == I assert (sqrt(1 - x**2)**3).subs(x, 2) == - 3*I*sqrt(3) assert (x**Rational(1, 3)).subs(x, 27) == 3 assert (x**Rational(1, 3)).subs(x, -27) == 3*(-1)**Rational(1, 3) assert ((-x)**Rational(1, 3)).subs(x, 27) == 3*(-1)**Rational(1, 3) n = Symbol('n', negative=True) assert (x**n).subs(x, 0) is S.ComplexInfinity assert exp(-1).subs(S.Exp1, 0) is S.ComplexInfinity assert (x**(4.0*y)).subs(x**(2.0*y), n) == n**2.0 assert (2**(x + 2)).subs(2, 3) == 3**(x + 3) def test_logexppow(): # no eval() x = Symbol('x', real=True) w = Symbol('w') e = (3**(1 + x) + 2**(1 + x))/(3**x + 2**x) assert e.subs(2**x, w) != e assert e.subs(exp(x*log(Rational(2))), w) != e def test_bug(): x1 = Symbol('x1') x2 = Symbol('x2') y = x1*x2 assert y.subs(x1, Float(3.0)) == Float(3.0)*x2 def test_subbug1(): # see that they don't fail (x**x).subs(x, 1) (x**x).subs(x, 1.0) def test_subbug2(): # Ensure this does not cause infinite recursion assert Float(7.7).epsilon_eq(abs(x).subs(x, -7.7)) def test_dict_set(): a, b, c = map(Wild, 'abc') f = 3*cos(4*x) r = f.match(a*cos(b*x)) assert r == {a: 3, b: 4} e = a/b*sin(b*x) assert e.subs(r) == r[a]/r[b]*sin(r[b]*x) assert e.subs(r) == 3*sin(4*x) / 4 s = set(r.items()) assert e.subs(s) == r[a]/r[b]*sin(r[b]*x) assert e.subs(s) == 3*sin(4*x) / 4 assert e.subs(r) == r[a]/r[b]*sin(r[b]*x) assert e.subs(r) == 3*sin(4*x) / 4 assert x.subs(Dict((x, 1))) == 1 def test_dict_ambigous(): # see issue 3566 y = Symbol('y') z = Symbol('z') f = x*exp(x) g = z*exp(z) df = {x: y, exp(x): y} dg = {z: y, exp(z): y} assert f.subs(df) == y**2 assert g.subs(dg) == y**2 # and this is how order can affect the result assert f.subs(x, y).subs(exp(x), y) == y*exp(y) assert f.subs(exp(x), y).subs(x, y) == y**2 # length of args and count_ops are the same so # default_sort_key resolves ordering...if one # doesn't want this result then an unordered # sequence should not be used. 
e = 1 + x*y assert e.subs({x: y, y: 2}) == 5 # here, there are no obviously clashing keys or values # but the results depend on the order assert exp(x/2 + y).subs(dict([(exp(y + 1), 2), (x, 2)])) == exp(y + 1) def test_deriv_sub_bug3(): y = Symbol('y') f = Function('f') pat = Derivative(f(x), x, x) assert pat.subs(y, y**2) == Derivative(f(x), x, x) assert pat.subs(y, y**2) != Derivative(f(x), x) def test_equality_subs1(): f = Function('f') x = abc.x eq = Eq(f(x)**2, x) res = Eq(Integer(16), x) assert eq.subs(f(x), 4) == res def test_equality_subs2(): f = Function('f') x = abc.x eq = Eq(f(x)**2, 16) assert bool(eq.subs(f(x), 3)) is False assert bool(eq.subs(f(x), 4)) is True def test_issue_3742(): y = Symbol('y') e = sqrt(x)*exp(y) assert e.subs(sqrt(x), 1) == exp(y) def test_subs_dict1(): x, y = symbols('x y') assert (1 + x*y).subs(x, pi) == 1 + pi*y assert (1 + x*y).subs({x: pi, y: 2}) == 1 + 2*pi c2, c3, q1p, q2p, c1, s1, s2, s3 = symbols('c2 c3 q1p q2p c1 s1 s2 s3') test = (c2**2*q2p*c3 + c1**2*s2**2*q2p*c3 + s1**2*s2**2*q2p*c3 - c1**2*q1p*c2*s3 - s1**2*q1p*c2*s3) assert (test.subs({c1**2: 1 - s1**2, c2**2: 1 - s2**2, c3**3: 1 - s3**2}) == c3*q2p*(1 - s2**2) + c3*q2p*s2**2*(1 - s1**2) - c2*q1p*s3*(1 - s1**2) + c3*q2p*s1**2*s2**2 - c2*q1p*s3*s1**2) def test_mul(): x, y, z, a, b, c = symbols('x y z a b c') A, B, C = symbols('A B C', commutative=0) assert (x*y*z).subs(z*x, y) == y**2 assert (z*x).subs(1/x, z) == z*x assert (x*y/z).subs(1/z, a) == a*x*y assert (x*y/z).subs(x/z, a) == a*y assert (x*y/z).subs(y/z, a) == a*x assert (x*y/z).subs(x/z, 1/a) == y/a assert (x*y/z).subs(x, 1/a) == y/(z*a) assert (2*x*y).subs(5*x*y, z) != 2*z/5 assert (x*y*A).subs(x*y, a) == a*A assert (x**2*y**(3*x/2)).subs(x*y**(x/2), 2) == 4*y**(x/2) assert (x*exp(x*2)).subs(x*exp(x), 2) == 2*exp(x) assert ((x**(2*y))**3).subs(x**y, 2) == 64 assert (x*A*B).subs(x*A, y) == y*B assert (x*y*(1 + x)*(1 + x*y)).subs(x*y, 2) == 6*(1 + x) assert ((1 + A*B)*A*B).subs(A*B, x*A*B) assert (x*a/z).subs(x/z, A) == a*A assert (x**3*A).subs(x**2*A, a) == a*x assert (x**2*A*B).subs(x**2*B, a) == a*A assert (x**2*A*B).subs(x**2*A, a) == a*B assert (b*A**3/(a**3*c**3)).subs(a**4*c**3*A**3/b**4, z) == \ b*A**3/(a**3*c**3) assert (6*x).subs(2*x, y) == 3*y assert (y*exp(3*x/2)).subs(y*exp(x), 2) == 2*exp(x/2) assert (y*exp(3*x/2)).subs(y*exp(x), 2) == 2*exp(x/2) assert (A**2*B*A**2*B*A**2).subs(A*B*A, C) == A*C**2*A assert (x*A**3).subs(x*A, y) == y*A**2 assert (x**2*A**3).subs(x*A, y) == y**2*A assert (x*A**3).subs(x*A, B) == B*A**2 assert (x*A*B*A*exp(x*A*B)).subs(x*A, B) == B**2*A*exp(B*B) assert (x**2*A*B*A*exp(x*A*B)).subs(x*A, B) == B**3*exp(B**2) assert (x**3*A*exp(x*A*B)*A*exp(x*A*B)).subs(x*A, B) == \ x*B*exp(B**2)*B*exp(B**2) assert (x*A*B*C*A*B).subs(x*A*B, C) == C**2*A*B assert (-I*a*b).subs(a*b, 2) == -2*I # issue 6361 assert (-8*I*a).subs(-2*a, 1) == 4*I assert (-I*a).subs(-a, 1) == I # issue 6441 assert (4*x**2).subs(2*x, y) == y**2 assert (2*4*x**2).subs(2*x, y) == 2*y**2 assert (-x**3/9).subs(-x/3, z) == -z**2*x assert (-x**3/9).subs(x/3, z) == -z**2*x assert (-2*x**3/9).subs(x/3, z) == -2*x*z**2 assert (-2*x**3/9).subs(-x/3, z) == -2*x*z**2 assert (-2*x**3/9).subs(-2*x, z) == z*x**2/9 assert (-2*x**3/9).subs(2*x, z) == -z*x**2/9 assert (2*(3*x/5/7)**2).subs(3*x/5, z) == 2*(S(1)/7)**2*z**2 assert (4*x).subs(-2*x, z) == 4*x # try keep subs literal def test_subs_simple(): a = symbols('a', commutative=True) x = symbols('x', commutative=False) assert (2*a).subs(1, 3) == 2*a assert (2*a).subs(2, 3) == 3*a assert 
(2*a).subs(a, 3) == 6 assert sin(2).subs(1, 3) == sin(2) assert sin(2).subs(2, 3) == sin(3) assert sin(a).subs(a, 3) == sin(3) assert (2*x).subs(1, 3) == 2*x assert (2*x).subs(2, 3) == 3*x assert (2*x).subs(x, 3) == 6 assert sin(x).subs(x, 3) == sin(3) def test_subs_constants(): a, b = symbols('a b', commutative=True) x, y = symbols('x y', commutative=False) assert (a*b).subs(2*a, 1) == a*b assert (1.5*a*b).subs(a, 1) == 1.5*b assert (2*a*b).subs(2*a, 1) == b assert (2*a*b).subs(4*a, 1) == 2*a*b assert (x*y).subs(2*x, 1) == x*y assert (1.5*x*y).subs(x, 1) == 1.5*y assert (2*x*y).subs(2*x, 1) == y assert (2*x*y).subs(4*x, 1) == 2*x*y def test_subs_commutative(): a, b, c, d, K = symbols('a b c d K', commutative=True) assert (a*b).subs(a*b, K) == K assert (a*b*a*b).subs(a*b, K) == K**2 assert (a*a*b*b).subs(a*b, K) == K**2 assert (a*b*c*d).subs(a*b*c, K) == d*K assert (a*b**c).subs(a, K) == K*b**c assert (a*b**c).subs(b, K) == a*K**c assert (a*b**c).subs(c, K) == a*b**K assert (a*b*c*b*a).subs(a*b, K) == c*K**2 assert (a**3*b**2*a).subs(a*b, K) == a**2*K**2 def test_subs_noncommutative(): w, x, y, z, L = symbols('w x y z L', commutative=False) assert (x*y).subs(x*y, L) == L assert (w*y*x).subs(x*y, L) == w*y*x assert (w*x*y*z).subs(x*y, L) == w*L*z assert (x*y*x*y).subs(x*y, L) == L**2 assert (x*x*y).subs(x*y, L) == x*L assert (x*x*y*y).subs(x*y, L) == x*L*y assert (w*x*y).subs(x*y*z, L) == w*x*y assert (x*y**z).subs(x, L) == L*y**z assert (x*y**z).subs(y, L) == x*L**z assert (x*y**z).subs(z, L) == x*y**L assert (w*x*y*z*x*y).subs(x*y*z, L) == w*L*x*y assert (w*x*y*y*w*x*x*y*x*y*y*x*y).subs(x*y, L) == w*L*y*w*x*L**2*y*L def test_subs_basic_funcs(): a, b, c, d, K = symbols('a b c d K', commutative=True) w, x, y, z, L = symbols('w x y z L', commutative=False) assert (x + y).subs(x + y, L) == L assert (x - y).subs(x - y, L) == L assert (x/y).subs(x, L) == L/y assert (x**y).subs(x, L) == L**y assert (x**y).subs(y, L) == x**L assert ((a - c)/b).subs(b, K) == (a - c)/K assert (exp(x*y - z)).subs(x*y, L) == exp(L - z) assert (a*exp(x*y - w*z) + b*exp(x*y + w*z)).subs(z, 0) == \ a*exp(x*y) + b*exp(x*y) assert ((a - b)/(c*d - a*b)).subs(c*d - a*b, K) == (a - b)/K assert (w*exp(a*b - c)*x*y/4).subs(x*y, L) == w*exp(a*b - c)*L/4 def test_subs_wild(): R, S, T, U = symbols('R S T U', cls=Wild) assert (R*S).subs(R*S, T) == T assert (S*R).subs(R*S, T) == T assert (R + S).subs(R + S, T) == T assert (R**S).subs(R, T) == T**S assert (R**S).subs(S, T) == R**T assert (R*S**T).subs(R, U) == U*S**T assert (R*S**T).subs(S, U) == R*U**T assert (R*S**T).subs(T, U) == R*S**U def test_subs_mixed(): a, b, c, d, K = symbols('a b c d K', commutative=True) w, x, y, z, L = symbols('w x y z L', commutative=False) R, S, T, U = symbols('R S T U', cls=Wild) assert (a*x*y).subs(x*y, L) == a*L assert (a*b*x*y*x).subs(x*y, L) == a*b*L*x assert (R*x*y*exp(x*y)).subs(x*y, L) == R*L*exp(L) assert (a*x*y*y*x - x*y*z*exp(a*b)).subs(x*y, L) == a*L*y*x - L*z*exp(a*b) e = c*y*x*y*x**(R*S - a*b) - T*(a*R*b*S) assert e.subs(x*y, L).subs(a*b, K).subs(R*S, U) == \ c*y*L*x**(U - K) - T*(U*K) def test_division(): a, b, c = symbols('a b c', commutative=True) x, y, z = symbols('x y z', commutative=True) assert (1/a).subs(a, c) == 1/c assert (1/a**2).subs(a, c) == 1/c**2 assert (1/a**2).subs(a, -2) == Rational(1, 4) assert (-(1/a**2)).subs(a, -2) == -Rational(1, 4) assert (1/x).subs(x, z) == 1/z assert (1/x**2).subs(x, z) == 1/z**2 assert (1/x**2).subs(x, -2) == Rational(1, 4) assert (-(1/x**2)).subs(x, -2) == -Rational(1, 4) #issue 5360 assert 
(1/x).subs(x, 0) == 1/S(0) def test_add(): a, b, c, d, x, y, t = symbols('a b c d x y t') assert (a**2 - b - c).subs(a**2 - b, d) in [d - c, a**2 - b - c] assert (a**2 - c).subs(a**2 - c, d) == d assert (a**2 - b - c).subs(a**2 - c, d) in [d - b, a**2 - b - c] assert (a**2 - x - c).subs(a**2 - c, d) in [d - x, a**2 - x - c] assert (a**2 - b - sqrt(a)).subs(a**2 - sqrt(a), c) == c - b assert (a + b + exp(a + b)).subs(a + b, c) == c + exp(c) assert (c + b + exp(c + b)).subs(c + b, a) == a + exp(a) assert (a + b + c + d).subs(b + c, x) == a + d + x assert (a + b + c + d).subs(-b - c, x) == a + d - x assert ((x + 1)*y).subs(x + 1, t) == t*y assert ((-x - 1)*y).subs(x + 1, t) == -t*y assert ((x - 1)*y).subs(x + 1, t) == y*(t - 2) assert ((-x + 1)*y).subs(x + 1, t) == y*(-t + 2) # this should work everytime: e = a**2 - b - c assert e.subs(Add(*e.args[:2]), d) == d + e.args[2] assert e.subs(a**2 - c, d) == d - b # the fallback should recognize when a change has # been made; while .1 == Rational(1, 10) they are not the same # and the change should be made assert (0.1 + a).subs(0.1, Rational(1, 10)) == Rational(1, 10) + a e = (-x*(-y + 1) - y*(y - 1)) ans = (-x*(x) - y*(-x)).expand() assert e.subs(-y + 1, x) == ans def test_subs_issue_4009(): assert (I*Symbol('a')).subs(1, 2) == I*Symbol('a') def test_functions_subs(): x, y = symbols('x y') f, g = symbols('f g', cls=Function) l = Lambda((x, y), sin(x) + y) assert (g(y, x) + cos(x)).subs(g, l) == sin(y) + x + cos(x) assert (f(x)**2).subs(f, sin) == sin(x)**2 assert (f(x, y)).subs(f, log) == log(x, y) assert (f(x, y)).subs(f, sin) == f(x, y) assert (sin(x) + atan2(x, y)).subs([[atan2, f], [sin, g]]) == \ f(x, y) + g(x) assert (g(f(x + y, x))).subs([[f, l], [g, exp]]) == exp(x + sin(x + y)) def test_derivative_subs(): y = Symbol('y') f = Function('f') assert Derivative(f(x), x).subs(f(x), y) != 0 assert Derivative(f(x), x).subs(f(x), y).subs(y, f(x)) == \ Derivative(f(x), x) # issues 5085, 5037 assert cse(Derivative(f(x), x) + f(x))[1][0].has(Derivative) assert cse(Derivative(f(x, y), x) + Derivative(f(x, y), y))[1][0].has(Derivative) def test_derivative_subs2(): x, y, z = symbols('x y z') f, g = symbols('f g', cls=Function) assert Derivative(f, x, y).subs(Derivative(f, x, y), g) == g assert Derivative(f, y, x).subs(Derivative(f, x, y), g) == g assert Derivative(f, x, y).subs(Derivative(f, x), g) == Derivative(g, y) assert Derivative(f, x, y).subs(Derivative(f, y), g) == Derivative(g, x) assert (Derivative(f(x, y, z), x, y, z).subs( Derivative(f(x, y, z), x, z), g) == Derivative(g, y)) assert (Derivative(f(x, y, z), x, y, z).subs( Derivative(f(x, y, z), z, y), g) == Derivative(g, x)) assert (Derivative(f(x, y, z), x, y, z).subs( Derivative(f(x, y, z), z, y, x), g) == g) def test_derivative_subs3(): x = Symbol('x') dex = Derivative(exp(x), x) assert Derivative(dex, x).subs(dex, exp(x)) == dex assert dex.subs(exp(x), dex) == Derivative(exp(x), x, x) def test_issue_5284(): A, B = symbols('A B', commutative=False) assert (x*A).subs(x**2*A, B) == x*A assert (A**2).subs(A**3, B) == A**2 assert (A**6).subs(A**3, B) == B**2 def test_subs_iter(): assert x.subs(reversed([[x, y]])) == y it = iter([[x, y]]) assert x.subs(it) == y assert x.subs(Tuple((x, y))) == y def test_subs_dict(): a, b, c, d, e = symbols('a b c d e') z = symbols('z') assert (2*x + y + z).subs(dict(x=1, y=2)) == 4 + z l = [(sin(x), 2), (x, 1)] assert (sin(x)).subs(l) == \ (sin(x)).subs(dict(l)) == 2 assert sin(x).subs(reversed(l)) == sin(1) expr = sin(2*x) + 
sqrt(sin(2*x))*cos(2*x)*sin(exp(x)*x) reps = dict([ (sin(2*x), c), (sqrt(sin(2*x)), a), (cos(2*x), b), (exp(x), e), (x, d), ]) assert expr.subs(reps) == c + a*b*sin(d*e) l = [(x, 3), (y, x**2)] assert (x + y).subs(l) == 3 + x**2 assert (x + y).subs(reversed(l)) == 12 # If changes are made to convert lists into dictionaries and do # a dictionary-lookup replacement, these tests will help to catch # some logical errors that might occur l = [(y, z + 2), (1 + z, 5), (z, 2)] assert (y - 1 + 3*x).subs(l) == 5 + 3*x l = [(y, z + 2), (z, 3)] assert (y - 2).subs(l) == 3 def test_no_arith_subs_on_floats(): a, x, y = symbols('a x y') assert (x + 3).subs(x + 3, a) == a assert (x + 3).subs(x + 2, a) == a + 1 assert (x + y + 3).subs(x + 3, a) == a + y assert (x + y + 3).subs(x + 2, a) == a + y + 1 assert (x + 3.0).subs(x + 3.0, a) == a assert (x + 3.0).subs(x + 2.0, a) == x + 3.0 assert (x + y + 3.0).subs(x + 3.0, a) == a + y assert (x + y + 3.0).subs(x + 2.0, a) == x + y + 3.0 def test_issue_5651(): a, b, c, K = symbols('a b c K', commutative=True) x, y, z = symbols('x y z') assert (a/(b*c)).subs(b*c, K) == a/K assert (a/(b**2*c**3)).subs(b*c, K) == a/(c*K**2) assert (1/(x*y)).subs(x*y, 2) == S.Half assert ((1 + x*y)/(x*y)).subs(x*y, 1) == 2 assert (x*y*z).subs(x*y, 2) == 2*z assert ((1 + x*y)/(x*y)/z).subs(x*y, 1) == 2/z def test_issue_6075(): assert Tuple(1, True).subs(1, 2) == Tuple(2, True) def test_issue_6079(): # since x + 2.0 == x + 2 we can't do a simple equality test x = symbols('x') assert _aresame((x + 2.0).subs(2, 3), x + 2.0) assert _aresame((x + 2.0).subs(2.0, 3), x + 3) assert not _aresame(x + 2, x + 2.0) assert not _aresame(Basic(cos, 1), Basic(cos, 1.)) assert _aresame(cos, cos) assert not _aresame(1, S(1)) assert not _aresame(x, symbols('x', positive=True)) def test_issue_4680(): N = Symbol('N') assert N.subs(dict(N=3)) == 3 def test_issue_6158(): assert (x - 1).subs(1, y) == x - y assert (x - 1).subs(-1, y) == x + y assert (x - oo).subs(oo, y) == x - y assert (x - oo).subs(-oo, y) == x + y def test_Function_subs(): from sympy.abc import x, y f, g, h, i = symbols('f g h i', cls=Function) p = Piecewise((g(f(x, y)), x < -1), (g(x), x <= 1)) assert p.subs(g, h) == Piecewise((h(f(x, y)), x < -1), (h(x), x <= 1)) assert (f(y) + g(x)).subs({f: h, g: i}) == i(x) + h(y) def test_simultaneous_subs(): reps = {x: 0, y: 0} assert (x/y).subs(reps) != (y/x).subs(reps) assert (x/y).subs(reps, simultaneous=True) == \ (y/x).subs(reps, simultaneous=True) reps = reps.items() assert (x/y).subs(reps) != (y/x).subs(reps) assert (x/y).subs(reps, simultaneous=True) == \ (y/x).subs(reps, simultaneous=True) assert Derivative(x, y, z).subs(reps, simultaneous=True) == \ Subs(Derivative(0, y, z), (y,), (0,)) def test_issue_6419_6421(): assert (1/(1 + x/y)).subs(x/y, x) == 1/(1 + x) assert (-2*I).subs(2*I, x) == -x assert (-I*x).subs(I*x, x) == -x assert (-3*I*y**4).subs(3*I*y**2, x) == -x*y**2 def test_issue_6559(): assert (-12*x + y).subs(-x, 1) == 12 + y # though this involves cse it generated a failure in Mul._eval_subs x0, x1 = symbols('x0 x1') e = -log(-12*sqrt(2) + 17)/24 - log(-2*sqrt(2) + 3)/12 + sqrt(2)/3 # XXX modify cse so x1 is eliminated and x0 = -sqrt(2)? 
assert cse(e) == ( [(x0, sqrt(2))], [x0/3 - log(-12*x0 + 17)/24 - log(-2*x0 + 3)/12]) def test_issue_5261(): x = symbols('x', real=True) e = I*x assert exp(e).subs(exp(x), y) == y**I assert (2**e).subs(2**x, y) == y**I eq = (-2)**e assert eq.subs((-2)**x, y) == eq def test_issue_6923(): assert (-2*x*sqrt(2)).subs(2*x, y) == -sqrt(2)*y def test_2arg_hack(): N = Symbol('N', commutative=False) ans = Mul(2, y + 1, evaluate=False) assert (2*x*(y + 1)).subs(x, 1, hack2=True) == ans assert (2*(y + 1 + N)).subs(N, 0, hack2=True) == ans @XFAIL def test_mul2(): """When this fails, remove things labelled "2-arg hack" 1) remove special handling in the fallback of subs that was added in the same commit as this test 2) remove the special handling in Mul.flatten """ assert (2*(x + 1)).is_Mul def test_noncommutative_subs(): x,y = symbols('x,y', commutative=False) assert (x*y*x).subs([(x,x*y),(y,x)],simultaneous=True) == (x*y*x**2*y) def test_issue_2877(): f = Float(2.0) assert (x + f).subs({f: 2}) == x + 2 def r(a,b,c): return factor(a*x**2 + b*x + c) e = r(5/6, 10, 5) assert nsimplify(e) == 5*x**2/6 + 10*x + 5 def test_issue_5910(): t = Symbol('t') assert (1/(1 - t)).subs(t, 1) == zoo n = t d = t - 1 assert (n/d).subs(t, 1) == zoo assert (-n/-d).subs(t, 1) == zoo def test_issue_5217(): s = Symbol('s') z = (1 - 2*x*x) w = (1 + 2*x*x) q = 2*x*x*2*y*y sub = {2*x*x: s} assert w.subs(sub) == 1 + s assert z.subs(sub) == 1 - s assert q == 4*x**2*y**2 assert q.subs(sub) == 2*y**2*s
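
# A quick interactive illustration of the subs() semantics pinned down by the
# tests above (x and y are the plain symbols imported from sympy.abc):
#
#     >>> (x + y).subs(x, 2)
#     y + 2
#     >>> (x*exp(x)).subs({x: y, exp(x): y})   # see test_dict_ambigous
#     y**2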
bsd-3-clause
kaiyuanl/gem5
src/arch/x86/isa/insts/x87/control/wait_for_exceptions.py
91
2150
# Copyright (c) 2007 The Hewlett-Packard Development Company
# All rights reserved.
#
# The license below extends only to copyright in the software and shall
# not be construed as granting a license to any other intellectual
# property including but not limited to intellectual property relating
# to a hardware implementation of the functionality of the software
# licensed hereunder. You may use the software subject to the license
# terms below provided that you ensure that this notice is replicated
# unmodified and in its entirety in all distributions of the software,
# modified or unmodified, in source code or in binary form.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors: Gabe Black

microcode = '''
# FWAIT
'''
bsd-3-clause
egabancho/invenio
invenio/modules/alerts/user_settings.py
4
2615
# -*- coding: utf-8 -*-
##
## This file is part of Invenio.
## Copyright (C) 2012 CERN.
##
## Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.

"""WebAlert User Settings"""

from invenio.base.i18n import _
from invenio.ext.template import render_template_to_string
from invenio.modules.alerts.models import UserQueryBasket
from flask.ext.login import current_user
from invenio.modules.dashboard.settings import Settings, UserSettingsStorage


class WebAlertSettings(Settings):

    keys = ['webalert_email_notification']
    #form_builder = WebAlertUserSettingsForm
    storage_builder = UserSettingsStorage

    def __init__(self):
        super(WebAlertSettings, self).__init__()
        self.storage = {}  #User.query.get(current_user.get_id()).settings
        self.icon = 'bell'
        self.title = _('Alerts WIP')
        #self.view = url_for('webalert.index')
        #self.edit = url_for('webaccount.edit', name=self.name)

    def widget(self):
        uid = current_user.get_id()
        query_baskets = UserQueryBasket.query.filter(
            UserQueryBasket.id_user == uid
        ).all()

        template = """
{{ _('You own the following') }}
<div class="btn-group">
  <a class="btn dropdown-toggle" data-toggle="dropdown" href="#">
    <strong class="text-info">{{ query_baskets|length }}</strong>
    {{ _('alerts') }}
    <span class="caret"></span>
  </a>
  <ul class="dropdown-menu">
    {%- for a in query_baskets -%}
    <li>
      <a href="#">
        {{ a.alert_name }}
      </a>
    </li>
    {%- endfor -%}
  </ul>
</div>"""

        return render_template_to_string(template, _from_string=True,
                                         query_baskets=query_baskets)

    widget.size = 4

    @property
    def is_authorized(self):
        return current_user.is_authenticated() and \
            current_user.is_authorized('usealerts')

## Compulsory plugin interface
settings = WebAlertSettings

#__all__ = ['WebAlertSettings']
gpl-2.0
dtbell91/zookeepr
zk/model/registration.py
4
2948
"""The application's model objects""" import sqlalchemy as sa from meta import Base from pylons.controllers.util import abort from meta import Session from lib.model import CommaList from person import Person from voucher import Voucher class Registration(Base): __tablename__ = 'registration' id = sa.Column('id', sa.types.Integer, primary_key=True) person_id = sa.Column(sa.types.Integer, sa.ForeignKey('person.id'), unique=True) over18 = sa.Column(sa.types.Boolean) nick = sa.Column(sa.types.Text) shell = sa.Column(sa.types.Text) editor = sa.Column(sa.types.Text) distro = sa.Column(sa.types.Text) vcs = sa.Column(sa.types.Text) silly_description = sa.Column(sa.types.Text) keyid = sa.Column(sa.types.Text) planetfeed = sa.Column(sa.types.Text) voucher_code = sa.Column(sa.types.Text, sa.ForeignKey('voucher.code'), unique=True) diet = sa.Column(sa.types.Text) special = sa.Column(sa.types.Text) partner_name = sa.Column(sa.types.Text) partner_email = sa.Column(sa.types.Text) partner_mobile = sa.Column(sa.types.Text) # checkin = sa.Column(sa.types.Integer) # checkout = sa.Column(sa.types.Integer) prevlca = sa.Column(CommaList) signup = sa.Column(CommaList) creation_timestamp = sa.Column(sa.types.DateTime, nullable=False, default=sa.func.current_timestamp()) last_modification_timestamp = sa.Column(sa.types.DateTime, nullable=False, default=sa.func.current_timestamp(), onupdate=sa.func.current_timestamp()) person = sa.orm.relation(Person, backref=sa.orm.backref('registration', cascade="all, delete-orphan", lazy=True, uselist=False)) voucher = sa.orm.relation(Voucher, uselist=False, primaryjoin='Registration.voucher_code==Voucher.code', foreign_keys=Voucher.code, backref = 'registration', ) def __init__(self, **kwargs): super(Registration, self).__init__(**kwargs) def __repr__(self): return '<Registration id=%r person_id=%r>' % (self.id, self.person_id) def ticket_description(self): for p in self.products: if p.product.category.name.lower() == "ticket": return p.product.description return "" @classmethod def find_by_id(cls, id, abort_404 = True): result = Session.query(Registration).filter_by(id=id).first() if result is None and abort_404: abort(404, "No such Registration object") return result @classmethod def find_by_ids(cls, id_list): return Session.query(Registration).filter(Registration.id.in_(id_list)).all() @classmethod def find_all(cls): return Session.query(Registration).order_by(Registration.id).all()
gpl-2.0
nicobustillos/odoo
addons/decimal_precision/tests/test_qweb_float.py
102
2000
# -*- coding: utf-8 -*-
from openerp.tests import common


class TestFloatExport(common.TransactionCase):
    def setUp(self):
        super(TestFloatExport, self).setUp()
        self.Model = self.registry('decimal.precision.test')

    def get_converter(self, name):
        converter = self.registry('ir.qweb.field.float')
        column = self.Model._all_columns[name].column

        return lambda value, options=None: converter.value_to_html(
            self.cr, self.uid, value, column, options=options, context=None)

    def test_basic_float(self):
        converter = self.get_converter('float')
        self.assertEqual(
            converter(42.0),
            "42.0")
        self.assertEqual(
            converter(42.12345),
            "42.12345")

        converter = self.get_converter('float_2')
        self.assertEqual(
            converter(42.0),
            "42.00")
        self.assertEqual(
            converter(42.12345),
            "42.12")

        converter = self.get_converter('float_4')
        self.assertEqual(
            converter(42.0),
            '42.0000')
        self.assertEqual(
            converter(42.12345),
            '42.1234')

    def test_precision_domain(self):
        DP = self.registry('decimal.precision')
        DP.create(self.cr, self.uid, {
            'name': 'A',
            'digits': 2,
        })
        DP.create(self.cr, self.uid, {
            'name': 'B',
            'digits': 6,
        })

        converter = self.get_converter('float')
        self.assertEqual(
            converter(42.0, {'decimal_precision': 'A'}),
            '42.00')
        self.assertEqual(
            converter(42.0, {'decimal_precision': 'B'}),
            '42.000000')

        converter = self.get_converter('float_4')
        self.assertEqual(
            converter(42.12345, {'decimal_precision': 'A'}),
            '42.12')
        self.assertEqual(
            converter(42.12345, {'decimal_precision': 'B'}),
            '42.123450')
agpl-3.0
bruderstein/PythonScript
PythonLib/full/encodings/iso2022_jp_2.py
816
1061
#
# iso2022_jp_2.py: Python Unicode Codec for ISO2022_JP_2
#
# Written by Hye-Shik Chang <perky@FreeBSD.org>
#

import _codecs_iso2022, codecs
import _multibytecodec as mbc

codec = _codecs_iso2022.getcodec('iso2022_jp_2')

class Codec(codecs.Codec):
    encode = codec.encode
    decode = codec.decode

class IncrementalEncoder(mbc.MultibyteIncrementalEncoder,
                         codecs.IncrementalEncoder):
    codec = codec

class IncrementalDecoder(mbc.MultibyteIncrementalDecoder,
                         codecs.IncrementalDecoder):
    codec = codec

class StreamReader(Codec, mbc.MultibyteStreamReader, codecs.StreamReader):
    codec = codec

class StreamWriter(Codec, mbc.MultibyteStreamWriter, codecs.StreamWriter):
    codec = codec

def getregentry():
    return codecs.CodecInfo(
        name='iso2022_jp_2',
        encode=Codec().encode,
        decode=Codec().decode,
        incrementalencoder=IncrementalEncoder,
        incrementaldecoder=IncrementalDecoder,
        streamreader=StreamReader,
        streamwriter=StreamWriter,
    )
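
# Usage sketch: once this module is importable under the standard encodings
# package, the codec is reachable through the normal codecs machinery by name.
#
#     data = u'\u65e5\u672c\u8a9e'.encode('iso2022_jp_2')   # "nihongo"
#     assert data.decode('iso2022_jp_2') == u'\u65e5\u672c\u8a9e'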
gpl-2.0
davirussi/coursera-dl
coursera/credentials.py
15
4962
# -*- coding: utf-8 -*-

"""
Manages the credential information (netrc, passwords, etc).
"""

import getpass
import logging
import netrc
import os
import platform

try:
    import keyring
except ImportError:
    keyring = None

KEYRING_SERVICE_NAME = 'coursera-dl'


class CredentialsError(BaseException):
    """
    Class to be thrown if the credentials are not found.
    """

    pass


def _getenv_or_empty(s):
    """
    Helper function that converts None gotten from the environment to the
    empty string.
    """
    return os.getenv(s) or ""


def get_config_paths(config_name):  # pragma: no test
    """
    Return a list of config files paths to try in order, given config file
    name and possibly a user-specified path.

    For Windows platforms, there are several paths that can be tried to
    retrieve the netrc file. There is, however, no "standard way" of doing
    things.

    A brief recap of the situation (all file paths are written in Unix
    convention):

    1. By default, Windows does not define a $HOME path. However, some
    people might define one manually, and many command-line tools imported
    from Unix will search the $HOME environment variable first. This
    includes MSYSGit tools (bash, ssh, ...) and Emacs.

    2. Windows defines two 'user paths': $USERPROFILE, and the
    concatenation of the two variables $HOMEDRIVE and $HOMEPATH. Both of
    these paths point by default to the same location, e.g.
    C:\\Users\\Username

    3. $USERPROFILE cannot be changed, however $HOMEDRIVE and $HOMEPATH
    can be changed. They are originally intended to be the equivalent of
    the $HOME path, but there are many known issues with them

    4. As for the name of the file itself, most of the tools ported from
    Unix will use the standard '.dotfile' scheme, but some of these will
    instead use "_dotfile". Of the latter, the two notable exceptions are
    vim, which will first try '_vimrc' before '.vimrc' (but it will try
    both) and git, which will require the user to name its netrc file
    '_netrc'.

    Relevant links :
    http://markmail.org/message/i33ldu4xl5aterrr
    http://markmail.org/message/wbzs4gmtvkbewgxi
    http://stackoverflow.com/questions/6031214/

    Because the whole thing is a mess, I suggest we try various sensible
    defaults until we succeed or have depleted all possibilities.
    """
    if platform.system() != 'Windows':
        return [None]

    # Now, we only treat the case of Windows
    env_vars = [["HOME"],
                ["HOMEDRIVE", "HOMEPATH"],
                ["USERPROFILE"],
                ["SYSTEMDRIVE"]]

    env_dirs = []
    for var_list in env_vars:
        var_values = [_getenv_or_empty(var) for var in var_list]

        directory = ''.join(var_values)
        if not directory:
            logging.debug('Environment var(s) %s not defined, skipping',
                          var_list)
        else:
            env_dirs.append(directory)

    additional_dirs = ["C:", ""]

    all_dirs = env_dirs + additional_dirs

    leading_chars = [".", "_"]

    res = [''.join([directory, os.sep, lc, config_name])
           for directory in all_dirs
           for lc in leading_chars]

    return res


def authenticate_through_netrc(path=None):
    """
    Return the tuple user / password given a path for the .netrc file.

    Raises CredentialsError if no valid netrc file is found.
    """
    errors = []
    netrc_machine = 'coursera-dl'
    paths = [path] if path else get_config_paths("netrc")
    for path in paths:
        try:
            logging.debug('Trying netrc file %s', path)
            auths = netrc.netrc(path).authenticators(netrc_machine)
        except (IOError, netrc.NetrcParseError) as e:
            errors.append(e)
        else:
            if auths is None:
                errors.append('Didn\'t find any credentials for ' +
                              netrc_machine)
            else:
                return auths[0], auths[2]

    error_messages = '\n'.join(str(e) for e in errors)
    raise CredentialsError(
        'Did not find valid netrc file:\n' + error_messages)


def get_credentials(username=None, password=None, netrc=None,
                    use_keyring=False):
    """
    Return valid username, password tuple.

    Raises CredentialsError if username or password is missing.
    """
    if netrc:
        path = None if netrc is True else netrc
        return authenticate_through_netrc(path)

    if not username:
        raise CredentialsError(
            'Please provide a username with the -u option, '
            'or a .netrc file with the -n option.')

    if not password and use_keyring and keyring:
        password = keyring.get_password(KEYRING_SERVICE_NAME, username)

    if not password:
        password = getpass.getpass(
            'Coursera password for {0}: '.format(username))
        if use_keyring and keyring:
            keyring.set_password(KEYRING_SERVICE_NAME, username, password)

    return username, password
lgpl-3.0
kimjaejoong/nova
nova/tests/unit/objects/test_fixed_ip.py
46
15204
# Copyright 2014 Red Hat, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import datetime import iso8601 import mock import netaddr from oslo_utils import timeutils from nova import exception from nova.objects import fixed_ip from nova.tests.unit import fake_instance from nova.tests.unit.objects import test_network from nova.tests.unit.objects import test_objects fake_fixed_ip = { 'created_at': None, 'updated_at': None, 'deleted_at': None, 'deleted': False, 'id': 123, 'address': '192.168.1.100', 'network_id': None, 'virtual_interface_id': None, 'instance_uuid': None, 'allocated': False, 'leased': False, 'reserved': False, 'host': None, 'network': None, 'virtual_interface': None, 'floating_ips': [], } class _TestFixedIPObject(object): def _compare(self, obj, db_obj): for field in obj.fields: if field in ('default_route', 'floating_ips'): continue if field in fixed_ip.FIXED_IP_OPTIONAL_ATTRS: if obj.obj_attr_is_set(field) and db_obj[field] is not None: obj_val = obj[field].uuid db_val = db_obj[field]['uuid'] else: continue else: obj_val = obj[field] db_val = db_obj[field] if isinstance(obj_val, netaddr.IPAddress): obj_val = str(obj_val) self.assertEqual(db_val, obj_val) @mock.patch('nova.db.fixed_ip_get') def test_get_by_id(self, get): get.return_value = fake_fixed_ip fixedip = fixed_ip.FixedIP.get_by_id(self.context, 123) get.assert_called_once_with(self.context, 123, get_network=False) self._compare(fixedip, fake_fixed_ip) @mock.patch('nova.db.fixed_ip_get') @mock.patch('nova.db.network_get') def test_get_by_id_with_extras(self, network_get, fixed_get): db_fixed = dict(fake_fixed_ip, network=test_network.fake_network) fixed_get.return_value = db_fixed fixedip = fixed_ip.FixedIP.get_by_id(self.context, 123, expected_attrs=['network']) fixed_get.assert_called_once_with(self.context, 123, get_network=True) self._compare(fixedip, db_fixed) self.assertEqual(db_fixed['network']['uuid'], fixedip.network.uuid) self.assertFalse(network_get.called) @mock.patch('nova.db.fixed_ip_get_by_address') def test_get_by_address(self, get): get.return_value = fake_fixed_ip fixedip = fixed_ip.FixedIP.get_by_address(self.context, '1.2.3.4') get.assert_called_once_with(self.context, '1.2.3.4', columns_to_join=[]) self._compare(fixedip, fake_fixed_ip) @mock.patch('nova.db.fixed_ip_get_by_address') @mock.patch('nova.db.network_get') @mock.patch('nova.db.instance_get') def test_get_by_address_with_extras(self, instance_get, network_get, fixed_get): db_fixed = dict(fake_fixed_ip, network=test_network.fake_network, instance=fake_instance.fake_db_instance()) fixed_get.return_value = db_fixed fixedip = fixed_ip.FixedIP.get_by_address(self.context, '1.2.3.4', expected_attrs=['network', 'instance']) fixed_get.assert_called_once_with(self.context, '1.2.3.4', columns_to_join=['network', 'instance']) self._compare(fixedip, db_fixed) self.assertEqual(db_fixed['network']['uuid'], fixedip.network.uuid) self.assertEqual(db_fixed['instance']['uuid'], fixedip.instance.uuid) self.assertFalse(network_get.called) 
self.assertFalse(instance_get.called) @mock.patch('nova.db.fixed_ip_get_by_address') @mock.patch('nova.db.network_get') @mock.patch('nova.db.instance_get') def test_get_by_address_with_extras_deleted_instance(self, instance_get, network_get, fixed_get): db_fixed = dict(fake_fixed_ip, network=test_network.fake_network, instance=None) fixed_get.return_value = db_fixed fixedip = fixed_ip.FixedIP.get_by_address(self.context, '1.2.3.4', expected_attrs=['network', 'instance']) fixed_get.assert_called_once_with(self.context, '1.2.3.4', columns_to_join=['network', 'instance']) self._compare(fixedip, db_fixed) self.assertEqual(db_fixed['network']['uuid'], fixedip.network.uuid) self.assertIsNone(fixedip.instance) self.assertFalse(network_get.called) self.assertFalse(instance_get.called) @mock.patch('nova.db.fixed_ip_get_by_floating_address') def test_get_by_floating_address(self, get): get.return_value = fake_fixed_ip fixedip = fixed_ip.FixedIP.get_by_floating_address(self.context, '1.2.3.4') get.assert_called_once_with(self.context, '1.2.3.4') self._compare(fixedip, fake_fixed_ip) @mock.patch('nova.db.fixed_ip_get_by_floating_address') def test_get_by_floating_address_none(self, get): get.return_value = None fixedip = fixed_ip.FixedIP.get_by_floating_address(self.context, '1.2.3.4') get.assert_called_once_with(self.context, '1.2.3.4') self.assertIsNone(fixedip) @mock.patch('nova.db.fixed_ip_get_by_network_host') def test_get_by_network_and_host(self, get): get.return_value = fake_fixed_ip fixedip = fixed_ip.FixedIP.get_by_network_and_host(self.context, 123, 'host') get.assert_called_once_with(self.context, 123, 'host') self._compare(fixedip, fake_fixed_ip) @mock.patch('nova.db.fixed_ip_associate') def test_associate(self, associate): associate.return_value = fake_fixed_ip fixedip = fixed_ip.FixedIP.associate(self.context, '1.2.3.4', 'fake-uuid') associate.assert_called_with(self.context, '1.2.3.4', 'fake-uuid', network_id=None, reserved=False) self._compare(fixedip, fake_fixed_ip) @mock.patch('nova.db.fixed_ip_associate_pool') def test_associate_pool(self, associate): associate.return_value = fake_fixed_ip fixedip = fixed_ip.FixedIP.associate_pool(self.context, 123, 'fake-uuid', 'host') associate.assert_called_with(self.context, 123, instance_uuid='fake-uuid', host='host') self._compare(fixedip, fake_fixed_ip) @mock.patch('nova.db.fixed_ip_disassociate') def test_disassociate_by_address(self, disassociate): fixed_ip.FixedIP.disassociate_by_address(self.context, '1.2.3.4') disassociate.assert_called_with(self.context, '1.2.3.4') @mock.patch('nova.db.fixed_ip_disassociate_all_by_timeout') def test_disassociate_all_by_timeout(self, disassociate): now = timeutils.utcnow() now_tz = timeutils.parse_isotime( timeutils.isotime(now)).replace( tzinfo=iso8601.iso8601.Utc()) disassociate.return_value = 123 result = fixed_ip.FixedIP.disassociate_all_by_timeout(self.context, 'host', now) self.assertEqual(123, result) # NOTE(danms): be pedantic about timezone stuff args, kwargs = disassociate.call_args_list[0] self.assertEqual(now_tz, args[2]) self.assertEqual((self.context, 'host'), args[:2]) self.assertEqual({}, kwargs) @mock.patch('nova.db.fixed_ip_create') def test_create(self, create): create.return_value = fake_fixed_ip fixedip = fixed_ip.FixedIP(context=self.context, address='1.2.3.4') fixedip.create() create.assert_called_once_with( self.context, {'address': '1.2.3.4'}) self._compare(fixedip, fake_fixed_ip) @mock.patch('nova.db.fixed_ip_update') def test_save(self, update): update.return_value = 
fake_fixed_ip fixedip = fixed_ip.FixedIP(context=self.context, address='1.2.3.4', instance_uuid='fake-uuid') self.assertRaises(exception.ObjectActionError, fixedip.save) fixedip.obj_reset_changes(['address']) fixedip.save() update.assert_called_once_with(self.context, '1.2.3.4', {'instance_uuid': 'fake-uuid'}) @mock.patch('nova.db.fixed_ip_disassociate') def test_disassociate(self, disassociate): fixedip = fixed_ip.FixedIP(context=self.context, address='1.2.3.4', instance_uuid='fake-uuid') fixedip.obj_reset_changes() fixedip.disassociate() disassociate.assert_called_once_with(self.context, '1.2.3.4') self.assertIsNone(fixedip.instance_uuid) @mock.patch('nova.db.fixed_ip_get_all') def test_get_all(self, get_all): get_all.return_value = [fake_fixed_ip] fixedips = fixed_ip.FixedIPList.get_all(self.context) self.assertEqual(1, len(fixedips)) get_all.assert_called_once_with(self.context) self._compare(fixedips[0], fake_fixed_ip) @mock.patch('nova.db.fixed_ip_get_by_instance') def test_get_by_instance(self, get): get.return_value = [fake_fixed_ip] fixedips = fixed_ip.FixedIPList.get_by_instance_uuid(self.context, 'fake-uuid') self.assertEqual(1, len(fixedips)) get.assert_called_once_with(self.context, 'fake-uuid') self._compare(fixedips[0], fake_fixed_ip) @mock.patch('nova.db.fixed_ip_get_by_host') def test_get_by_host(self, get): get.return_value = [fake_fixed_ip] fixedips = fixed_ip.FixedIPList.get_by_host(self.context, 'host') self.assertEqual(1, len(fixedips)) get.assert_called_once_with(self.context, 'host') self._compare(fixedips[0], fake_fixed_ip) @mock.patch('nova.db.fixed_ips_by_virtual_interface') def test_get_by_virtual_interface_id(self, get): get.return_value = [fake_fixed_ip] fixedips = fixed_ip.FixedIPList.get_by_virtual_interface_id( self.context, 123) self.assertEqual(1, len(fixedips)) get.assert_called_once_with(self.context, 123) self._compare(fixedips[0], fake_fixed_ip) def test_floating_ips_do_not_lazy_load(self): fixedip = fixed_ip.FixedIP() self.assertRaises(NotImplementedError, lambda: fixedip.floating_ips) @mock.patch('nova.db.fixed_ip_bulk_create') def test_bulk_create(self, bulk): fixed_ips = [fixed_ip.FixedIP(address='192.168.1.1'), fixed_ip.FixedIP(address='192.168.1.2')] fixed_ip.FixedIPList.bulk_create(self.context, fixed_ips) bulk.assert_called_once_with(self.context, [{'address': '192.168.1.1'}, {'address': '192.168.1.2'}]) @mock.patch('nova.db.network_get_associated_fixed_ips') def test_get_by_network(self, get): info = {'address': '1.2.3.4', 'instance_uuid': 'fake-uuid', 'network_id': 0, 'vif_id': 1, 'vif_address': 'de:ad:be:ee:f0:00', 'instance_hostname': 'fake-host', 'instance_updated': datetime.datetime(1955, 11, 5), 'instance_created': datetime.datetime(1955, 11, 5), 'allocated': True, 'leased': True, 'default_route': True, } get.return_value = [info] fixed_ips = fixed_ip.FixedIPList.get_by_network( self.context, {'id': 0}, host='fake-host') get.assert_called_once_with(self.context, 0, host='fake-host') self.assertEqual(1, len(fixed_ips)) fip = fixed_ips[0] self.assertEqual('1.2.3.4', str(fip.address)) self.assertEqual('fake-uuid', fip.instance_uuid) self.assertEqual(0, fip.network_id) self.assertEqual(1, fip.virtual_interface_id) self.assertTrue(fip.allocated) self.assertTrue(fip.leased) self.assertEqual('fake-uuid', fip.instance.uuid) self.assertEqual('fake-host', fip.instance.hostname) self.assertIsInstance(fip.instance.created_at, datetime.datetime) self.assertIsInstance(fip.instance.updated_at, datetime.datetime) self.assertEqual(1, 
fip.virtual_interface.id) self.assertEqual(info['vif_address'], fip.virtual_interface.address) @mock.patch('nova.db.network_get_associated_fixed_ips') def test_backport_default_route(self, mock_get): info = {'address': '1.2.3.4', 'instance_uuid': 'fake-uuid', 'network_id': 0, 'vif_id': 1, 'vif_address': 'de:ad:be:ee:f0:00', 'instance_hostname': 'fake-host', 'instance_updated': datetime.datetime(1955, 11, 5), 'instance_created': datetime.datetime(1955, 11, 5), 'allocated': True, 'leased': True, 'default_route': True, } mock_get.return_value = [info] fixed_ips = fixed_ip.FixedIPList.get_by_network( self.context, {'id': 0}, host='fake-host') primitive = fixed_ips[0].obj_to_primitive() self.assertIn('default_route', primitive['nova_object.data']) fixed_ips[0].obj_make_compatible(primitive['nova_object.data'], '1.1') self.assertNotIn('default_route', primitive['nova_object.data']) class TestFixedIPObject(test_objects._LocalTest, _TestFixedIPObject): pass class TestRemoteFixedIPObject(test_objects._RemoteTest, _TestFixedIPObject): pass
apache-2.0
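The FixedIP tests above all follow one pattern: patch the nova.db API function, hand back a canned row dict, call the object-layer method, then assert both the returned object's fields and the exact DB call. A minimal self-contained sketch of that pattern (the Thing class, db_api_get helper and fake_row fixture here are hypothetical stand-ins, not Nova code):

import mock
import unittest

fake_row = {'id': 1, 'address': '1.2.3.4'}  # hypothetical canned DB row

def db_api_get(context, address):
    # stands in for a nova.db-style accessor; always patched out in tests
    raise RuntimeError('patched out in tests')

class Thing(object):
    """Hypothetical object-layer wrapper around a db call."""
    @classmethod
    def get_by_address(cls, context, address):
        row = db_api_get(context, address)
        obj = cls()
        obj.address = row['address']
        return obj

class TestThing(unittest.TestCase):
    @mock.patch(__name__ + '.db_api_get')
    def test_get_by_address(self, get):
        get.return_value = fake_row
        obj = Thing.get_by_address('ctxt', '1.2.3.4')
        get.assert_called_once_with('ctxt', '1.2.3.4')  # exact DB call checked
        self.assertEqual('1.2.3.4', obj.address)

if __name__ == '__main__':
    unittest.main()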
rodrigc/buildbot
master/buildbot/test/fake/httpclientservice.py
2
6948
# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import json as jsonmodule import mock from twisted.internet import defer from zope.interface import implementer from buildbot.interfaces import IHttpResponse from buildbot.util import httpclientservice from buildbot.util import service from buildbot.util import toJson from buildbot.util import unicode2bytes from buildbot.util.logger import Logger from buildbot.warnings import warn_deprecated log = Logger() @implementer(IHttpResponse) class ResponseWrapper: def __init__(self, code, content, url=None): self._content = content self._code = code self._url = url def content(self): content = unicode2bytes(self._content) return defer.succeed(content) def json(self): return defer.succeed(jsonmodule.loads(self._content)) @property def code(self): return self._code @property def url(self): return self._url class HTTPClientService(service.SharedService): """ HTTPClientService is a SharedService class that fakes http requests for buildbot http service testing. This class is named the same as the real HTTPClientService so that it could replace the real class in tests. If a test creates this class earlier than the real one, fake is going to be used until the master is destroyed. Whenever a master wants to create real HTTPClientService, it will find an existing fake service with the same name and use it instead. 
""" quiet = False def __init__(self, base_url, auth=None, headers=None, debug=None, verify=None, skipEncoding=None): assert not base_url.endswith("/"), "baseurl should not end with /" super().__init__() self._base_url = base_url self._auth = auth self._headers = headers self._session = None self._expected = [] def updateHeaders(self, headers): if self._headers is None: self._headers = {} self._headers.update(headers) @classmethod def getFakeService(cls, master, case, *args, **kwargs): warn_deprecated('2.9.0', 'getFakeService() has been deprecated, use getService()') return cls.getService(master, case, *args, **kwargs) @classmethod @defer.inlineCallbacks def getService(cls, master, case, *args, **kwargs): def assertNotCalled(self, *_args, **_kwargs): case.fail(("HTTPClientService called with *{!r}, **{!r} " "while should be called *{!r} **{!r}").format( _args, _kwargs, args, kwargs)) case.patch(httpclientservice.HTTPClientService, "__init__", assertNotCalled) service = yield super().getService(master, *args, **kwargs) service.case = case case.addCleanup(service.assertNoOutstanding) return service # tests should ensure this has been called checkAvailable = mock.Mock() def expect(self, method, ep, params=None, headers=None, data=None, json=None, code=200, content=None, content_json=None, files=None): if content is not None and content_json is not None: return ValueError("content and content_json cannot be both specified") if content_json is not None: content = jsonmodule.dumps(content_json, default=toJson) self._expected.append(dict( method=method, ep=ep, params=params, headers=headers, data=data, json=json, code=code, content=content, files=files)) return None def assertNoOutstanding(self): self.case.assertEqual(0, len(self._expected), "expected more http requests:\n {!r}".format(self._expected)) def _doRequest(self, method, ep, params=None, headers=None, data=None, json=None, files=None, timeout=None): assert ep == "" or ep.startswith("/"), "ep should start with /: " + ep if not self.quiet: log.debug("{method} {ep} {params!r} <- {data!r}", method=method, ep=ep, params=params, data=data or json) if json is not None: # ensure that the json is really jsonable jsonmodule.dumps(json, default=toJson) if files is not None: files = dict((k, v.read()) for (k, v) in files.items()) if not self._expected: raise AssertionError( "Not expecting a request, while we got: " "method={!r}, ep={!r}, params={!r}, headers={!r}, " "data={!r}, json={!r}, files={!r}".format( method, ep, params, headers, data, json, files)) expect = self._expected.pop(0) # pylint: disable=too-many-boolean-expressions if (expect['method'] != method or expect['ep'] != ep or expect['params'] != params or expect['headers'] != headers or expect['data'] != data or expect['json'] != json or expect['files'] != files): raise AssertionError( "expecting:\n" "method={!r}, ep={!r}, params={!r}, headers={!r}, " "data={!r}, json={!r}, files={!r}\n" "got :\n" "method={!r}, ep={!r}, params={!r}, headers={!r}, " "data={!r}, json={!r}, files={!r}".format( expect['method'], expect['ep'], expect['params'], expect['headers'], expect['data'], expect['json'], expect['files'], method, ep, params, headers, data, json, files, )) if not self.quiet: log.debug("{method} {ep} -> {code} {content!r}", method=method, ep=ep, code=expect['code'], content=expect['content']) return defer.succeed(ResponseWrapper(expect['code'], expect['content'])) # lets be nice to the auto completers, and don't generate that code def get(self, ep, **kwargs): return self._doRequest('get', 
ep, **kwargs) def put(self, ep, **kwargs): return self._doRequest('put', ep, **kwargs) def delete(self, ep, **kwargs): return self._doRequest('delete', ep, **kwargs) def post(self, ep, **kwargs): return self._doRequest('post', ep, **kwargs)
gpl-2.0
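Given the expect()/_doRequest() queue above, a test primes the fake with the requests it expects and the canned responses to hand back, then exercises the code under test. A sketch of how a trial test case might drive it (the self.master attribute is assumed to come from buildbot's usual fake-master fixture, which is not shown here):

from twisted.internet import defer
from twisted.trial import unittest

from buildbot.test.fake import httpclientservice as fakehttpclientservice

class MyTest(unittest.TestCase):
    @defer.inlineCallbacks
    def test_get(self):
        # self.master is assumed to be set up by a fake-master fixture
        http = yield fakehttpclientservice.HTTPClientService.getService(
            self.master, self, 'http://example.com')
        # queue one expected request together with its canned response
        http.expect('get', '/api/v2/thing', code=200, content_json={'ok': True})
        res = yield http.get('/api/v2/thing')
        data = yield res.json()
        self.assertEqual({'ok': True}, data)
        # assertNoOutstanding() runs automatically: getService() registered
        # it as a cleanup, so unconsumed expectations fail the test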
dsyer/zipkin
doc/src/sphinx/conf.py
9
2015
# -*- coding: utf-8 -*- # # Documentation config # import sys, os sys.path.append(os.path.abspath('exts')) sys.path.append(os.path.abspath('utils')) import sbt_versions # highlight_language = 'scala' highlight_language = 'text' # this way we don't get ugly syntax coloring extensions = ['sphinx.ext.extlinks', 'includecode'] templates_path = ['_templates'] source_suffix = '.rst' master_doc = 'index' exclude_patterns = [] sys.path.append(os.path.abspath('_themes')) html_theme_path = ['_themes'] html_theme = 'flask' html_short_title = 'Zipkin' html_static_path = ['_static'] html_sidebars = { 'index': ['sidebarintro.html', 'searchbox.html'], '**': ['sidebarlogo.html', 'sidebarintro.html', 'localtoc.html', 'relations.html', 'searchbox.html'] } html_theme_options = { 'index_logo': None } # These don't seem to work? html_use_smartypants = True html_show_sphinx = False html_style = 'zipkin.css' project = u'Zipkin' copyright = u'2014 Twitter, Inc' htmlhelp_basename = "zipkin" release = sbt_versions.find_release(os.path.abspath('../../../project/Build.scala')) version = sbt_versions.release_to_version(release) # e.g. :issue:`36` :ticket:`8` extlinks = { 'issue': ('https://github.com/twitter/zipkin/issues/%s', 'issue #'), 'ex': ('https://github.com/twitter/zipkin/blob/zipkin-example/src/main/scala/%s', 'Zipkin example '), 'api': ('http://twitter.github.io/zipkin/docs/#%s', '') } rst_epilog = ''' .. _Zipkin Examples: https://github.com/twitter/zipkin/tree/zipkin-example ''' pygments_style = 'flask_theme_support.FlaskyStyle' # fall back if theme is not there try: __import__('flask_theme_support') except ImportError, e: print '-' * 74 print 'Warning: Flask themes unavailable. Building with default theme' print 'If you want the Flask themes, run this command and build again:' print print ' git submodule update --init' print '-' * 74 pygments_style = 'tango' html_theme = 'default' html_theme_options = {}
apache-2.0
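The extlinks mapping in the conf.py above turns a short role such as :issue:`36` into a full URL by %s-substitution; a quick plain-Python illustration of the expansion the extension performs internally (values copied from the config above):

extlinks = {
    'issue': ('https://github.com/twitter/zipkin/issues/%s', 'issue #'),
}
url_template, caption = extlinks['issue']
print(url_template % '36')  # https://github.com/twitter/zipkin/issues/36
print(caption + '36')       # link text rendered as "issue #36"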
wenhuizhang/neutron
neutron/tests/unit/hacking/test_checks.py
24
6380
# Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import testtools from neutron.hacking import checks from neutron.tests import base class HackingTestCase(base.BaseTestCase): def assertLinePasses(self, func, line): with testtools.ExpectedException(StopIteration): next(func(line)) def assertLineFails(self, func, line): self.assertIsInstance(next(func(line)), tuple) def test_log_translations(self): expected_marks = { 'error': '_LE', 'info': '_LI', 'warn': '_LW', 'warning': '_LW', 'critical': '_LC', 'exception': '_LE', } logs = expected_marks.keys() debug = "LOG.debug('OK')" self.assertEqual( 0, len(list(checks.validate_log_translations(debug, debug, 'f')))) for log in logs: bad = 'LOG.%s("Bad")' % log self.assertEqual( 1, len(list(checks.validate_log_translations(bad, bad, 'f')))) ok = "LOG.%s('OK') # noqa" % log self.assertEqual( 0, len(list(checks.validate_log_translations(ok, ok, 'f')))) ok = "LOG.%s(variable)" % log self.assertEqual( 0, len(list(checks.validate_log_translations(ok, ok, 'f')))) for mark in checks._all_hints: stmt = "LOG.%s(%s('test'))" % (log, mark) self.assertEqual( 0 if expected_marks[log] == mark else 1, len(list(checks.validate_log_translations(stmt, stmt, 'f')))) def test_no_translate_debug_logs(self): for hint in checks._all_hints: bad = "LOG.debug(%s('bad'))" % hint self.assertEqual( 1, len(list(checks.no_translate_debug_logs(bad, 'f')))) def test_use_jsonutils(self): def __get_msg(fun): msg = ("N321: jsonutils.%(fun)s must be used instead of " "json.%(fun)s" % {'fun': fun}) return [(0, msg)] for method in ('dump', 'dumps', 'load', 'loads'): self.assertEqual( __get_msg(method), list(checks.use_jsonutils("json.%s(" % method, "./neutron/common/rpc.py"))) self.assertEqual(0, len(list(checks.use_jsonutils("jsonx.%s(" % method, "./neutron/common/rpc.py")))) self.assertEqual(0, len(list(checks.use_jsonutils("json.%sx(" % method, "./neutron/common/rpc.py")))) self.assertEqual(0, len(list(checks.use_jsonutils( "json.%s" % method, "./neutron/plugins/ml2/drivers/openvswitch/agent/xenapi/" "etc/xapi.d/plugins/netwrap")))) def test_assert_called_once_with(self): fail_code1 = """ mock = Mock() mock.method(1, 2, 3, test='wow') mock.method.assert_called_once() """ fail_code2 = """ mock = Mock() mock.method(1, 2, 3, test='wow') mock.method.assertCalledOnceWith() """ fail_code3 = """ mock = Mock() mock.method(1, 2, 3, test='wow') mock.method.assert_has_called() """ pass_code = """ mock = Mock() mock.method(1, 2, 3, test='wow') mock.method.assert_called_once_with() """ pass_code2 = """ mock = Mock() mock.method(1, 2, 3, test='wow') mock.method.assert_has_calls() """ self.assertEqual( 1, len(list(checks.check_assert_called_once_with(fail_code1, "neutron/tests/test_assert.py")))) self.assertEqual( 1, len(list(checks.check_assert_called_once_with(fail_code2, "neutron/tests/test_assert.py")))) self.assertEqual( 0, len(list(checks.check_assert_called_once_with(pass_code, "neutron/tests/test_assert.py")))) self.assertEqual( 1, len(list(checks.check_assert_called_once_with(fail_code3, 
"neutron/tests/test_assert.py")))) self.assertEqual( 0, len(list(checks.check_assert_called_once_with(pass_code2, "neutron/tests/test_assert.py")))) def test_check_oslo_namespace_imports(self): f = checks.check_oslo_namespace_imports self.assertLinePasses(f, 'from oslo_utils import importutils') self.assertLinePasses(f, 'import oslo_messaging') self.assertLineFails(f, 'from oslo.utils import importutils') self.assertLineFails(f, 'from oslo import messaging') self.assertLineFails(f, 'import oslo.messaging') def test_check_python3_xrange(self): f = checks.check_python3_xrange self.assertLineFails(f, 'a = xrange(1000)') self.assertLineFails(f, 'b =xrange ( 42 )') self.assertLineFails(f, 'c = xrange(1, 10, 2)') self.assertLinePasses(f, 'd = range(1000)') self.assertLinePasses(f, 'e = six.moves.range(1337)') def test_no_basestring(self): self.assertEqual(1, len(list(checks.check_no_basestring("isinstance(x, basestring)")))) def test_check_python3_iteritems(self): f = checks.check_python3_no_iteritems self.assertLineFails(f, "d.iteritems()") self.assertLinePasses(f, "six.iteritems(d)")
apache-2.0
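The assertLinePasses/assertLineFails helpers above rely on hacking checks being generators: a check yields an (offset, message) tuple per violation and yields nothing on a clean line, so next() raises StopIteration exactly when the line passes. A minimal check written in that style (the N999 rule is hypothetical, not one of neutron's real checks):

import re

def check_no_print_statement(logical_line):
    """N999 - hypothetical check: print statements are forbidden."""
    if re.match(r"\s*print\b", logical_line):
        yield (0, "N999: use logging instead of print")

# clean line: the generator is empty, so next() would raise StopIteration
assert list(check_no_print_statement("x = 1")) == []
# violation: the first yielded item is an (offset, message) tuple
assert isinstance(next(check_no_print_statement("print 'hi'")), tuple)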
Northrend/mxnet
tests/python/unittest/test_gluon_rnn.py
13
12698
# Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. import mxnet as mx from mxnet import gluon import numpy as np from numpy.testing import assert_allclose import unittest from mxnet.test_utils import almost_equal def test_rnn(): cell = gluon.rnn.RNNCell(100, prefix='rnn_') inputs = [mx.sym.Variable('rnn_t%d_data'%i) for i in range(3)] outputs, _ = cell.unroll(3, inputs) outputs = mx.sym.Group(outputs) assert sorted(cell.collect_params().keys()) == ['rnn_h2h_bias', 'rnn_h2h_weight', 'rnn_i2h_bias', 'rnn_i2h_weight'] assert outputs.list_outputs() == ['rnn_t0_out_output', 'rnn_t1_out_output', 'rnn_t2_out_output'] args, outs, auxs = outputs.infer_shape(rnn_t0_data=(10,50), rnn_t1_data=(10,50), rnn_t2_data=(10,50)) assert outs == [(10, 100), (10, 100), (10, 100)] def test_lstm(): cell = gluon.rnn.LSTMCell(100, prefix='rnn_') inputs = [mx.sym.Variable('rnn_t%d_data'%i) for i in range(3)] outputs, _ = cell.unroll(3, inputs) outputs = mx.sym.Group(outputs) assert sorted(cell.collect_params().keys()) == ['rnn_h2h_bias', 'rnn_h2h_weight', 'rnn_i2h_bias', 'rnn_i2h_weight'] assert outputs.list_outputs() == ['rnn_t0_out_output', 'rnn_t1_out_output', 'rnn_t2_out_output'] args, outs, auxs = outputs.infer_shape(rnn_t0_data=(10,50), rnn_t1_data=(10,50), rnn_t2_data=(10,50)) assert outs == [(10, 100), (10, 100), (10, 100)] def test_lstm_forget_bias(): forget_bias = 2.0 stack = gluon.rnn.SequentialRNNCell() stack.add(gluon.rnn.LSTMCell(100, i2h_bias_initializer=mx.init.LSTMBias(forget_bias), prefix='l0_')) stack.add(gluon.rnn.LSTMCell(100, i2h_bias_initializer=mx.init.LSTMBias(forget_bias), prefix='l1_')) dshape = (32, 1, 200) data = mx.sym.Variable('data') sym, _ = stack.unroll(1, data, merge_outputs=True) mod = mx.mod.Module(sym, label_names=None, context=mx.cpu(0)) mod.bind(data_shapes=[('data', dshape)], label_shapes=None) mod.init_params() bias_argument = next(x for x in sym.list_arguments() if x.endswith('i2h_bias')) expected_bias = np.hstack([np.zeros((100,)), forget_bias * np.ones(100, ), np.zeros((2 * 100,))]) assert_allclose(mod.get_params()[0][bias_argument].asnumpy(), expected_bias) def test_gru(): cell = gluon.rnn.GRUCell(100, prefix='rnn_') inputs = [mx.sym.Variable('rnn_t%d_data'%i) for i in range(3)] outputs, _ = cell.unroll(3, inputs) outputs = mx.sym.Group(outputs) assert sorted(cell.collect_params().keys()) == ['rnn_h2h_bias', 'rnn_h2h_weight', 'rnn_i2h_bias', 'rnn_i2h_weight'] assert outputs.list_outputs() == ['rnn_t0_out_output', 'rnn_t1_out_output', 'rnn_t2_out_output'] args, outs, auxs = outputs.infer_shape(rnn_t0_data=(10,50), rnn_t1_data=(10,50), rnn_t2_data=(10,50)) assert outs == [(10, 100), (10, 100), (10, 100)] def test_residual(): cell = gluon.rnn.ResidualCell(gluon.rnn.GRUCell(50, prefix='rnn_')) inputs = [mx.sym.Variable('rnn_t%d_data'%i) for i in range(2)] 
outputs, _ = cell.unroll(2, inputs) outputs = mx.sym.Group(outputs) assert sorted(cell.collect_params().keys()) == \ ['rnn_h2h_bias', 'rnn_h2h_weight', 'rnn_i2h_bias', 'rnn_i2h_weight'] # assert outputs.list_outputs() == \ # ['rnn_t0_out_plus_residual_output', 'rnn_t1_out_plus_residual_output'] args, outs, auxs = outputs.infer_shape(rnn_t0_data=(10, 50), rnn_t1_data=(10, 50)) assert outs == [(10, 50), (10, 50)] outputs = outputs.eval(rnn_t0_data=mx.nd.ones((10, 50)), rnn_t1_data=mx.nd.ones((10, 50)), rnn_i2h_weight=mx.nd.zeros((150, 50)), rnn_i2h_bias=mx.nd.zeros((150,)), rnn_h2h_weight=mx.nd.zeros((150, 50)), rnn_h2h_bias=mx.nd.zeros((150,))) expected_outputs = np.ones((10, 50)) assert np.array_equal(outputs[0].asnumpy(), expected_outputs) assert np.array_equal(outputs[1].asnumpy(), expected_outputs) def test_residual_bidirectional(): cell = gluon.rnn.ResidualCell( gluon.rnn.BidirectionalCell( gluon.rnn.GRUCell(25, prefix='rnn_l_'), gluon.rnn.GRUCell(25, prefix='rnn_r_'))) inputs = [mx.sym.Variable('rnn_t%d_data'%i) for i in range(2)] outputs, _ = cell.unroll(2, inputs, merge_outputs=False) outputs = mx.sym.Group(outputs) assert sorted(cell.collect_params().keys()) == \ ['rnn_l_h2h_bias', 'rnn_l_h2h_weight', 'rnn_l_i2h_bias', 'rnn_l_i2h_weight', 'rnn_r_h2h_bias', 'rnn_r_h2h_weight', 'rnn_r_i2h_bias', 'rnn_r_i2h_weight'] # assert outputs.list_outputs() == \ # ['bi_t0_plus_residual_output', 'bi_t1_plus_residual_output'] args, outs, auxs = outputs.infer_shape(rnn_t0_data=(10, 50), rnn_t1_data=(10, 50)) assert outs == [(10, 50), (10, 50)] outputs = outputs.eval(rnn_t0_data=mx.nd.ones((10, 50))+5, rnn_t1_data=mx.nd.ones((10, 50))+5, rnn_l_i2h_weight=mx.nd.zeros((75, 50)), rnn_l_i2h_bias=mx.nd.zeros((75,)), rnn_l_h2h_weight=mx.nd.zeros((75, 25)), rnn_l_h2h_bias=mx.nd.zeros((75,)), rnn_r_i2h_weight=mx.nd.zeros((75, 50)), rnn_r_i2h_bias=mx.nd.zeros((75,)), rnn_r_h2h_weight=mx.nd.zeros((75, 25)), rnn_r_h2h_bias=mx.nd.zeros((75,))) expected_outputs = np.ones((10, 50))+5 assert np.array_equal(outputs[0].asnumpy(), expected_outputs) assert np.array_equal(outputs[1].asnumpy(), expected_outputs) def test_stack(): cell = gluon.rnn.SequentialRNNCell() for i in range(5): if i == 1: cell.add(gluon.rnn.ResidualCell(gluon.rnn.LSTMCell(100, prefix='rnn_stack%d_' % i))) else: cell.add(gluon.rnn.LSTMCell(100, prefix='rnn_stack%d_'%i)) inputs = [mx.sym.Variable('rnn_t%d_data'%i) for i in range(3)] outputs, _ = cell.unroll(3, inputs) outputs = mx.sym.Group(outputs) keys = sorted(cell.collect_params().keys()) for i in range(5): assert 'rnn_stack%d_h2h_weight'%i in keys assert 'rnn_stack%d_h2h_bias'%i in keys assert 'rnn_stack%d_i2h_weight'%i in keys assert 'rnn_stack%d_i2h_bias'%i in keys assert outputs.list_outputs() == ['rnn_stack4_t0_out_output', 'rnn_stack4_t1_out_output', 'rnn_stack4_t2_out_output'] args, outs, auxs = outputs.infer_shape(rnn_t0_data=(10,50), rnn_t1_data=(10,50), rnn_t2_data=(10,50)) assert outs == [(10, 100), (10, 100), (10, 100)] def test_bidirectional(): cell = gluon.rnn.BidirectionalCell( gluon.rnn.LSTMCell(100, prefix='rnn_l0_'), gluon.rnn.LSTMCell(100, prefix='rnn_r0_'), output_prefix='rnn_bi_') inputs = [mx.sym.Variable('rnn_t%d_data'%i) for i in range(3)] outputs, _ = cell.unroll(3, inputs) outputs = mx.sym.Group(outputs) assert outputs.list_outputs() == ['rnn_bi_t0_output', 'rnn_bi_t1_output', 'rnn_bi_t2_output'] args, outs, auxs = outputs.infer_shape(rnn_t0_data=(10,50), rnn_t1_data=(10,50), rnn_t2_data=(10,50)) assert outs == [(10, 200), (10, 200), (10, 200)] def test_zoneout(): 
cell = gluon.rnn.ZoneoutCell(gluon.rnn.RNNCell(100, prefix='rnn_'), zoneout_outputs=0.5, zoneout_states=0.5) inputs = [mx.sym.Variable('rnn_t%d_data'%i) for i in range(3)] outputs, _ = cell.unroll(3, inputs) outputs = mx.sym.Group(outputs) args, outs, auxs = outputs.infer_shape(rnn_t0_data=(10,50), rnn_t1_data=(10,50), rnn_t2_data=(10,50)) assert outs == [(10, 100), (10, 100), (10, 100)] def check_rnn_forward(layer, inputs, deterministic=True): inputs.attach_grad() layer.collect_params().initialize() with mx.autograd.record(): out = layer.unroll(3, inputs, merge_outputs=False)[0] mx.autograd.backward(out) out = layer.unroll(3, inputs, merge_outputs=True)[0] out.backward() np_out = out.asnumpy() np_dx = inputs.grad.asnumpy() layer.hybridize() with mx.autograd.record(): out = layer.unroll(3, inputs, merge_outputs=False)[0] mx.autograd.backward(out) out = layer.unroll(3, inputs, merge_outputs=True)[0] out.backward() if deterministic: mx.test_utils.assert_almost_equal(np_out, out.asnumpy(), rtol=1e-3, atol=1e-5) mx.test_utils.assert_almost_equal(np_dx, inputs.grad.asnumpy(), rtol=1e-3, atol=1e-5) def test_rnn_cells(): check_rnn_forward(gluon.rnn.LSTMCell(100, input_size=200), mx.nd.ones((8, 3, 200))) check_rnn_forward(gluon.rnn.RNNCell(100, input_size=200), mx.nd.ones((8, 3, 200))) check_rnn_forward(gluon.rnn.GRUCell(100, input_size=200), mx.nd.ones((8, 3, 200))) bilayer = gluon.rnn.BidirectionalCell(gluon.rnn.LSTMCell(100, input_size=200), gluon.rnn.LSTMCell(100, input_size=200)) check_rnn_forward(bilayer, mx.nd.ones((8, 3, 200))) check_rnn_forward(gluon.rnn.DropoutCell(0.5), mx.nd.ones((8, 3, 200)), False) check_rnn_forward(gluon.rnn.ZoneoutCell(gluon.rnn.LSTMCell(100, input_size=200), 0.5, 0.2), mx.nd.ones((8, 3, 200)), False) net = gluon.rnn.SequentialRNNCell() net.add(gluon.rnn.LSTMCell(100, input_size=200)) net.add(gluon.rnn.RNNCell(100, input_size=100)) net.add(gluon.rnn.GRUCell(100, input_size=100)) check_rnn_forward(net, mx.nd.ones((8, 3, 200))) def check_rnn_layer_forward(layer, inputs, states=None): layer.collect_params().initialize() inputs.attach_grad() with mx.autograd.record(): out = layer(inputs, states) if states is not None: assert isinstance(out, tuple) and len(out) == 2 out = out[0] else: assert isinstance(out, mx.nd.NDArray) out.backward() np_out = out.asnumpy() np_dx = inputs.grad.asnumpy() layer.hybridize() with mx.autograd.record(): out = layer(inputs, states) if states is not None: assert isinstance(out, tuple) and len(out) == 2 out = out[0] else: assert isinstance(out, mx.nd.NDArray) out.backward() mx.test_utils.assert_almost_equal(np_out, out.asnumpy(), rtol=1e-3, atol=1e-5) mx.test_utils.assert_almost_equal(np_dx, inputs.grad.asnumpy(), rtol=1e-3, atol=1e-5) def test_rnn_layers(): check_rnn_layer_forward(gluon.rnn.RNN(10, 2), mx.nd.ones((8, 3, 20))) check_rnn_layer_forward(gluon.rnn.RNN(10, 2), mx.nd.ones((8, 3, 20)), mx.nd.ones((2, 3, 10))) check_rnn_layer_forward(gluon.rnn.LSTM(10, 2), mx.nd.ones((8, 3, 20))) check_rnn_layer_forward(gluon.rnn.LSTM(10, 2), mx.nd.ones((8, 3, 20)), [mx.nd.ones((2, 3, 10)), mx.nd.ones((2, 3, 10))]) check_rnn_layer_forward(gluon.rnn.GRU(10, 2), mx.nd.ones((8, 3, 20))) check_rnn_layer_forward(gluon.rnn.GRU(10, 2), mx.nd.ones((8, 3, 20)), mx.nd.ones((2, 3, 10))) net = gluon.nn.Sequential() net.add(gluon.rnn.LSTM(10, 2, bidirectional=True)) net.add(gluon.nn.BatchNorm(axis=2)) net.add(gluon.nn.Flatten()) net.add(gluon.nn.Dense(3, activation='relu')) net.collect_params().initialize() with mx.autograd.record(): net(mx.nd.ones((2, 3, 
10))).backward() def test_cell_fill_shape(): cell = gluon.rnn.LSTMCell(10) cell.hybridize() check_rnn_forward(cell, mx.nd.ones((2, 3, 7))) assert cell.i2h_weight.shape[1] == 7, cell.i2h_weight.shape[1] def test_layer_fill_shape(): layer = gluon.rnn.LSTM(10) layer.hybridize() check_rnn_layer_forward(layer, mx.nd.ones((3, 2, 7))) print(layer) assert layer.i2h_weight[0].shape[1] == 7, layer.i2h_weight[0].shape[1] if __name__ == '__main__': import nose nose.runmodule()
apache-2.0
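The symbolic tests above all hinge on unroll: feed one input per time step, get one output per step, with shapes checked via infer_shape. A minimal imperative counterpart using NDArray inputs, which the same unroll API also accepts (shapes chosen arbitrarily for illustration):

import mxnet as mx
from mxnet import gluon

cell = gluon.rnn.LSTMCell(100, input_size=50, prefix='rnn_')
cell.collect_params().initialize()

# three time steps, batch size 10, feature size 50
inputs = [mx.nd.ones((10, 50)) for _ in range(3)]
outputs, states = cell.unroll(3, inputs, merge_outputs=False)
print([o.shape for o in outputs])  # [(10, 100), (10, 100), (10, 100)]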
reddit/mcrouter
mcrouter/test/test_mcrouter.py
2
14207
# Copyright (c) 2015, Facebook, Inc. # All rights reserved. # # This source code is licensed under the BSD-style license found in the # LICENSE file in the root directory of this source tree. An additional grant # of patent rights can be found in the PATENTS file in the same directory. from __future__ import absolute_import from __future__ import division from __future__ import print_function from __future__ import unicode_literals import time from mcrouter.test.MCProcess import Memcached from mcrouter.test.McrouterTestCase import McrouterTestCase class TestDevNull(McrouterTestCase): config = './mcrouter/test/test_dev_null.json' extra_args = [] def setUp(self): # The order here must corresponds to the order of hosts in the .json self.mc_good = self.add_server(Memcached()) self.mc_wild = self.add_server(Memcached()) def get_mcrouter(self): return self.add_mcrouter(self.config, extra_args=self.extra_args) def test_dev_null(self): mcr = self.get_mcrouter() # finally setup is done mcr.set("good:key", "should_be_set") mcr.set("key", "should_be_set_wild") mcr.set("null:key", "should_not_be_set") mcgood_val = self.mc_good.get("good:key") mcnull_val = self.mc_wild.get("null:key") mcwild_val = self.mc_wild.get("key") self.assertEqual(mcgood_val, "should_be_set") self.assertEqual(mcnull_val, None) self.assertEqual(mcwild_val, "should_be_set_wild") self.assertEqual(mcr.delete("null:key2"), None) self.assertEqual(int(mcr.stats('ods')['dev_null_requests']), 2) class TestMigratedPools(McrouterTestCase): config = './mcrouter/test/test_migrated_pools.json' extra_args = [] def setUp(self): self.wild_new = self.add_server(Memcached()) self.wild_old = self.add_server(Memcached()) def get_mcrouter(self): return self.add_mcrouter( self.config, extra_args=self.extra_args, replace_map={"START_TIME": (int(time.time()) + 2)}) def test_migrated_pools(self): mcr = self.get_mcrouter() #set keys that should be deleted in later phases for phase in range(1, 5): self.wild_old.set("get-key-" + str(phase), str(phase)) self.wild_new.set("get-key-" + str(phase), str(phase * 100)) # first we are in the old domain make sure all ops go to # the old host only self.assertEqual(mcr.get("get-key-1"), str(1)) mcr.set("set-key-1", str(42)) self.assertEqual(self.wild_old.get("set-key-1"), str(42)) self.assertEqual(self.wild_new.get("set-key-1"), None) mcr.delete("get-key-1") #make sure the delete went to old but not new self.assertEqual(self.wild_old.get("get-key-1"), None) self.assertEqual(self.wild_new.get("get-key-1"), str(100)) #next phase time.sleep(2) # gets/sets go to the old place self.assertEqual(mcr.get("get-key-2"), str(2)) mcr.set("set-key-2", str(4242)) self.assertEqual(self.wild_old.get("set-key-2"), str(4242)) self.assertEqual(self.wild_new.get("set-key-2"), None) mcr.delete("get-key-2") #make sure the delete went to both places self.assertEqual(self.wild_old.get("get-key-2"), None) self.assertEqual(self.wild_new.get("get-key-2"), None) #next phase time.sleep(2) # gets/sets go to the new place self.assertEqual(mcr.get("get-key-3"), str(300)) mcr.set("set-key-3", str(424242)) self.assertEqual(self.wild_old.get("set-key-3"), None) self.assertEqual(self.wild_new.get("set-key-3"), str(424242)) mcr.delete("get-key-3") #make sure the delete went to both places self.assertEqual(self.wild_old.get("get-key-3"), None) self.assertEqual(self.wild_new.get("get-key-3"), None) #next phase time.sleep(2) # gets/sets go to the new place self.assertEqual(mcr.get("get-key-4"), str(400)) mcr.set("set-key-4", str(42424242)) 
self.assertEqual(self.wild_old.get("set-key-4"), None) self.assertEqual(self.wild_new.get("set-key-4"), str(42424242)) mcr.delete("get-key-4") #make sure the delete went to the new place only self.assertEqual(self.wild_old.get("get-key-4"), str(4)) self.assertEqual(self.wild_new.get("get-key-4"), None) class TestMigratedPoolsFailover(McrouterTestCase): config = './mcrouter/test/test_migrated_pools_failover.json' extra_args = [] def setUp(self): self.a_new = self.add_server(Memcached()) self.a_old = self.add_server(Memcached()) self.b_new = self.add_server(Memcached()) self.b_old = self.add_server(Memcached()) def get_mcrouter(self): return self.add_mcrouter( self.config, extra_args=self.extra_args, replace_map={"START_TIME": (int(time.time()) + 2)}) def test_migrated_pools_failover(self): mcr = self.get_mcrouter() #set keys that should be deleted in later phases for phase in range(1, 5): self.a_old.set("get-key-" + str(phase), str(phase)) self.a_new.set("get-key-" + str(phase), str(phase * 10)) self.b_old.set("get-key-" + str(phase), str(phase * 100)) self.b_new.set("get-key-" + str(phase), str(phase * 1000)) # first we are in the old domain make sure all ops go to # the old host only self.assertEqual(mcr.get("get-key-1"), str(1)) mcr.set("set-key-1", str(42)) self.assertEqual(self.a_old.get("set-key-1"), str(42)) self.a_old.terminate() self.assertEqual(mcr.get("get-key-1"), str(100)) mcr.set("set-key-1", str(42)) self.assertEqual(self.b_old.get("set-key-1"), str(42)) #next phase time.sleep(2.5) self.assertEqual(mcr.get("get-key-2"), str(200)) mcr.set("set-key-2", str(42)) self.assertEqual(self.b_old.get("set-key-2"), str(42)) #next phase time.sleep(2.5) # gets/sets go to the new place self.assertEqual(mcr.get("get-key-3"), str(30)) mcr.set("set-key-3", str(424242)) self.assertEqual(self.a_new.get("set-key-3"), str(424242)) self.a_new.terminate() self.assertEqual(mcr.get("get-key-3"), str(3000)) class TestDuplicateServers(McrouterTestCase): config = './mcrouter/test/test_duplicate_servers.json' extra_args = [] def setUp(self): self.wildcard = self.add_server(Memcached(), 12345) def get_mcrouter(self): return self.add_mcrouter( self.config, '/a/a/', extra_args=self.extra_args) def test_duplicate_servers(self): mcr = self.get_mcrouter() stats = mcr.stats('servers') # Check that only one proxy destination connection is made # for all the duplicate servers self.assertEqual(1, len(stats)) # Hardcoding default server timeout key = 'localhost:' + str(self.port_map[12345]) + ':TCP:ascii-1000' self.assertTrue(key in stats) class TestDuplicateServersDiffTimeouts(McrouterTestCase): config = './mcrouter/test/test_duplicate_servers_difftimeouts.json' extra_args = [] def setUp(self): self.wildcard = self.add_server(Memcached(), 12345) def get_mcrouter(self): return self.add_mcrouter( self.config, '/a/a/', extra_args=self.extra_args) def test_duplicate_servers_difftimeouts(self): mcr = self.get_mcrouter() stats = mcr.stats('servers') # Check that only two proxy destination connections are made # for all the duplicate servers in pools with diff timeout self.assertEqual(2, len(stats)) # Hardcoding default server timeout key = 'localhost:' + str(self.port_map[12345]) + ':TCP:ascii-1000' self.assertTrue(key in stats) key = 'localhost:' + str(self.port_map[12345]) + ':TCP:ascii-2000' self.assertTrue(key in stats) class TestSamePoolFailover(McrouterTestCase): config = './mcrouter/test/test_same_pool_failover.json' extra_args = [] def setUp(self): self.add_server(Memcached(), 12345) def get_mcrouter(self): 
return self.add_mcrouter(self.config, extra_args=self.extra_args) def test_same_pool_failover(self): mcr = self.get_mcrouter() self.assertEqual(mcr.get('foobar'), None) self.assertTrue(mcr.set('foobar', 'bizbang')) self.assertEqual(mcr.get('foobar'), 'bizbang') mcr.delete('foobar') self.assertEqual(mcr.get('foobar'), None) class TestGetFailover(McrouterTestCase): config = './mcrouter/test/test_get_failover.json' extra_args = [] def setUp(self): self.gut = self.add_server(Memcached()) self.wildcard = self.add_server(Memcached()) def get_mcrouter(self): return self.add_mcrouter(self.config, extra_args=self.extra_args) def failover_common(self, key): self.mcr = self.get_mcrouter() self.assertEqual(self.mcr.get(key), None) self.assertTrue(self.mcr.set(key, 'bizbang')) self.assertEqual(self.mcr.get(key), 'bizbang') # kill the main host so everything fails over to gut self.wildcard.terminate() self.assertEqual(self.mcr.get(key), None) self.assertTrue(self.mcr.set(key, 'bizbang-fail')) self.assertEqual(self.mcr.get(key), 'bizbang-fail') def test_get_failover(self): self.failover_common('testkey') # the failover should have set it with a much shorter TTL # so make sure that we can't get the value after the TTL # has expired time.sleep(4) self.assertEqual(self.mcr.get('testkey'), None) class TestGetFailoverWithFailoverTag(TestGetFailover): config = './mcrouter/test/test_get_failover_with_failover_tag.json' def test_get_failover(self): key = 'testkey|#|extra=1' self.failover_common(key) # Verify the failover tag was appended fail_key = key + ":failover=1" self.assertEqual(self.mcr.get(key), 'bizbang-fail') self.assertEqual(self.gut.get(fail_key), 'bizbang-fail') class TestLeaseGetFailover(McrouterTestCase): config = './mcrouter/test/test_get_failover.json' extra_args = [] def setUp(self): self.gut = self.add_server(Memcached()) self.wildcard = self.add_server(Memcached()) def get_mcrouter(self): return self.add_mcrouter(self.config, extra_args=self.extra_args) def test_lease_get_failover(self): mcr = self.get_mcrouter() get_res = {} get_res['testkey'] = mcr.leaseGet('testkey') get_res['testkey']['value'] = 'bizbang-lease' self.assertGreater(get_res['testkey']['token'], 0) self.assertTrue(mcr.leaseSet('testkey', get_res['testkey'])) get_res['testkey'] = mcr.leaseGet('testkey') self.assertFalse(get_res['testkey']['token']) self.assertEqual(get_res['testkey']['value'], 'bizbang-lease') # kill the main host so everything fails over to mctestc00.gut self.wildcard.terminate() get_res['testkey'] = mcr.leaseGet('testkey') get_res['testkey']['value'] = 'bizbang-lease-fail' self.assertGreater(get_res['testkey']['token'], 0) self.assertTrue(mcr.leaseSet('testkey', get_res['testkey'])) get_res['testkey'] = mcr.leaseGet('testkey') self.assertFalse(get_res['testkey']['token']) self.assertEqual(get_res['testkey']['value'], 'bizbang-lease-fail') # the failover should have set it with a much shorter TTL # so make sure that we can't get the value after the TTL # has expired time.sleep(4) get_res['testkey'] = mcr.leaseGet('testkey') self.assertGreater(get_res['testkey']['token'], 0) self.assertFalse(get_res['testkey']['value']) class TestMetaGetFailover(McrouterTestCase): config = './mcrouter/test/test_get_failover.json' extra_args = [] def setUp(self): self.gut = self.add_server(Memcached()) self.wildcard = self.add_server(Memcached()) def get_mcrouter(self): return self.add_mcrouter(self.config, extra_args=self.extra_args) def test_metaget_failover(self): mcr = self.get_mcrouter() get_res = {}
self.assertTrue(mcr.set('testkey', 'bizbang')) get_res = mcr.metaget('testkey') self.assertEqual(0, int(get_res['exptime'])) self.wildcard.terminate() self.assertTrue(mcr.set('testkey', 'bizbang-fail')) self.assertEqual(mcr.get('testkey'), 'bizbang-fail') get_res = mcr.metaget('testkey') self.assertAlmostEqual(int(get_res['exptime']), int(time.time()) + 3, delta=1) # the failover should have set it with a much shorter TTL # so make sure that we can't get the value after the TTL # has expired time.sleep(4) self.assertEqual(mcr.metaget('testkey'), {}) self.assertEqual(mcr.get('testkey'), None) class TestFailoverWithLimit(McrouterTestCase): config = './mcrouter/test/test_failover_limit.json' def setUp(self): self.gut = self.add_server(Memcached()) self.wildcard = self.add_server(Memcached()) def get_mcrouter(self): return self.add_mcrouter(self.config) def test_failover_limit(self): mcr = self.get_mcrouter() self.assertTrue(mcr.set('key', 'value.wildcard')) self.assertEqual(mcr.get('key'), 'value.wildcard') self.wildcard.terminate() # first 12 requests should succeed (10 burst + 2 rate) self.assertTrue(mcr.set('key', 'value.gut')) for i in range(11): self.assertEqual(mcr.get('key'), 'value.gut') # now every 5th request should succeed for i in range(10): for j in range(4): self.assertIsNone(mcr.get('key')) self.assertEqual(mcr.get('key'), 'value.gut')
bsd-3-clause
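Every test case above follows the same skeleton: declare a route config, register backend Memcached processes in setUp (in the same order as the hosts in the .json), then spawn an mcrouter against that config and assert on both the router-visible and backend-visible state. A condensed template of that skeleton (the config path here is hypothetical):

from mcrouter.test.MCProcess import Memcached
from mcrouter.test.McrouterTestCase import McrouterTestCase

class TestMyRoute(McrouterTestCase):
    # hypothetical route config; the order of add_server() calls
    # must match the order of hosts in the .json
    config = './mcrouter/test/test_my_route.json'

    def setUp(self):
        self.backend = self.add_server(Memcached())

    def test_basic_set_get(self):
        mcr = self.add_mcrouter(self.config)
        self.assertTrue(mcr.set('key', 'value'))
        self.assertEqual(mcr.get('key'), 'value')
        # verify the value actually landed on the expected backend
        self.assertEqual(self.backend.get('key'), 'value')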
janezkranjc/clowdflows
workflows/management/commands/import_package_old.py
4
13681
from django.core.management.base import BaseCommand, CommandError from workflows.models import Category, AbstractWidget, AbstractInput, AbstractOutput, AbstractOption from django.core import serializers #dirty python2.6 fix try: from collections import Counter except: def Counter(list): return set(list) from optparse import make_option class Command(BaseCommand): args = 'file_name' help = 'Imports all models from the file named "file_name". All models in the database which have the same uuid as imported models are updated. The following models are included in import: AbstractWidget, Category, AbstractInput, AbstractOutput, and AbstractOption.' option_list = BaseCommand.option_list + ( make_option('-r', '--replace', action="store_true", dest='replace', default=False, help='Completely replace whole widgets with the new one where UIDs match. Default behaviour merges widgets\' submodels (AbstractInputs, AbstractOutputs and AbstractOptions)' ' based on their submodel\'s own UID. When using this option all of the widget\'s old submodels are deleted and completely replaced by new submodels.' ), ) def handle(self, *args, **options): if (len(args)<1): raise CommandError('Argument "file_name" is required!') try: string = open(args[0], 'r').read() except: raise CommandError('There was a problem opening the given input file') import_package_string(self.stdout.write, string, options['replace'], int(options['verbosity'])) self.stdout.write('Import procedure successfully finished.\n') def import_package_string(writeFunc, string, replace, verbosity=1): #get all objects from file and eliminate empty UID and check for UID duplicates objsFileRaw = serializers.deserialize("json", string) objsFile = list(objsFileRaw) #order file models - essential for successful import #TODO: following ordering could be done more efficiently objsFile = order_objects_hier_top(objsFile) objsFileNoUid = [x for x in objsFile if len(x.object.uid) == 0] objsFile = [x for x in objsFile if len(x.object.uid) != 0] if len(objsFileNoUid)>0: writeFunc('File contains %i model(s) without UID field set. Those will not be imported! If you wish to' ' assign them random UIDs then use the "-n" option when exporting models with the "export_package"' ' command. Afterwards, you will be able to import them.\n' % len(objsFileNoUid)) if len(Counter([x.object.uid for x in objsFile])) != len(objsFile): a = sorted([x.object.uid for x in objsFile]) for x in a: print x raise CommandError('Input process terminated without any changes to the database. There were multiple equal ' 'UIDs defined on different models in the given input file. The input procedure can not continue ' 'for safety reasons. 
Please resolve manually!') #divide new objects by type wids = [x for x in objsFile if isinstance(x.object, AbstractWidget)] inps = [x for x in objsFile if isinstance(x.object, AbstractInput)] outs = [x for x in objsFile if isinstance(x.object, AbstractOutput)] opts = [x for x in objsFile if isinstance(x.object, AbstractOption)] cats = [x for x in objsFile if isinstance(x.object, Category)] #output statistics about file if verbosity>0: writeFunc('Import file contains:\n') writeFunc(' % 4i AbstractWidget(s)\n' % len(wids)) writeFunc(' % 4i AbstractInput(s)\n' % len(inps)) writeFunc(' % 4i AbstractOutput(s)\n' % len(outs)) writeFunc(' % 4i AbstractOption(s)\n' % len(opts)) writeFunc(' % 4i Category(s)\n' % len(cats)) #get all objects from database objsDb = [] objsDb.extend(AbstractWidget.objects.all()) objsDb.extend(AbstractInput.objects.all()) objsDb.extend(AbstractOutput.objects.all()) objsDb.extend(AbstractOption.objects.all()) objsDb.extend(Category.objects.all()) #check for DB UID duplicates objsdbDict = dict((x.uid,x) for x in objsDb if len(x.uid) != 0) if len([x for x in objsDb if len(x.uid) != 0]) != len(objsdbDict): error_txt= 'Input process terminated without any changes to the database. There were multiple equal ' \ 'UIDs defined on different models in the database. The input procedure can not continue ' \ 'for safety reasons. Please resolve manually! UIDs with multiple models:' #count objects per uid from collections import defaultdict objs_per_uid=defaultdict(list) for x in objsDb: if x.uid: objs_per_uid[x.uid].append(x) for uid,objs in objs_per_uid.items(): if len(objs)>1: error_txt+="\n\nUID: "+str(uid)+"\nobjects: "+str(objs) raise CommandError(error_txt) #create new to existing id mapping and check for type match idMappingDict = dict() for objFile in objsFile: if objsdbDict.has_key(objFile.object.uid): objDb = objsdbDict[objFile.object.uid] objFileTypeId = str(type(objFile.object))+':'+str(objFile.object.id) objDbTypeId = str(type(objDb))+':'+str(objDb.id) if type(objFile.object) == type(objsdbDict[objFile.object.uid]): idMappingDict[objFileTypeId] = objDb.id else: raise CommandError('Input process terminated without any changes to the database. Two models match by uid but not ' 'by type:\n - from file: id: %s uid: %s\n - from database: id: %s uid: %s\n Please resolve manually!'% (objFileTypeId, objFile.object.uid, objDbTypeId, objsdbDict[objFile.object.uid].uid)) #output statistics about database if verbosity>0: writeFunc('Current database contains %i models,\n' % len(objsDb)) writeFunc(' of which %i models have UID set,\n' % len(objsdbDict)) writeFunc(' of which %i models match with the imported models and will be updated.\n' % len(idMappingDict)) #prepare statistics statDict = dict([('old:'+str(t),len(t.objects.all())) for t in [AbstractWidget, AbstractInput, AbstractOutput, AbstractOption, Category]]) for modelType in [AbstractWidget, AbstractInput, AbstractOutput, AbstractOption, Category]: for operation in ['mod','add','del']: statDict[operation+':'+str(modelType)]=0 #save models to the database - update the ids for the matching models and remove the ids (to get a new one) for the non matching models #the import needs to be done in specific order! Hierarchically top down - all superior objects need to be imported prior to importing sub-objects #order: parent categories>sub categories>widgets>inputs>outputs>options if verbosity>0: writeFunc('Merging file and database models ...'
+ ('\n' if verbosity>1 else '')) importedUids = dict() for objFile in objsFile: objFileTypeId = str(type(objFile.object))+':'+str(objFile.object.id) if verbosity>1: objFileTypeIdStr = objFileTypeId.replace(":",":"+" "*(47-len(objFileTypeId))) if idMappingDict.has_key(objFileTypeId): writeFunc('updating: ' + objFileTypeIdStr + ' => <db_id>: ' + str(idMappingDict[objFileTypeId]) + '\n') else: writeFunc(' adding: ' + objFileTypeIdStr + '\n') #parent category needs to be already imported and added to idMappingDict if isinstance(objFile.object, Category): if not objFile.object.parent_id is None: objId = idMappingDict[str(Category)+':'+str(objFile.object.parent_id)] if verbosity>2: writeFunc('% 52s'%'rewiring parent category from <file_id>:' + '% 5i'%objFile.object.parent_id + ' => <db_id>: %i\n'%objId) objFile.object.parent = Category.objects.get(id=objId) #widget's category needs to be already imported and added to idMappingDict if isinstance(objFile.object, AbstractWidget): objId = idMappingDict[str(Category) + ':' + str(objFile.object.category_id)] if verbosity>2: writeFunc('% 52s'%'rewiring widget\'s category from <file_id>:' + '% 5i'%objFile.object.category_id + ' => <db_id>: %i\n'%objId) objFile.object.category = Category.objects.get(id=objId) #input/output's widget needs to be already imported and added to idMappingDict if isinstance(objFile.object, AbstractInput) or isinstance(objFile.object, AbstractOutput): objId = idMappingDict[str(AbstractWidget) + ':' + str(objFile.object.widget_id)] if verbosity>2: writeFunc('% 52s'%'rewiring containing widget from <file_id>:' + '% 5i'%objFile.object.widget_id + ' => <db_id>: %i\n'%objId) objFile.object.widget = AbstractWidget.objects.get(id=objId) #option's input needs to be already imported and added to idMappingDict if isinstance(objFile.object, AbstractOption): objId = idMappingDict[str(AbstractInput) + ':' + str(objFile.object.abstract_input_id)] if verbosity>2: writeFunc('% 52s'%'rewiring containing input from <file_id>:' + '% 5i'%objFile.object.abstract_input_id + ' => <db_id>: %i\n'%objId) objFile.object.abstract_input = AbstractInput.objects.get(id=objId) #update existing model or add a new one if idMappingDict.has_key(objFileTypeId): #there is already an existing model with same uid statDict['mod:'+str(type(objFile.object))]+=1 objFile.object.id = idMappingDict[objFileTypeId] else: #there is no model yet, add it statDict['add:'+str(type(objFile.object))]+=1 objFile.object.id = None objFile.save() #actual saving to the DB, if object is new then id is assigned at this point #dictionary bookkeeping idMappingDict[objFileTypeId] = objFile.object.id importedUids[objFile.object.uid]=True if verbosity>0: writeFunc(' done.\n') if replace: if verbosity>0: writeFunc('Removing unnecessary inputs/options/outputs...') for wid in [wid for wid in objsFile if isinstance(wid.object, AbstractWidget)]: for inp in AbstractInput.objects.filter(widget = wid.object.id): for opt in AbstractOption.objects.filter(abstract_input = inp.id): if not importedUids.has_key(opt.uid): statDict['del:'+str(AbstractOption)]+=1 opt.delete() if not importedUids.has_key(inp.uid): statDict['del:'+str(AbstractInput)]+=1 inp.delete() for out in AbstractOutput.objects.filter(widget = wid.object.id): if not importedUids.has_key(out.uid): statDict['del:'+str(AbstractOutput)]+=1 out.delete() if verbosity>0: writeFunc(' done.\n') #update and output statistics if verbosity>0: statDict = dict(statDict.items() + dict([('new:'+str(t),len(t.objects.all())) for t in [AbstractWidget,
AbstractInput, AbstractOutput, AbstractOption, Category]]).items()) writeFunc('Database models count statistics: pre-import + ( added | modified | deleted ) = after-import\n') for t in [AbstractWidget, AbstractInput, AbstractOutput, AbstractOption, Category]: writeFunc(' % 15s: % 5i + (% 4i | % 4i | % 4i ) = % 5i\n' % (t.__name__, statDict['old:'+str(t)], statDict['add:'+str(t)], statDict['mod:'+str(t)], statDict['del:'+str(t)], statDict['new:'+str(t)])) def order_objects_hier_top(objsFile): objsFileOrdered = [] for topCat in [x for x in objsFile if (isinstance(x.object, Category) and x.object.parent_id is None)]: objsFileOrdered.extend(order_objects_hier(topCat, objsFile)) return objsFileOrdered def order_objects_hier(cat, objsFile): assert isinstance(cat.object, Category) assert isinstance(objsFile, list) objsFileOrdered = [] objsFileOrdered.append(cat) for wid in [x for x in objsFile if (isinstance(x.object, AbstractWidget) and x.object.category_id == cat.object.id)]: objsFileOrdered.append(wid) for inp in [x for x in objsFile if (isinstance(x.object, AbstractInput) and x.object.widget_id == wid.object.id)]: objsFileOrdered.append(inp) for opt in [x for x in objsFile if (isinstance(x.object, AbstractOption) and x.object.abstract_input_id == inp.object.id)]: objsFileOrdered.append(opt) for outp in [x for x in objsFile if (isinstance(x.object, AbstractOutput) and x.object.widget_id == wid.object.id)]: objsFileOrdered.append(outp) for subCat in [x for x in objsFile if (isinstance(x.object, Category) and x.object.parent_id == cat.object.id)]: objsFileOrdered.extend(order_objects_hier(subCat,objsFile)) return objsFileOrdered
gpl-3.0
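The import above has to happen top-down (parent categories, then sub-categories, widgets, inputs/outputs, options) because each child row is rewired to its parent's freshly assigned database id before being saved. A sketch of driving the importer directly rather than through manage.py (the exported_package.json file name is hypothetical; the file is assumed to come from the matching export_package command):

import sys

from workflows.management.commands.import_package_old import import_package_string

# hypothetical export produced earlier by the export_package command
data = open('exported_package.json').read()

# replace=False merges submodels by UID; verbosity=2 prints per-model lines
import_package_string(sys.stdout.write, data, False, verbosity=2)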
Northrend/mxnet
tests/python/unittest/test_gluon_model_zoo.py
3
2171
# Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from __future__ import print_function import mxnet as mx from mxnet.gluon.model_zoo.vision import get_model import sys def eprint(*args, **kwargs): print(*args, file=sys.stderr, **kwargs) def test_models(): all_models = ['resnet18_v1', 'resnet34_v1', 'resnet50_v1', 'resnet101_v1', 'resnet152_v1', 'resnet18_v2', 'resnet34_v2', 'resnet50_v2', 'resnet101_v2', 'resnet152_v2', 'vgg11', 'vgg13', 'vgg16', 'vgg19', 'vgg11_bn', 'vgg13_bn', 'vgg16_bn', 'vgg19_bn', 'alexnet', 'inceptionv3', 'densenet121', 'densenet161', 'densenet169', 'densenet201', 'squeezenet1.0', 'squeezenet1.1', 'mobilenet1.0', 'mobilenet0.75', 'mobilenet0.5', 'mobilenet0.25'] pretrained_to_test = set(['squeezenet1.1']) for model_name in all_models: test_pretrain = True #model_name in pretrained_to_test model = get_model(model_name, pretrained=test_pretrain, root='model/') data_shape = (2, 3, 224, 224) if 'inception' not in model_name else (2, 3, 299, 299) eprint('testing forward for %s'%model_name) print(model) if not test_pretrain: model.collect_params().initialize() model(mx.nd.random.uniform(shape=data_shape)).wait_to_read() if __name__ == '__main__': import nose nose.runmodule()
apache-2.0
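Outside the test harness, the same get_model entry point is all that is needed to run inference with a zoo model; a minimal sketch (squeezenet1.1 is one of the pretrained models exercised above, and pretrained=True downloads weights into root on first use):

import mxnet as mx
from mxnet.gluon.model_zoo.vision import get_model

net = get_model('squeezenet1.1', pretrained=True, root='model/')
# 224x224 input; inceptionv3 is the exception and expects 299x299
x = mx.nd.random.uniform(shape=(2, 3, 224, 224))
out = net(x)
print(out.shape)  # (2, 1000) - ImageNet class logits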
hlmnrmr/superdesk-core
apps/content_filters/filter_condition/filter_condition.py
7
2767
# -*- coding: utf-8; -*- # # This file is part of Superdesk. # # Copyright 2013, 2014 Sourcefabric z.u. and contributors. # # For the full copyright and license information, please see the # AUTHORS and LICENSE files distributed with this source code, or # at https://www.sourcefabric.org/superdesk/license from apps.content_filters.filter_condition.filter_condition_field import FilterConditionField from apps.content_filters.filter_condition.filter_condition_value import FilterConditionValue from apps.content_filters.filter_condition.filter_condition_operator import \ FilterConditionOperator, NotInOperator, NotLikeOperator, MatchOperator, \ FilterConditionOperatorsEnum, ComparisonOperator import json class FilterCondition: def __init__(self, field, operator, value): self.field = FilterConditionField.factory(field) self.operator = FilterConditionOperator.factory(operator) self.value = FilterConditionValue(self.operator, value) @staticmethod def parse(filter_condition): return FilterCondition(filter_condition['field'], filter_condition['operator'], filter_condition['value']) def get_mongo_query(self): try: return self.field.get_mongo_query() except AttributeError: field = self.field.get_entity_name() operator = self.operator.get_mongo_operator() value = self.value.get_value(self.field, self.operator) return {field: {operator: value}} def get_elastic_query(self): try: return self.field.get_elastic_query() except AttributeError: operator = self.operator.get_elastic_operator() value, field = self.value.get_elastic_value(self.field, self.operator) if isinstance(self.operator, MatchOperator) or isinstance(self.operator, ComparisonOperator): return json.loads(operator.format(field, value)) else: return {operator: {field: value}} def contains_not(self): return self.operator.contains_not() def does_match(self, article): if not self.field.is_in_article(article): return type(self.operator) is NotInOperator or \ type(self.operator) is NotLikeOperator or \ self.operator.operator is FilterConditionOperatorsEnum.ne or \ (self.operator.operator is FilterConditionOperatorsEnum.eq and self.value.value.lower() in ("no", "false", "f", "0")) article_value = self.field.get_value(article) filter_value = self.value.get_value(self.field, self.operator) return self.operator.does_match(article_value, filter_value)
agpl-3.0
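A condition is just a (field, operator, value) triple: parse() builds it from the stored dict, and get_mongo_query()/get_elastic_query() translate it per backend. A sketch of that flow (the field and operator strings here are illustrative; the valid choices are defined by FilterConditionField and FilterConditionOperatorsEnum, which are not shown in this file):

from apps.content_filters.filter_condition.filter_condition import FilterCondition

# illustrative field/operator/value - consult FilterConditionField and
# FilterConditionOperatorsEnum for the actual allowed values
fc = FilterCondition.parse({'field': 'urgency', 'operator': 'in', 'value': '1,2'})

print(fc.get_mongo_query())   # roughly {'urgency': {'$in': [...]}} for an 'in' operator
print(fc.does_match({'urgency': 1}))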
VirusTotal/content
Packs/AccentureCTI_Feed/Integrations/ACTIIndicatorFeed/ACTIIndicatorFeed.py
2
7053
from typing import Dict, Union from CommonServerPython import * from JSONFeedApiModule import * # noqa: E402 def custom_build_iterator(client: Client, feed: Dict, limit, **kwargs) -> List: """ Implements the HTTP request for an API that works with pagination and filtering. Uses the integration context to save the last fetch time for each indicator type. Args: client: Client managing all http requests feed: dictionary holding all data needed for the specific service (Services- IP, Domain, URL) limit: maximum number of indicators to fetch Returns: list of indicators returned from the api. Each indicator is represented as a dictionary """ fetch_time = demisto.params().get('fetch_time', '14 days') params: dict = feed.get('filters', {}) current_indicator_type = feed.get('indicator_type', '') start_date, end_date = parse_date_range(fetch_time, utc=True) integration_context = get_integration_context() last_fetch = integration_context.get(f'{current_indicator_type}_fetch_time') if last_fetch: start_date = last_fetch # pragma: no cover page_number = 1 params['end_date'] = end_date params['start_date'] = start_date params['page_size'] = 200 if not limit: limit = 20000 # This limit was added to make sure we do not hit a timeout on the fetch integration_context[f'{current_indicator_type}_fetch_time'] = str(params['end_date']) set_integration_context(integration_context) more_indicators = True result: list = [] while more_indicators: params['page'] = page_number demisto.debug(f"Initiating API call to ACTI with url: {feed.get('url', client.url)}, with parameters: " f"{params} and page number: {page_number}") try: r = requests.get( url=feed.get('url', client.url), verify=client.verify, auth=client.auth, cert=client.cert, headers=client.headers, params=params, **kwargs ) r.raise_for_status() data = r.json() if data.get('total_size'): result.extend(jmespath.search(expression=feed.get('extractor'), data=data)) more_indicators = data.get('more') page_number += 1 if len(result) >= limit: break # pragma: no cover except ValueError as VE: raise ValueError(f'Could not parse returned data to JSON. \n\nError message: {VE}') # pragma: no cover except TypeError as TE: raise TypeError(f'Error message: {TE}\n\nTry to check the extractor value') except ConnectionError as exception: # pragma: no cover # Get originating Exception in Exception chain error_class = str(exception.__class__) # pragma: no cover err_type = f"""<{error_class[error_class.find("'") + 1: error_class.rfind("'")]}>""" # pragma: no cover err_msg = 'Verify that the server URL parameter' \ ' is correct and that you have access to the server from your host.'
\ '\nError Type: {}\nError Number: [{}]\nMessage: {}\n' \ .format(err_type, exception.errno, exception.strerror) raise DemistoException(err_msg, exception) # pragma: no cover demisto.debug(f"Received in total {len(result)} indicators from ACTI Feed") return result def create_fetch_configuration(indicators_type: list, filters: dict, params: dict) -> Dict[str, dict]: mapping_by_indicator_type = { # pragma: no cover 'IP': { 'last_seen_as': 'malwaretypes', 'threat_types': 'primarymotivation', 'malware_family': 'malwarefamily', 'severity': 'sourceoriginalseverity'}, 'Domain': { 'last_seen_as': 'malwaretypes', 'threat_types': 'primarymotivation', 'malware_family': 'malwarefamily', 'severity': 'sourceoriginalseverity'}, 'URL': { 'last_seen_as': 'malwaretypes', 'threat_types': 'primarymotivation', 'malware_family': 'malwarefamily', 'severity': 'sourceoriginalseverity'} } url_by_type = {"IP": 'https://api.intelgraph.idefense.com/rest/threatindicator/v0/ip', # pragma: no cover "Domain": 'https://api.intelgraph.idefense.com/rest/threatindicator/v0/domain', "URL": 'https://api.intelgraph.idefense.com/rest/threatindicator/v0/url'} common_conf = {'extractor': 'results', 'indicator': 'display_text', 'insecure': params.get('insecure', False), 'custom_build_iterator': custom_build_iterator, 'filters': filters} indicators_configuration = {} for ind in indicators_type: indicators_configuration[ind] = dict(common_conf) indicators_configuration[ind].update({'url': url_by_type[ind]}) indicators_configuration[ind].update({'indicator_type': ind}) indicators_configuration[ind].update({'mapping': mapping_by_indicator_type[ind]}) return indicators_configuration def build_feed_filters(params: dict) -> Dict[str, Optional[Union[str, list]]]: filters = {'severity.from': params.get('severity'), 'threat_types.values': params.get('threat_type'), 'confidence.from': params.get('confidence_from'), 'malware_family.values': params.get('malware_family', '').split(',') if params.get('malware_family') else None} return {k: v for k, v in filters.items() if v is not None} def main(): # pragma: no cover params = demisto.params() filters: Dict[str, Optional[Union[str, list]]] = build_feed_filters(params) indicators_type: list = argToList(params.get('indicator_type', [])) params['feed_name_to_config'] = create_fetch_configuration(indicators_type, filters, params) PACK_VERSION = get_pack_version() DEMISTO_VERSION = demisto.demistoVersion() DEMISTO_VERSION = f'{DEMISTO_VERSION["version"]}.{DEMISTO_VERSION["buildNumber"]}' params['headers'] = {"Content-Type": "application/json", 'auth-token': params.get('api_token').get("password"), 'User-Agent': f'AccentureCTI Pack/{PACK_VERSION} Palo Alto XSOAR/{DEMISTO_VERSION}'} feed_main(params, 'ACTI Indicator Feed', 'acti') if __name__ in ('__main__', '__builtin__', 'builtins'): main()
mit
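The pagination loop above is the heart of custom_build_iterator. Below is a minimal, self-contained sketch of the same pattern; the token header and the 'more'/'results' response fields are hypothetical stand-ins, not the documented ACTI API.

import requests

def fetch_all_pages(url, token, page_size=200, limit=20000):
    # Walk numbered pages until the server reports no more data, or the
    # safety limit is reached (mirrors the timeout guard above).
    results, page = [], 1
    while True:
        resp = requests.get(url,
                            headers={'auth-token': token},  # hypothetical header
                            params={'page': page, 'page_size': page_size})
        resp.raise_for_status()
        data = resp.json()
        results.extend(data.get('results', []))
        if not data.get('more') or len(results) >= limit:
            return results[:limit]
        page += 1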
salguarnieri/intellij-community
python/lib/Lib/site-packages/django/contrib/gis/geos/prototypes/io.py
312
8473
import threading

from ctypes import byref, c_char_p, c_int, c_char, c_size_t, Structure, POINTER

from django.contrib.gis.geos.base import GEOSBase
from django.contrib.gis.geos.libgeos import GEOM_PTR
from django.contrib.gis.geos.prototypes.errcheck import check_geom, check_string, check_sized_string
from django.contrib.gis.geos.prototypes.geom import c_uchar_p, geos_char_p
from django.contrib.gis.geos.prototypes.threadsafe import GEOSFunc

### The WKB/WKT Reader/Writer structures and pointers ###
class WKTReader_st(Structure):
    pass

class WKTWriter_st(Structure):
    pass

class WKBReader_st(Structure):
    pass

class WKBWriter_st(Structure):
    pass

WKT_READ_PTR = POINTER(WKTReader_st)
WKT_WRITE_PTR = POINTER(WKTWriter_st)
WKB_READ_PTR = POINTER(WKBReader_st)
WKB_WRITE_PTR = POINTER(WKBWriter_st)

### WKTReader routines ###
wkt_reader_create = GEOSFunc('GEOSWKTReader_create')
wkt_reader_create.restype = WKT_READ_PTR

wkt_reader_destroy = GEOSFunc('GEOSWKTReader_destroy')
wkt_reader_destroy.argtypes = [WKT_READ_PTR]

wkt_reader_read = GEOSFunc('GEOSWKTReader_read')
wkt_reader_read.argtypes = [WKT_READ_PTR, c_char_p]
wkt_reader_read.restype = GEOM_PTR
wkt_reader_read.errcheck = check_geom

### WKTWriter routines ###
wkt_writer_create = GEOSFunc('GEOSWKTWriter_create')
wkt_writer_create.restype = WKT_WRITE_PTR

wkt_writer_destroy = GEOSFunc('GEOSWKTWriter_destroy')
wkt_writer_destroy.argtypes = [WKT_WRITE_PTR]

wkt_writer_write = GEOSFunc('GEOSWKTWriter_write')
wkt_writer_write.argtypes = [WKT_WRITE_PTR, GEOM_PTR]
wkt_writer_write.restype = geos_char_p
wkt_writer_write.errcheck = check_string

### WKBReader routines ###
wkb_reader_create = GEOSFunc('GEOSWKBReader_create')
wkb_reader_create.restype = WKB_READ_PTR

wkb_reader_destroy = GEOSFunc('GEOSWKBReader_destroy')
wkb_reader_destroy.argtypes = [WKB_READ_PTR]

def wkb_read_func(func):
    # Although the function definitions take `const unsigned char *`
    # as their parameter, we use c_char_p here so the function may
    # take Python strings directly as parameters.  Inside Python there
    # is not a difference between signed and unsigned characters, so
    # it is not a problem.
    func.argtypes = [WKB_READ_PTR, c_char_p, c_size_t]
    func.restype = GEOM_PTR
    func.errcheck = check_geom
    return func

wkb_reader_read = wkb_read_func(GEOSFunc('GEOSWKBReader_read'))
wkb_reader_read_hex = wkb_read_func(GEOSFunc('GEOSWKBReader_readHEX'))

### WKBWriter routines ###
wkb_writer_create = GEOSFunc('GEOSWKBWriter_create')
wkb_writer_create.restype = WKB_WRITE_PTR

wkb_writer_destroy = GEOSFunc('GEOSWKBWriter_destroy')
wkb_writer_destroy.argtypes = [WKB_WRITE_PTR]

# WKB Writing prototypes.
def wkb_write_func(func):
    func.argtypes = [WKB_WRITE_PTR, GEOM_PTR, POINTER(c_size_t)]
    func.restype = c_uchar_p
    func.errcheck = check_sized_string
    return func

wkb_writer_write = wkb_write_func(GEOSFunc('GEOSWKBWriter_write'))
wkb_writer_write_hex = wkb_write_func(GEOSFunc('GEOSWKBWriter_writeHEX'))

# WKBWriter property getter/setter prototypes.
def wkb_writer_get(func, restype=c_int):
    func.argtypes = [WKB_WRITE_PTR]
    func.restype = restype
    return func

def wkb_writer_set(func, argtype=c_int):
    func.argtypes = [WKB_WRITE_PTR, argtype]
    return func

wkb_writer_get_byteorder = wkb_writer_get(GEOSFunc('GEOSWKBWriter_getByteOrder'))
wkb_writer_set_byteorder = wkb_writer_set(GEOSFunc('GEOSWKBWriter_setByteOrder'))
wkb_writer_get_outdim = wkb_writer_get(GEOSFunc('GEOSWKBWriter_getOutputDimension'))
wkb_writer_set_outdim = wkb_writer_set(GEOSFunc('GEOSWKBWriter_setOutputDimension'))
wkb_writer_get_include_srid = wkb_writer_get(GEOSFunc('GEOSWKBWriter_getIncludeSRID'), restype=c_char)
wkb_writer_set_include_srid = wkb_writer_set(GEOSFunc('GEOSWKBWriter_setIncludeSRID'), argtype=c_char)

### Base I/O Class ###
class IOBase(GEOSBase):
    "Base class for GEOS I/O objects."
    def __init__(self):
        # Getting the pointer with the constructor.
        self.ptr = self._constructor()

    def __del__(self):
        # Cleaning up with the appropriate destructor.
        if self._ptr:
            self._destructor(self._ptr)

### Base WKB/WKT Reading and Writing objects ###

# Non-public WKB/WKT reader classes for internal use because
# their `read` methods return _pointers_ instead of GEOSGeometry
# objects.
class _WKTReader(IOBase):
    _constructor = wkt_reader_create
    _destructor = wkt_reader_destroy
    ptr_type = WKT_READ_PTR

    def read(self, wkt):
        if not isinstance(wkt, basestring):
            raise TypeError
        return wkt_reader_read(self.ptr, wkt)

class _WKBReader(IOBase):
    _constructor = wkb_reader_create
    _destructor = wkb_reader_destroy
    ptr_type = WKB_READ_PTR

    def read(self, wkb):
        "Returns a _pointer_ to C GEOS Geometry object from the given WKB."
        if isinstance(wkb, buffer):
            wkb_s = str(wkb)
            return wkb_reader_read(self.ptr, wkb_s, len(wkb_s))
        elif isinstance(wkb, basestring):
            return wkb_reader_read_hex(self.ptr, wkb, len(wkb))
        else:
            raise TypeError

### WKB/WKT Writer Classes ###
class WKTWriter(IOBase):
    _constructor = wkt_writer_create
    _destructor = wkt_writer_destroy
    ptr_type = WKT_WRITE_PTR

    def write(self, geom):
        "Returns the WKT representation of the given geometry."
        return wkt_writer_write(self.ptr, geom.ptr)

class WKBWriter(IOBase):
    _constructor = wkb_writer_create
    _destructor = wkb_writer_destroy
    ptr_type = WKB_WRITE_PTR

    def write(self, geom):
        "Returns the WKB representation of the given geometry."
        return buffer(wkb_writer_write(self.ptr, geom.ptr, byref(c_size_t())))

    def write_hex(self, geom):
        "Returns the HEXEWKB representation of the given geometry."
        return wkb_writer_write_hex(self.ptr, geom.ptr, byref(c_size_t()))

    ### WKBWriter Properties ###

    # Property for getting/setting the byteorder.
    def _get_byteorder(self):
        return wkb_writer_get_byteorder(self.ptr)

    def _set_byteorder(self, order):
        if order not in (0, 1):
            raise ValueError('Byte order parameter must be 0 (Big Endian) or 1 (Little Endian).')
        wkb_writer_set_byteorder(self.ptr, order)

    byteorder = property(_get_byteorder, _set_byteorder)

    # Property for getting/setting the output dimension.
    def _get_outdim(self):
        return wkb_writer_get_outdim(self.ptr)

    def _set_outdim(self, new_dim):
        if new_dim not in (2, 3):
            raise ValueError('WKB output dimension must be 2 or 3')
        wkb_writer_set_outdim(self.ptr, new_dim)

    outdim = property(_get_outdim, _set_outdim)

    # Property for getting/setting the include srid flag.
    def _get_include_srid(self):
        return bool(ord(wkb_writer_get_include_srid(self.ptr)))

    def _set_include_srid(self, include):
        if bool(include):
            flag = chr(1)
        else:
            flag = chr(0)
        wkb_writer_set_include_srid(self.ptr, flag)

    srid = property(_get_include_srid, _set_include_srid)

# `ThreadLocalIO` object holds instances of the WKT and WKB reader/writer
# objects that are local to the thread.  The `GEOSGeometry` internals
# access these instances by calling the module-level functions, defined
# below.
class ThreadLocalIO(threading.local):
    wkt_r = None
    wkt_w = None
    wkb_r = None
    wkb_w = None
    ewkb_w = None
    ewkb_w3d = None

thread_context = ThreadLocalIO()

# These module-level routines return the I/O object that is local to the
# thread.  If the I/O object does not exist yet it will be initialized.
def wkt_r():
    if not thread_context.wkt_r:
        thread_context.wkt_r = _WKTReader()
    return thread_context.wkt_r

def wkt_w():
    if not thread_context.wkt_w:
        thread_context.wkt_w = WKTWriter()
    return thread_context.wkt_w

def wkb_r():
    if not thread_context.wkb_r:
        thread_context.wkb_r = _WKBReader()
    return thread_context.wkb_r

def wkb_w():
    if not thread_context.wkb_w:
        thread_context.wkb_w = WKBWriter()
    return thread_context.wkb_w

def ewkb_w():
    if not thread_context.ewkb_w:
        thread_context.ewkb_w = WKBWriter()
        thread_context.ewkb_w.srid = True
    return thread_context.ewkb_w

def ewkb_w3d():
    if not thread_context.ewkb_w3d:
        thread_context.ewkb_w3d = WKBWriter()
        thread_context.ewkb_w3d.srid = True
        thread_context.ewkb_w3d.outdim = 3
    return thread_context.ewkb_w3d
apache-2.0
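The module above hands out one reader/writer per thread via ThreadLocalIO. Here is a standalone sketch of that lazy, thread-local singleton pattern; ExpensiveParser is a hypothetical stand-in for the GEOS wrapper objects.

import threading

class ExpensiveParser(object):
    # Stand-in for a costly-to-construct, non-thread-safe object.
    def parse(self, text):
        return text.upper()

class _ThreadContext(threading.local):
    parser = None  # each thread starts with its own empty slot

_ctx = _ThreadContext()

def get_parser():
    # Create the instance lazily, at most once per thread; other threads
    # never see this thread's instance.
    if _ctx.parser is None:
        _ctx.parser = ExpensiveParser()
    return _ctx.parser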
andymckay/django
django/contrib/formtools/tests/wizard/wizardtests/tests.py
5
15496
import os from django import forms from django.test import TestCase from django.test.client import RequestFactory from django.conf import settings from django.contrib.auth.models import User from django.contrib.formtools.wizard.views import CookieWizardView from django.contrib.formtools.tests.wizard.forms import UserForm, UserFormSet class WizardTests(object): urls = 'django.contrib.formtools.tests.wizard.wizardtests.urls' def setUp(self): self.testuser, created = User.objects.get_or_create(username='testuser1') self.wizard_step_data[0]['form1-user'] = self.testuser.pk def test_initial_call(self): response = self.client.get(self.wizard_url) wizard = response.context['wizard'] self.assertEqual(response.status_code, 200) self.assertEqual(wizard['steps'].current, 'form1') self.assertEqual(wizard['steps'].step0, 0) self.assertEqual(wizard['steps'].step1, 1) self.assertEqual(wizard['steps'].last, 'form4') self.assertEqual(wizard['steps'].prev, None) self.assertEqual(wizard['steps'].next, 'form2') self.assertEqual(wizard['steps'].count, 4) def test_form_post_error(self): response = self.client.post(self.wizard_url, self.wizard_step_1_data) self.assertEqual(response.status_code, 200) self.assertEqual(response.context['wizard']['steps'].current, 'form1') self.assertEqual(response.context['wizard']['form'].errors, {'name': [u'This field is required.'], 'user': [u'This field is required.']}) def test_form_post_success(self): response = self.client.post(self.wizard_url, self.wizard_step_data[0]) wizard = response.context['wizard'] self.assertEqual(response.status_code, 200) self.assertEqual(wizard['steps'].current, 'form2') self.assertEqual(wizard['steps'].step0, 1) self.assertEqual(wizard['steps'].prev, 'form1') self.assertEqual(wizard['steps'].next, 'form3') def test_form_stepback(self): response = self.client.get(self.wizard_url) self.assertEqual(response.status_code, 200) self.assertEqual(response.context['wizard']['steps'].current, 'form1') response = self.client.post(self.wizard_url, self.wizard_step_data[0]) self.assertEqual(response.status_code, 200) self.assertEqual(response.context['wizard']['steps'].current, 'form2') response = self.client.post(self.wizard_url, { 'wizard_goto_step': response.context['wizard']['steps'].prev}) self.assertEqual(response.status_code, 200) self.assertEqual(response.context['wizard']['steps'].current, 'form1') def test_template_context(self): response = self.client.get(self.wizard_url) self.assertEqual(response.status_code, 200) self.assertEqual(response.context['wizard']['steps'].current, 'form1') self.assertEqual(response.context.get('another_var', None), None) response = self.client.post(self.wizard_url, self.wizard_step_data[0]) self.assertEqual(response.status_code, 200) self.assertEqual(response.context['wizard']['steps'].current, 'form2') self.assertEqual(response.context.get('another_var', None), True) def test_form_finish(self): response = self.client.get(self.wizard_url) self.assertEqual(response.status_code, 200) self.assertEqual(response.context['wizard']['steps'].current, 'form1') response = self.client.post(self.wizard_url, self.wizard_step_data[0]) self.assertEqual(response.status_code, 200) self.assertEqual(response.context['wizard']['steps'].current, 'form2') post_data = self.wizard_step_data[1] post_data['form2-file1'] = open(__file__) response = self.client.post(self.wizard_url, post_data) self.assertEqual(response.status_code, 200) self.assertEqual(response.context['wizard']['steps'].current, 'form3') response = 
self.client.post(self.wizard_url, self.wizard_step_data[2]) self.assertEqual(response.status_code, 200) self.assertEqual(response.context['wizard']['steps'].current, 'form4') response = self.client.post(self.wizard_url, self.wizard_step_data[3]) self.assertEqual(response.status_code, 200) all_data = response.context['form_list'] self.assertEqual(all_data[1]['file1'].read(), open(__file__).read()) del all_data[1]['file1'] self.assertEqual(all_data, [ {'name': u'Pony', 'thirsty': True, 'user': self.testuser}, {'address1': u'123 Main St', 'address2': u'Djangoland'}, {'random_crap': u'blah blah'}, [{'random_crap': u'blah blah'}, {'random_crap': u'blah blah'}]]) def test_cleaned_data(self): response = self.client.get(self.wizard_url) self.assertEqual(response.status_code, 200) response = self.client.post(self.wizard_url, self.wizard_step_data[0]) self.assertEqual(response.status_code, 200) post_data = self.wizard_step_data[1] post_data['form2-file1'] = open(__file__) response = self.client.post(self.wizard_url, post_data) self.assertEqual(response.status_code, 200) response = self.client.post(self.wizard_url, self.wizard_step_data[2]) self.assertEqual(response.status_code, 200) response = self.client.post(self.wizard_url, self.wizard_step_data[3]) self.assertEqual(response.status_code, 200) all_data = response.context['all_cleaned_data'] self.assertEqual(all_data['file1'].read(), open(__file__).read()) del all_data['file1'] self.assertEqual(all_data, { 'name': u'Pony', 'thirsty': True, 'user': self.testuser, 'address1': u'123 Main St', 'address2': u'Djangoland', 'random_crap': u'blah blah', 'formset-form4': [ {'random_crap': u'blah blah'}, {'random_crap': u'blah blah'}]}) def test_manipulated_data(self): response = self.client.get(self.wizard_url) self.assertEqual(response.status_code, 200) response = self.client.post(self.wizard_url, self.wizard_step_data[0]) self.assertEqual(response.status_code, 200) post_data = self.wizard_step_data[1] post_data['form2-file1'] = open(__file__) response = self.client.post(self.wizard_url, post_data) self.assertEqual(response.status_code, 200) response = self.client.post(self.wizard_url, self.wizard_step_data[2]) self.assertEqual(response.status_code, 200) self.client.cookies.pop('sessionid', None) self.client.cookies.pop('wizard_cookie_contact_wizard', None) response = self.client.post(self.wizard_url, self.wizard_step_data[3]) self.assertEqual(response.status_code, 200) self.assertEqual(response.context['wizard']['steps'].current, 'form1') def test_form_refresh(self): response = self.client.get(self.wizard_url) self.assertEqual(response.status_code, 200) self.assertEqual(response.context['wizard']['steps'].current, 'form1') response = self.client.post(self.wizard_url, self.wizard_step_data[0]) self.assertEqual(response.status_code, 200) self.assertEqual(response.context['wizard']['steps'].current, 'form2') response = self.client.post(self.wizard_url, self.wizard_step_data[0]) self.assertEqual(response.status_code, 200) self.assertEqual(response.context['wizard']['steps'].current, 'form2') post_data = self.wizard_step_data[1] post_data['form2-file1'] = open(__file__) response = self.client.post(self.wizard_url, post_data) self.assertEqual(response.status_code, 200) self.assertEqual(response.context['wizard']['steps'].current, 'form3') response = self.client.post(self.wizard_url, self.wizard_step_data[2]) self.assertEqual(response.status_code, 200) self.assertEqual(response.context['wizard']['steps'].current, 'form4') response = 
self.client.post(self.wizard_url, self.wizard_step_data[0]) self.assertEqual(response.status_code, 200) self.assertEqual(response.context['wizard']['steps'].current, 'form2') response = self.client.post(self.wizard_url, self.wizard_step_data[3]) self.assertEqual(response.status_code, 200) class SessionWizardTests(WizardTests, TestCase): wizard_url = '/wiz_session/' wizard_step_1_data = { 'session_contact_wizard-current_step': 'form1', } wizard_step_data = ( { 'form1-name': 'Pony', 'form1-thirsty': '2', 'session_contact_wizard-current_step': 'form1', }, { 'form2-address1': '123 Main St', 'form2-address2': 'Djangoland', 'session_contact_wizard-current_step': 'form2', }, { 'form3-random_crap': 'blah blah', 'session_contact_wizard-current_step': 'form3', }, { 'form4-INITIAL_FORMS': '0', 'form4-TOTAL_FORMS': '2', 'form4-MAX_NUM_FORMS': '0', 'form4-0-random_crap': 'blah blah', 'form4-1-random_crap': 'blah blah', 'session_contact_wizard-current_step': 'form4', } ) class CookieWizardTests(WizardTests, TestCase): wizard_url = '/wiz_cookie/' wizard_step_1_data = { 'cookie_contact_wizard-current_step': 'form1', } wizard_step_data = ( { 'form1-name': 'Pony', 'form1-thirsty': '2', 'cookie_contact_wizard-current_step': 'form1', }, { 'form2-address1': '123 Main St', 'form2-address2': 'Djangoland', 'cookie_contact_wizard-current_step': 'form2', }, { 'form3-random_crap': 'blah blah', 'cookie_contact_wizard-current_step': 'form3', }, { 'form4-INITIAL_FORMS': '0', 'form4-TOTAL_FORMS': '2', 'form4-MAX_NUM_FORMS': '0', 'form4-0-random_crap': 'blah blah', 'form4-1-random_crap': 'blah blah', 'cookie_contact_wizard-current_step': 'form4', } ) class WizardTestKwargs(TestCase): wizard_url = '/wiz_other_template/' wizard_step_1_data = { 'cookie_contact_wizard-current_step': 'form1', } wizard_step_data = ( { 'form1-name': 'Pony', 'form1-thirsty': '2', 'cookie_contact_wizard-current_step': 'form1', }, { 'form2-address1': '123 Main St', 'form2-address2': 'Djangoland', 'cookie_contact_wizard-current_step': 'form2', }, { 'form3-random_crap': 'blah blah', 'cookie_contact_wizard-current_step': 'form3', }, { 'form4-INITIAL_FORMS': '0', 'form4-TOTAL_FORMS': '2', 'form4-MAX_NUM_FORMS': '0', 'form4-0-random_crap': 'blah blah', 'form4-1-random_crap': 'blah blah', 'cookie_contact_wizard-current_step': 'form4', } ) urls = 'django.contrib.formtools.tests.wizard.wizardtests.urls' def setUp(self): self.testuser, created = User.objects.get_or_create(username='testuser1') self.wizard_step_data[0]['form1-user'] = self.testuser.pk def test_template(self): templates = os.path.join(os.path.dirname(__file__), 'templates') with self.settings( TEMPLATE_DIRS=list(settings.TEMPLATE_DIRS) + [templates]): response = self.client.get(self.wizard_url) self.assertTemplateUsed(response, 'other_wizard_form.html') class WizardTestGenericViewInterface(TestCase): def test_get_context_data_inheritance(self): class TestWizard(CookieWizardView): """ A subclass that implements ``get_context_data`` using the standard protocol for generic views (accept only **kwargs). See ticket #17148. 
""" def get_context_data(self, **kwargs): context = super(TestWizard, self).get_context_data(**kwargs) context['test_key'] = 'test_value' return context factory = RequestFactory() view = TestWizard.as_view([forms.Form]) response = view(factory.get('/')) self.assertEquals(response.context_data['test_key'], 'test_value') def test_get_context_data_with_mixin(self): class AnotherMixin(object): def get_context_data(self, **kwargs): context = super(AnotherMixin, self).get_context_data(**kwargs) context['another_key'] = 'another_value' return context class TestWizard(AnotherMixin, CookieWizardView): """ A subclass that implements ``get_context_data`` using the standard protocol for generic views (accept only **kwargs). See ticket #17148. """ def get_context_data(self, **kwargs): context = super(TestWizard, self).get_context_data(**kwargs) context['test_key'] = 'test_value' return context factory = RequestFactory() view = TestWizard.as_view([forms.Form]) response = view(factory.get('/')) self.assertEquals(response.context_data['test_key'], 'test_value') self.assertEquals(response.context_data['another_key'], 'another_value') class WizardFormKwargsOverrideTests(TestCase): def setUp(self): super(WizardFormKwargsOverrideTests, self).setUp() self.rf = RequestFactory() # Create two users so we can filter by is_staff when handing our # wizard a queryset keyword argument. self.normal_user = User.objects.create(username='test1', email='normal@example.com') self.staff_user = User.objects.create(username='test2', email='staff@example.com', is_staff=True) def test_instance_is_maintained(self): self.assertEqual(2, User.objects.count()) queryset = User.objects.get(pk=self.staff_user.pk) class InstanceOverrideWizard(CookieWizardView): def get_form_kwargs(self, step): return {'instance': queryset} view = InstanceOverrideWizard.as_view([UserForm]) response = view(self.rf.get('/')) form = response.context_data['wizard']['form'] self.assertNotEqual(form.instance.pk, None) self.assertEqual(form.instance.pk, self.staff_user.pk) self.assertEqual('staff@example.com', form.initial.get('email', None)) def test_queryset_is_maintained(self): queryset = User.objects.filter(pk=self.staff_user.pk) class QuerySetOverrideWizard(CookieWizardView): def get_form_kwargs(self, step): return {'queryset': queryset} view = QuerySetOverrideWizard.as_view([UserFormSet]) response = view(self.rf.get('/')) formset = response.context_data['wizard']['form'] self.assertNotEqual(formset.queryset, None) self.assertEqual(formset.initial_form_count(), 1) self.assertEqual(['staff@example.com'], list(formset.queryset.values_list('email', flat=True)))
bsd-3-clause
mandeepdhami/horizon
openstack_dashboard/enabled/_250_identity_users_panel.py
24
1144
# Copyright 2015 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

# The slug of the dashboard the PANEL is associated with. Required.
PANEL_DASHBOARD = 'identity'
# The slug of the panel group the PANEL is associated with.
# If you want the panel to show up without a panel group,
# use the panel group "default".
PANEL_GROUP = 'default'
# The slug of the panel to be added to HORIZON_CONFIG. Required.
PANEL = 'ngusers'
# If set to True, this panel will not be added to the settings.
DISABLED = True
# Python panel class of the PANEL to be added.
ADD_PANEL = ('openstack_dashboard.dashboards.identity.ngusers.panel.NGUsers')
apache-2.0
jcmgray/xarray
doc/gallery/plot_cartopy_facetgrid.py
3
1379
# -*- coding: utf-8 -*-
"""
==================================
Multiple plots and map projections
==================================

Control the map projection parameters on multiple axes

This example illustrates how to plot multiple maps and control their extent
and aspect ratio. For more details see `this discussion`_ on github.

.. _this discussion: https://github.com/pydata/xarray/issues/1397#issuecomment-299190567
"""  # noqa

from __future__ import division

import cartopy.crs as ccrs
import matplotlib.pyplot as plt

import xarray as xr

# Load the data
ds = xr.tutorial.load_dataset('air_temperature')
air = ds.air.isel(time=[0, 724]) - 273.15

# This is the map projection we want to plot *onto*
map_proj = ccrs.LambertConformal(central_longitude=-95, central_latitude=45)

p = air.plot(transform=ccrs.PlateCarree(),  # the data's projection
             col='time', col_wrap=1,  # multiplot settings
             aspect=ds.dims['lon'] / ds.dims['lat'],  # for a sensible figsize
             subplot_kws={'projection': map_proj})  # the plot's projection

# We have to set the map's options on all four axes
for ax in p.axes.flat:
    ax.coastlines()
    ax.set_extent([-160, -30, 5, 75])
    # Without these aspect settings the maps would look chaotic and the
    # "extent" attribute above would be ignored
    ax.set_aspect('equal', 'box-forced')

plt.show()
apache-2.0
tammoippen/nest-simulator
pynest/nest/tests/test_status.py
5
7739
# -*- coding: utf-8 -*- # # test_status.py # # This file is part of NEST. # # Copyright (C) 2004 The NEST Initiative # # NEST is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 2 of the License, or # (at your option) any later version. # # NEST is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with NEST. If not, see <http://www.gnu.org/licenses/>. """ Test if Set/GetStatus work properly """ import unittest import nest @nest.ll_api.check_stack class StatusTestCase(unittest.TestCase): """Tests of Set/GetStatus""" def test_GetKernelStatus(self): """GetKernelStatus""" nest.ResetKernel() kernel_status = nest.GetKernelStatus() self.assertIsInstance(kernel_status, dict) self.assertGreater(len(kernel_status), 1) self.assertRaises(KeyError, nest.GetKernelStatus, "nonexistent_status_key") test_keys = ("resolution", ) * 3 kernel_status = nest.GetKernelStatus(test_keys) self.assertEqual(len(kernel_status), len(test_keys)) self.assertRaises(TypeError, nest.GetKernelStatus, 42) def test_SetKernelStatus(self): """SetKernelStatus""" nest.ResetKernel() nest.SetKernelStatus({}) nest.SetKernelStatus({'resolution': 0.2}) self.assertRaisesRegex( nest.kernel.NESTError, "DictError", nest.SetKernelStatus, {'nonexistent_status_key': 0}) def test_GetDefaults(self): """GetDefaults""" nest.ResetKernel() # sli_neuron does not work under PyNEST models = (m for m in nest.Models() if m != 'sli_neuron') for model in models: model_status = nest.GetDefaults(model) self.assertIsInstance(model_status, dict) self.assertGreater(len(model_status), 1) self.assertRaises(TypeError, nest.GetDefaults, model, 42) if "V_m" in model_status: test_value = nest.GetDefaults(model, "V_m") self.assertIsInstance(test_value, float) test_keys = ("V_m", ) * 3 model_status = nest.GetDefaults(model, test_keys) self.assertEqual(len(model_status), len(test_keys)) def test_SetDefaults(self): """SetDefaults""" nest.ResetKernel() # sli_neuron does not work under PyNEST models = (m for m in nest.Models() if m != 'sli_neuron') for m in models: if 'V_m' in nest.GetDefaults(m): v_m = nest.GetDefaults(m)['V_m'] nest.SetDefaults(m, {'V_m': -1.}) self.assertEqual(nest.GetDefaults(m, 'V_m'), -1.) 
nest.SetDefaults(m, 'V_m', v_m) self.assertEqual(nest.GetDefaults(m, 'V_m'), v_m) self.assertRaisesRegex( nest.kernel.NESTError, "DictError", nest.SetDefaults, m, 'nonexistent_status_key', 0) def test_GetStatus(self): """GetStatus""" # sli_neuron does not work under PyNEST models = (m for m in nest.Models() if m != 'sli_neuron') for m in models: if 'V_m' in nest.GetDefaults(m): nest.ResetKernel() n = nest.Create(m) d = nest.GetStatus(n) self.assertIsInstance(d, tuple) self.assertIsInstance(d[0], dict) self.assertGreater(len(d[0]), 1) v1 = nest.GetStatus(n)[0]['V_m'] v2 = nest.GetStatus(n, 'V_m')[0] self.assertEqual(v1, v2) n = nest.Create(m, 10) d = nest.GetStatus(n, 'V_m') self.assertEqual(len(d), len(n)) self.assertIsInstance(d[0], float) test_keys = ("V_m", ) * 3 d = nest.GetStatus(n, test_keys) self.assertEqual(len(d), len(n)) self.assertEqual(len(d[0]), len(test_keys)) def test_SetStatus(self): """SetStatus with dict""" # sli_neuron does not work under PyNEST models = (m for m in nest.Models() if m != 'sli_neuron') for m in models: if 'V_m' in nest.GetDefaults(m): nest.ResetKernel() n = nest.Create(m) nest.SetStatus(n, {'V_m': 1.}) self.assertEqual(nest.GetStatus(n, 'V_m')[0], 1.) def test_SetStatusList(self): """SetStatus with list""" # sli_neuron does not work under PyNEST models = (m for m in nest.Models() if m != 'sli_neuron') for m in models: if 'V_m' in nest.GetDefaults(m): nest.ResetKernel() n = nest.Create(m) nest.SetStatus(n, [{'V_m': 2.}]) self.assertEqual(nest.GetStatus(n, 'V_m')[0], 2.) def test_SetStatusParam(self): """SetStatus with parameter""" # sli_neuron does not work under PyNEST models = (m for m in nest.Models() if m != 'sli_neuron') for m in models: if 'V_m' in nest.GetDefaults(m): nest.ResetKernel() n = nest.Create(m) nest.SetStatus(n, 'V_m', 3.) self.assertEqual(nest.GetStatus(n, 'V_m')[0], 3.) def test_SetStatusVth_E_L(self): """SetStatus of reversal and threshold potential """ # sli_neuron does not work under PyNEST models = ( m for m in nest.Models() if m not in ( 'sli_neuron', 'a2eif_cond_exp_HW', 'mat2_psc_exp', 'amat2_psc_exp' ) ) for m in models: if all(key in nest.GetDefaults(m) for key in ('V_th', 'E_L')): nest.ResetKernel() neuron1 = nest.Create(m) neuron2 = nest.Create(m) # must not depend on the order new_EL = -90. new_Vth = -10. if 'V_reset' in nest.GetDefaults(m): nest.SetStatus(neuron1 + neuron2, {'V_reset': new_EL}) nest.SetStatus(neuron1, {'E_L': new_EL}) nest.SetStatus(neuron2, {'V_th': new_Vth}) nest.SetStatus(neuron1, {'V_th': new_Vth}) nest.SetStatus(neuron2, {'E_L': new_EL}) vth1, vth2 = nest.GetStatus(neuron1 + neuron2, 'V_th') self.assertEqual(vth1, vth2) def test_SetStatusV_th_smaller_V_reset(self): """SetStatus of reversal and threshold potential check if error is raised if V_reset > V_th""" # sli_neuron does not work under PyNEST models = (m for m in nest.Models() if m != 'sli_neuron') for m in models: if all(key in nest.GetDefaults(m) for key in ('V_th', 'V_reset')): nest.ResetKernel() neuron = nest.Create(m) # should raise exception self.assertRaisesRegex( nest.kernel.NESTError, "BadProperty", nest.SetStatus, neuron, {'V_reset': 10., 'V_th': 0.} ) def suite(): suite = unittest.makeSuite(StatusTestCase, 'test') return suite def run(): runner = unittest.TextTestRunner(verbosity=2) runner.run(suite()) if __name__ == "__main__": run()
gpl-2.0
WSDC-NITWarangal/django
tests/utils_tests/test_dateformat.py
263
6177
from __future__ import unicode_literals from datetime import date, datetime from django.test import SimpleTestCase, override_settings from django.test.utils import TZ_SUPPORT, requires_tz_support from django.utils import dateformat, translation from django.utils.dateformat import format from django.utils.timezone import ( get_default_timezone, get_fixed_timezone, make_aware, utc, ) @override_settings(TIME_ZONE='Europe/Copenhagen') class DateFormatTests(SimpleTestCase): def setUp(self): self._orig_lang = translation.get_language() translation.activate('en-us') def tearDown(self): translation.activate(self._orig_lang) def test_date(self): d = date(2009, 5, 16) self.assertEqual(date.fromtimestamp(int(format(d, 'U'))), d) def test_naive_datetime(self): dt = datetime(2009, 5, 16, 5, 30, 30) self.assertEqual(datetime.fromtimestamp(int(format(dt, 'U'))), dt) @requires_tz_support def test_datetime_with_local_tzinfo(self): ltz = get_default_timezone() dt = make_aware(datetime(2009, 5, 16, 5, 30, 30), ltz) self.assertEqual(datetime.fromtimestamp(int(format(dt, 'U')), ltz), dt) self.assertEqual(datetime.fromtimestamp(int(format(dt, 'U'))), dt.replace(tzinfo=None)) @requires_tz_support def test_datetime_with_tzinfo(self): tz = get_fixed_timezone(-510) ltz = get_default_timezone() dt = make_aware(datetime(2009, 5, 16, 5, 30, 30), ltz) self.assertEqual(datetime.fromtimestamp(int(format(dt, 'U')), tz), dt) self.assertEqual(datetime.fromtimestamp(int(format(dt, 'U')), ltz), dt) # astimezone() is safe here because the target timezone doesn't have DST self.assertEqual(datetime.fromtimestamp(int(format(dt, 'U'))), dt.astimezone(ltz).replace(tzinfo=None)) self.assertEqual(datetime.fromtimestamp(int(format(dt, 'U')), tz).utctimetuple(), dt.utctimetuple()) self.assertEqual(datetime.fromtimestamp(int(format(dt, 'U')), ltz).utctimetuple(), dt.utctimetuple()) def test_epoch(self): udt = datetime(1970, 1, 1, tzinfo=utc) self.assertEqual(format(udt, 'U'), '0') def test_empty_format(self): my_birthday = datetime(1979, 7, 8, 22, 00) self.assertEqual(dateformat.format(my_birthday, ''), '') def test_am_pm(self): my_birthday = datetime(1979, 7, 8, 22, 00) self.assertEqual(dateformat.format(my_birthday, 'a'), 'p.m.') def test_microsecond(self): # Regression test for #18951 dt = datetime(2009, 5, 16, microsecond=123) self.assertEqual(dateformat.format(dt, 'u'), '000123') def test_date_formats(self): my_birthday = datetime(1979, 7, 8, 22, 00) timestamp = datetime(2008, 5, 19, 11, 45, 23, 123456) self.assertEqual(dateformat.format(my_birthday, 'A'), 'PM') self.assertEqual(dateformat.format(timestamp, 'c'), '2008-05-19T11:45:23.123456') self.assertEqual(dateformat.format(my_birthday, 'd'), '08') self.assertEqual(dateformat.format(my_birthday, 'j'), '8') self.assertEqual(dateformat.format(my_birthday, 'l'), 'Sunday') self.assertEqual(dateformat.format(my_birthday, 'L'), 'False') self.assertEqual(dateformat.format(my_birthday, 'm'), '07') self.assertEqual(dateformat.format(my_birthday, 'M'), 'Jul') self.assertEqual(dateformat.format(my_birthday, 'b'), 'jul') self.assertEqual(dateformat.format(my_birthday, 'n'), '7') self.assertEqual(dateformat.format(my_birthday, 'N'), 'July') def test_time_formats(self): my_birthday = datetime(1979, 7, 8, 22, 00) self.assertEqual(dateformat.format(my_birthday, 'P'), '10 p.m.') self.assertEqual(dateformat.format(my_birthday, 's'), '00') self.assertEqual(dateformat.format(my_birthday, 'S'), 'th') self.assertEqual(dateformat.format(my_birthday, 't'), '31') 
self.assertEqual(dateformat.format(my_birthday, 'w'), '0') self.assertEqual(dateformat.format(my_birthday, 'W'), '27') self.assertEqual(dateformat.format(my_birthday, 'y'), '79') self.assertEqual(dateformat.format(my_birthday, 'Y'), '1979') self.assertEqual(dateformat.format(my_birthday, 'z'), '189') def test_dateformat(self): my_birthday = datetime(1979, 7, 8, 22, 00) self.assertEqual(dateformat.format(my_birthday, r'Y z \C\E\T'), '1979 189 CET') self.assertEqual(dateformat.format(my_birthday, r'jS \o\f F'), '8th of July') def test_futuredates(self): the_future = datetime(2100, 10, 25, 0, 00) self.assertEqual(dateformat.format(the_future, r'Y'), '2100') def test_timezones(self): my_birthday = datetime(1979, 7, 8, 22, 00) summertime = datetime(2005, 10, 30, 1, 00) wintertime = datetime(2005, 10, 30, 4, 00) timestamp = datetime(2008, 5, 19, 11, 45, 23, 123456) # 3h30m to the west of UTC tz = get_fixed_timezone(-210) aware_dt = datetime(2009, 5, 16, 5, 30, 30, tzinfo=tz) if TZ_SUPPORT: self.assertEqual(dateformat.format(my_birthday, 'O'), '+0100') self.assertEqual(dateformat.format(my_birthday, 'r'), 'Sun, 8 Jul 1979 22:00:00 +0100') self.assertEqual(dateformat.format(my_birthday, 'T'), 'CET') self.assertEqual(dateformat.format(my_birthday, 'e'), '') self.assertEqual(dateformat.format(aware_dt, 'e'), '-0330') self.assertEqual(dateformat.format(my_birthday, 'U'), '300315600') self.assertEqual(dateformat.format(timestamp, 'u'), '123456') self.assertEqual(dateformat.format(my_birthday, 'Z'), '3600') self.assertEqual(dateformat.format(summertime, 'I'), '1') self.assertEqual(dateformat.format(summertime, 'O'), '+0200') self.assertEqual(dateformat.format(wintertime, 'I'), '0') self.assertEqual(dateformat.format(wintertime, 'O'), '+0100') # Ticket #16924 -- We don't need timezone support to test this self.assertEqual(dateformat.format(aware_dt, 'O'), '-0330')
bsd-3-clause
kejkz/holmium.core
tests/config_tests.py
2
4487
import unittest
import json

from holmium.core import Config
from holmium.core.config import HolmiumConfig, configure


class ConfigTests(unittest.TestCase):
    def test_json_config(self):
        json_cfg = """
        {
            "default": {
                "t3": 4,
                "username": "{{holmium.environment}}user"
            },
            "production": {
                "t1": 1,
                "t2": 2
            },
            "development": {
                "t1": "{{production['t1']}}",
                "t2": 3
            }
        }
        """
        cfg = Config(json.loads(json_cfg))
        self.assertEqual(cfg["t1"], u"1")
        self.assertEqual(cfg["t2"], 3)
        self.assertEqual(cfg["t3"], 4)
        self.assertEqual(cfg["username"], "developmentuser")
        holmium_vars = {"holmium": {"environment": "production"}}
        cfg = Config(json.loads(json_cfg), holmium_vars)
        self.assertEqual(cfg["t1"], 1)
        self.assertEqual(cfg["t2"], 2)
        self.assertEqual(cfg["t3"], 4)
        self.assertEqual(cfg["username"], "productionuser")

    def test_dict_config(self):
        dct_cfg = {"production": {"t2": "{{default['t2']}}", "t3": 4},
                   "development": {"t2": u"{{production['t2']}}"},
                   "default": {"t1": 1, "t2": 2, "t3": [1, 2, 3]}}
        cfg = Config(dct_cfg)
        self.assertEqual(cfg["t1"], 1)
        self.assertEqual(cfg["t2"], u"2")
        self.assertEqual(cfg["t3"], [1, 2, 3])
        holmium_vars = {"holmium": {"environment": "production"}}
        cfg = Config(dct_cfg, holmium_vars)
        self.assertEqual(cfg["t1"], 1)
        self.assertEqual(cfg["t2"], u"2")
        self.assertEqual(cfg["t3"], 4)

    def test_config_update(self):
        cfg = Config({})
        cfg["foo"] = "{{holmium.environment}}"
        cfg["bar"] = 2
        self.assertEqual(cfg["foo"], "development")
        self.assertEqual(cfg["bar"], 2)

    def test_config_defaults_only(self):
        config = {
            "default": {
                "base_url": "http://{{holmium.environment}}:3000",
                "registration_url": "{{default.base_url}}/users/sign_up",
            }
        }
        cfg = Config(config)
        self.assertEqual(
            cfg["registration_url"],
            u"http://development:3000/users/sign_up")

    def test_config_default_reference(self):
        config = {
            "default": {
                "base_url": "http://{{holmium.environment}}:3000",
                "registration_url": "{{base_url}}/users/sign_up",
                "random_var": "{{env_random}}"
            },
            "production": {
                "base_url": "http://awesomesite.com",
                "env_random": "1",
                "extended_random": "random_{{random_var}}"
            }
        }
        holmium_vars = {"holmium": {"environment": "production"}}
        cfg = Config(config, holmium_vars)
        self.assertEqual(
            cfg["registration_url"],
            u"http://awesomesite.com/users/sign_up")
        self.assertEqual(cfg["extended_random"], u"random_1")
        cfg = Config(config)
        self.assertEqual(
            cfg["registration_url"],
            u"http://development:3000/users/sign_up")

    def test_holmium_config_object(self):
        cfg = HolmiumConfig(1, 2, 3, 4, 5, 6, 7)
        self.assertEqual(cfg, {
            "browser": 1,
            "remote": 2,
            "capabilities": 3,
            "user_agent": 4,
            "environment": 5,
            "ignore_ssl": 6,
            "fresh_instance": 7,
        })
        self.assertEqual(cfg.browser, 1)
        self.assertEqual(cfg.remote, 2)
        self.assertEqual(cfg.capabilities, 3)
        self.assertEqual(cfg.user_agent, 4)
        self.assertEqual(cfg.environment, 5)
        self.assertEqual(cfg.ignore_ssl, 6)
        self.assertEqual(cfg.fresh_instance, 7)
        cfg.browser = 2
        self.assertEqual(cfg.browser, 2)
        self.assertEqual(cfg["browser"], 2)
        nested = {"holmium": {}}
        nested["holmium"]["config"] = cfg
        nested["holmium"]["config"]["user_agent"] = 1
        self.assertEqual(cfg.user_agent, 1)
        cfg["foo"] = "bar"
        self.assertEqual(cfg.foo, "bar")

    def test_holmium_config_unknown_browser(self):
        cfg = HolmiumConfig(
            "awesome", "", {}, "", "development", False, False
        )
        self.assertRaises(RuntimeError, configure, cfg)
mit
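The tests above exercise Config's layering: lookups resolve against the active environment first, then fall back to "default", with Jinja-style templating between sections. A short usage sketch, with made-up values, assuming holmium.core is installed:

from holmium.core import Config

description = {
    "default": {"base_url": "http://{{holmium.environment}}:3000"},
    "production": {"base_url": "https://example.com"},
}
cfg = Config(description, {"holmium": {"environment": "production"}})
print(cfg["base_url"])     # https://example.com  (production layer wins)
cfg = Config(description)  # environment defaults to "development"
print(cfg["base_url"])     # http://development:3000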
bchiroma/DreamProject_2
dream/KnowledgeExtraction/KEtool_examples/TwoParallelStations/TwoParallelStations_example.py
4
4664
'''
Created on 20 Jun 2014

@author: Panos
'''
# ===========================================================================
# Copyright 2013 University of Limerick
#
# This file is part of DREAM.
#
# DREAM is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# DREAM is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with DREAM.  If not, see <http://www.gnu.org/licenses/>.
# ===========================================================================

from DistributionFitting import DistFittest
from DistributionFitting import Distributions
from ImportExceldata import Import_Excel
from ExcelOutput import Output
from ReplaceMissingValues import HandleMissingValues
import xlrd
import json
import dream.simulation.LineGenerationJSON as ManPyMain  # import ManPy main JSON script

# Read from the given directory the Excel document with the input data
workbook = xlrd.open_workbook('inputData.xls')
worksheets = workbook.sheet_names()
worksheet_ProcessingTimes = worksheets[0]  # Define the worksheet with the Processing times data

inputData = Import_Excel()  # Call the Python object Import_Excel
ProcessingTimes = inputData.Input_data(worksheet_ProcessingTimes, workbook)  # Create the Processing Times dictionary with keys Machines 1, 2 and values the processing time data

# Get from the above dictionary the M1 and M2 keys and define the following lists with data
M1_ProcTime = ProcessingTimes.get('M1', [])
M2_ProcTime = ProcessingTimes.get('M2', [])

# Call the HandleMissingValues object and replace the missing values in the lists with the mean of the non-missing values
misValues = HandleMissingValues()
M1_ProcTime = misValues.ReplaceWithMean(M1_ProcTime)
M2_ProcTime = misValues.ReplaceWithMean(M2_ProcTime)

MLE = Distributions()  # Call the Distributions object (Maximum Likelihood Estimation - MLE)
KS = DistFittest()  # Call the DistFittest object (Kolmogorov-Smirnov test)

M1ProcTime_dist = KS.ks_test(M1_ProcTime)
M2ProcTime_dist = MLE.Normal_distrfit(M2_ProcTime)

# ============ Output preparation: output the updated values in the JSON file of this example ============ #
jsonFile = open('JSON_TwoParallelStations.json', 'r')  # It opens the JSON file
data = json.load(jsonFile)  # It loads the file
jsonFile.close()
nodes = data.get('nodes', [])  # It creates a variable that holds the 'nodes' dictionary
for element in nodes:
    processingTime = nodes[element].get('processingTime', {})  # It creates a variable that gets the element attribute 'processingTime'
    if element == 'St1':
        nodes['St1']['processingTime'] = M1ProcTime_dist  # It checks using if syntax whether the element is 'St1'
    elif element == 'St2':
        nodes['St2']['processingTime'] = M2ProcTime_dist  # It checks using if syntax whether the element is 'St2'

jsonFile = open('JSON_ParallelStations_Output.json', "w")  # It opens the JSON file
jsonFile.write(json.dumps(data, indent=True))  # It writes the updated data to the JSON file
jsonFile.close()  # It closes the file

# ======== Calling the ExcelOutput object, outputs the outcomes of the statistical analysis in xls files ======== #
export = Output()
export.PrintStatisticalMeasures(M1_ProcTime, 'M1_ProcTime_StatResults.xls')
export.PrintStatisticalMeasures(M2_ProcTime, 'M2_ProcTime_StatResults.xls')
export.PrintDistributionFit(M1_ProcTime, 'M1_ProcTime_DistFitResults.xls')
export.PrintDistributionFit(M2_ProcTime, 'M2_ProcTime_DistFitResults.xls')

# calls ManPy main script with the input
simulationOutput = ManPyMain.main(input_data=json.dumps(data))
# save the simulation output
jsonFile = open('ManPyOutput.json', "w")  # It opens the JSON file
jsonFile.write(simulationOutput)  # It writes the simulation output to the JSON file
jsonFile.close()  # It closes the file
gpl-3.0
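The script's output stage is a plain read-update-write JSON round trip. A minimal sketch of that pattern, with hypothetical file names and a made-up distribution dict (the real shape comes from the KEtool fitters):

import json

with open('model.json') as f:            # hypothetical input file
    data = json.load(f)

nodes = data.get('nodes', {})
if 'St1' in nodes:
    # Overwrite one node attribute; the dict shape here is illustrative only.
    nodes['St1']['processingTime'] = {'Normal': {'mean': 1.2, 'stdev': 0.3}}

with open('model_out.json', 'w') as f:   # hypothetical output file
    f.write(json.dumps(data, indent=True))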
eestay/edx-ora2
openassessment/assessment/test/test_ai_worker.py
8
14632
# -*- coding: utf-8 -*- """ Tests for AI worker API calls. """ import copy import datetime from uuid import uuid4 import mock from django.db import DatabaseError from django.core.files.base import ContentFile from submissions import api as sub_api from openassessment.test_utils import CacheResetTest from openassessment.assessment.api import ai_worker as ai_worker_api from openassessment.assessment.models import ( AITrainingWorkflow, AIGradingWorkflow, AIClassifier, AIClassifierSet, Assessment ) from openassessment.assessment.serializers import ( rubric_from_dict, deserialize_training_examples ) from openassessment.assessment.errors import ( AITrainingRequestError, AITrainingInternalError, AIGradingRequestError, AIGradingInternalError ) from openassessment.assessment.test.constants import ( EXAMPLES, RUBRIC, STUDENT_ITEM, ANSWER ) ALGORITHM_ID = "test-algorithm" # Classifier data # Since this is controlled by the AI algorithm implementation, # we could put anything here as long as it's JSON-serializable. CLASSIFIERS = { u"vøȼȺƀᵾłȺɍɏ": { 'name': u'𝒕𝒆𝒔𝒕 𝒄𝒍𝒂𝒔𝒔𝒊𝒇𝒊𝒆𝒓', 'data': u'Öḧ ḷëẗ ẗḧë ṡüṅ ḅëäẗ ḋöẅṅ üṗöṅ ṁÿ ḟäċë, ṡẗäṛṡ ẗö ḟïḷḷ ṁÿ ḋṛëäṁ"' }, u"ﻭɼค๓๓คɼ": { 'name': u'𝒕𝒆𝒔𝒕 𝒄𝒍𝒂𝒔𝒔𝒊𝒇𝒊𝒆𝒓', 'data': u"І ам а тѓаvэlэѓ оf ъотЂ тімэ аиↁ ѕрасэ, то ъэ шЂэѓэ І Ђаvэ ъээи" } } class AIWorkerTrainingTest(CacheResetTest): """ Tests for the AI API calls a worker would make when completing a training task. """ COURSE_ID = u"sämplë ċöürsë" ITEM_ID = u"12231" ALGORITHM_ID = "test-algorithm" # Classifier data # Since this is controlled by the AI algorithm implementation, # we could put anything here as long as it's JSON-serializable. CLASSIFIERS = { u"vøȼȺƀᵾłȺɍɏ": { 'name': u'𝒕𝒆𝒔𝒕 𝒄𝒍𝒂𝒔𝒔𝒊𝒇𝒊𝒆𝒓', 'data': u'Öḧ ḷëẗ ẗḧë ṡüṅ ḅëäẗ ḋöẅṅ üṗöṅ ṁÿ ḟäċë, ṡẗäṛṡ ẗö ḟïḷḷ ṁÿ ḋṛëäṁ"' }, u"ﻭɼค๓๓คɼ": { 'name': u'𝒕𝒆𝒔𝒕 𝒄𝒍𝒂𝒔𝒔𝒊𝒇𝒊𝒆𝒓', 'data': u"І ам а тѓаvэlэѓ оf ъотЂ тімэ аиↁ ѕрасэ, то ъэ шЂэѓэ І Ђаvэ ъээи" } } def setUp(self): """ Create a training workflow in the database. 
""" examples = deserialize_training_examples(EXAMPLES, RUBRIC) workflow = AITrainingWorkflow.start_workflow(examples, self.COURSE_ID, self.ITEM_ID, self.ALGORITHM_ID) self.workflow_uuid = workflow.uuid def test_get_training_task_params(self): params = ai_worker_api.get_training_task_params(self.workflow_uuid) expected_examples = [ { 'text': EXAMPLES[0]['answer'], 'scores': { u"vøȼȺƀᵾłȺɍɏ": 1, u"ﻭɼค๓๓คɼ": 0 } }, { 'text': EXAMPLES[1]['answer'], 'scores': { u"vøȼȺƀᵾłȺɍɏ": 0, u"ﻭɼค๓๓คɼ": 2 } }, ] self.assertItemsEqual(params['training_examples'], expected_examples) self.assertItemsEqual(params['algorithm_id'], ALGORITHM_ID) def test_get_training_task_params_no_workflow(self): with self.assertRaises(AITrainingRequestError): ai_worker_api.get_training_task_params("invalid_uuid") @mock.patch.object(AITrainingWorkflow.objects, 'get') def test_get_training_task_params_database_error(self, mock_get): mock_get.side_effect = DatabaseError("KABOOM!") with self.assertRaises(AITrainingInternalError): ai_worker_api.get_training_task_params(self.workflow_uuid) def test_create_classifiers(self): ai_worker_api.create_classifiers(self.workflow_uuid, CLASSIFIERS) # Expect that the workflow was marked complete workflow = AITrainingWorkflow.objects.get(uuid=self.workflow_uuid) self.assertIsNot(workflow.completed_at, None) # Expect that the classifier set was created with the correct data self.assertIsNot(workflow.classifier_set, None) saved_classifiers = workflow.classifier_set.classifier_data_by_criterion self.assertItemsEqual(CLASSIFIERS, saved_classifiers) def test_create_classifiers_no_workflow(self): with self.assertRaises(AITrainingRequestError): ai_worker_api.create_classifiers("invalid_uuid", CLASSIFIERS) @mock.patch.object(AITrainingWorkflow.objects, 'get') def test_create_classifiers_database_error(self, mock_get): mock_get.side_effect = DatabaseError("KABOOM!") with self.assertRaises(AITrainingInternalError): ai_worker_api.create_classifiers(self.workflow_uuid, CLASSIFIERS) def test_create_classifiers_serialize_error(self): # Mutate the classifier data so it is NOT JSON-serializable classifiers = copy.deepcopy(CLASSIFIERS) classifiers[u"vøȼȺƀᵾłȺɍɏ"] = datetime.datetime.now() # Expect an error when we try to create the classifiers with self.assertRaises(AITrainingInternalError): ai_worker_api.create_classifiers(self.workflow_uuid, classifiers) def test_create_classifiers_missing_criteria(self): # Remove a criterion from the classifiers dict classifiers = copy.deepcopy(CLASSIFIERS) del classifiers[u"vøȼȺƀᵾłȺɍɏ"] # Expect an error when we try to create the classifiers with self.assertRaises(AITrainingRequestError): ai_worker_api.create_classifiers(self.workflow_uuid, classifiers) def test_create_classifiers_unrecognized_criterion(self): # Add an extra criterion to the classifiers dict classifiers = copy.deepcopy(CLASSIFIERS) classifiers[u"extra_criterion"] = copy.deepcopy(classifiers[u"vøȼȺƀᵾłȺɍɏ"]) # Expect an error when we try to create the classifiers with self.assertRaises(AITrainingRequestError): ai_worker_api.create_classifiers(self.workflow_uuid, classifiers) @mock.patch.object(AIClassifier, 'classifier_data') def test_create_classifiers_upload_error(self, mock_data): # Simulate an error occurring when uploading the trained classifier mock_data.save.side_effect = IOError("OH NO!!!") with self.assertRaises(AITrainingInternalError): ai_worker_api.create_classifiers(self.workflow_uuid, CLASSIFIERS) def test_create_classifiers_twice(self): # Simulate repeated task execution for the same workflow # 
Since these are executed sequentially, the second call should # have no effect. ai_worker_api.create_classifiers(self.workflow_uuid, CLASSIFIERS) ai_worker_api.create_classifiers(self.workflow_uuid, CLASSIFIERS) # Expect that the workflow was marked complete workflow = AITrainingWorkflow.objects.get(uuid=self.workflow_uuid) self.assertIsNot(workflow.completed_at, None) # Expect that the classifier set was created with the correct data self.assertIsNot(workflow.classifier_set, None) saved_classifiers = workflow.classifier_set.classifier_data_by_criterion self.assertItemsEqual(CLASSIFIERS, saved_classifiers) def test_create_classifiers_no_training_examples(self): # Create a workflow with no training examples workflow = AITrainingWorkflow.objects.create(algorithm_id=ALGORITHM_ID) # Expect an error when we try to create classifiers with self.assertRaises(AITrainingInternalError): ai_worker_api.create_classifiers(workflow.uuid, CLASSIFIERS) def test_is_workflow_complete(self): self.assertFalse(ai_worker_api.is_training_workflow_complete(self.workflow_uuid)) workflow = AITrainingWorkflow.objects.get(uuid=self.workflow_uuid) workflow.mark_complete_and_save() self.assertTrue(ai_worker_api.is_training_workflow_complete(self.workflow_uuid)) def test_is_workflow_complete_no_such_workflow(self): with self.assertRaises(AITrainingRequestError): ai_worker_api.is_training_workflow_complete('no such workflow') @mock.patch.object(AITrainingWorkflow.objects, 'get') def test_is_workflow_complete_database_error(self, mock_call): mock_call.side_effect = DatabaseError("Oh no!") with self.assertRaises(AITrainingInternalError): ai_worker_api.is_training_workflow_complete(self.workflow_uuid) class AIWorkerGradingTest(CacheResetTest): """ Tests for the AI API calls a worker would make when completing a grading task. """ SCORES = { u"vøȼȺƀᵾłȺɍɏ": 1, u"ﻭɼค๓๓คɼ": 0 } def setUp(self): """ Create a grading workflow in the database. 
""" # Create a submission submission = sub_api.create_submission(STUDENT_ITEM, ANSWER) self.submission_uuid = submission['uuid'] # Create a workflow for the submission workflow = AIGradingWorkflow.start_workflow(self.submission_uuid, RUBRIC, ALGORITHM_ID) self.workflow_uuid = workflow.uuid # Associate the workflow with classifiers rubric = rubric_from_dict(RUBRIC) classifier_set = AIClassifierSet.create_classifier_set( CLASSIFIERS, rubric, ALGORITHM_ID, STUDENT_ITEM.get('course_id'), STUDENT_ITEM.get('item_id') ) workflow.classifier_set = classifier_set workflow.save() def test_get_grading_task_params(self): params = ai_worker_api.get_grading_task_params(self.workflow_uuid) expected_params = { 'essay_text': ANSWER, 'classifier_set': CLASSIFIERS, 'algorithm_id': ALGORITHM_ID, 'valid_scores': { u"vøȼȺƀᵾłȺɍɏ": [0, 1, 2], u"ﻭɼค๓๓คɼ": [0, 1, 2] } } self.assertItemsEqual(params, expected_params) def test_get_grading_task_params_num_queries(self): with self.assertNumQueries(6): ai_worker_api.get_grading_task_params(self.workflow_uuid) # The second time through we should be caching the queries # to determine the valid scores for a classifier with self.assertNumQueries(2): ai_worker_api.get_grading_task_params(self.workflow_uuid) def test_get_grading_task_params_no_workflow(self): with self.assertRaises(AIGradingRequestError): ai_worker_api.get_grading_task_params("invalid_uuid") def test_get_grading_task_params_no_classifiers(self): # Remove the classifiers from the workflow workflow = AIGradingWorkflow.objects.get(uuid=self.workflow_uuid) workflow.classifier_set = None workflow.save() # Should get an error when retrieving task params with self.assertRaises(AIGradingInternalError): ai_worker_api.get_grading_task_params(self.workflow_uuid) @mock.patch.object(AIGradingWorkflow.objects, 'get') def test_get_grading_task_params_database_error(self, mock_call): mock_call.side_effect = DatabaseError("KABOOM!") with self.assertRaises(AIGradingInternalError): ai_worker_api.get_grading_task_params(self.submission_uuid) def test_invalid_classifier_data(self): # Modify the classifier data so it is not valid JSON invalid_json = "{" for classifier in AIClassifier.objects.all(): classifier.classifier_data.save(uuid4().hex, ContentFile(invalid_json)) # Should get an error when retrieving task params with self.assertRaises(AIGradingInternalError): ai_worker_api.get_grading_task_params(self.workflow_uuid) def test_create_assessment(self): ai_worker_api.create_assessment(self.workflow_uuid, self.SCORES) assessment = Assessment.objects.get(submission_uuid=self.submission_uuid) self.assertEqual(assessment.points_earned, 1) def test_create_assessment_no_workflow(self): with self.assertRaises(AIGradingRequestError): ai_worker_api.create_assessment("invalid_uuid", self.SCORES) def test_create_assessment_workflow_already_complete(self): # Try to create assessments for the same workflow multiple times ai_worker_api.create_assessment(self.workflow_uuid, self.SCORES) ai_worker_api.create_assessment(self.workflow_uuid, self.SCORES) # Expect that only one assessment is created for the submission num_assessments = Assessment.objects.filter(submission_uuid=self.submission_uuid).count() self.assertEqual(num_assessments, 1) @mock.patch.object(AIGradingWorkflow.objects, 'get') def test_create_assessment_database_error_retrieving_workflow(self, mock_call): mock_call.side_effect = DatabaseError("KABOOM!") with self.assertRaises(AIGradingInternalError): ai_worker_api.create_assessment(self.workflow_uuid, self.SCORES) 
@mock.patch.object(Assessment.objects, 'create') def test_create_assessment_database_error_complete_workflow(self, mock_call): mock_call.side_effect = DatabaseError("KABOOM!") with self.assertRaises(AIGradingInternalError): ai_worker_api.create_assessment(self.workflow_uuid, self.SCORES) def test_is_workflow_complete(self): self.assertFalse(ai_worker_api.is_grading_workflow_complete(self.workflow_uuid)) workflow = AIGradingWorkflow.objects.get(uuid=self.workflow_uuid) workflow.mark_complete_and_save() self.assertTrue(ai_worker_api.is_grading_workflow_complete(self.workflow_uuid)) def test_is_workflow_complete_no_such_workflow(self): with self.assertRaises(AIGradingRequestError): ai_worker_api.is_grading_workflow_complete('no such workflow') @mock.patch.object(AIGradingWorkflow.objects, 'get') def test_is_workflow_complete_database_error(self, mock_call): mock_call.side_effect = DatabaseError("Oh no!") with self.assertRaises(AIGradingInternalError): ai_worker_api.is_grading_workflow_complete(self.workflow_uuid)
agpl-3.0
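A recurring device in the tests above is injecting failures with mock.patch.object(..., side_effect=...). Here is a self-contained sketch of that error-injection pattern; Thing and do_work are hypothetical stand-ins for the workflow objects.

import unittest
from unittest import mock

class Thing(object):
    def fetch(self):
        return "real"

def do_work(thing):
    # Convert a low-level failure into a fallback value, the way the API
    # layer above converts DatabaseError into its own exception types.
    try:
        return thing.fetch()
    except IOError:
        return "fallback"

class ErrorInjectionTest(unittest.TestCase):
    def test_fallback_on_error(self):
        # Every call to Thing.fetch inside this block raises IOError.
        with mock.patch.object(Thing, 'fetch', side_effect=IOError("boom")):
            self.assertEqual(do_work(Thing()), "fallback")

if __name__ == '__main__':
    unittest.main()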
seanchen/taiga-back
taiga/base/api/utils/mediatypes.py
18
3603
# Copyright (C) 2015 Andrey Antukh <niwi@niwi.be>
# Copyright (C) 2015 Jesús Espino <jespinog@gmail.com>
# Copyright (C) 2015 David Barragán <bameda@dbarragan.com>
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>.

# This code is partially taken from django-rest-framework:
# Copyright (c) 2011-2014, Tom Christie

"""
Handling of media types, as found in HTTP Content-Type and Accept headers.

See http://www.w3.org/Protocols/rfc2616/rfc2616-sec3.html#sec3.7
"""
from django.http.multipartparser import parse_header

from taiga.base.api import HTTP_HEADER_ENCODING


def media_type_matches(lhs, rhs):
    """
    Returns ``True`` if the media type in the first argument <= the media
    type in the second argument. The media types are strings as described
    by the HTTP spec.

    Valid media type strings include:

    'application/json; indent=4'
    'application/json'
    'text/*'
    '*/*'
    """
    lhs = _MediaType(lhs)
    rhs = _MediaType(rhs)
    return lhs.match(rhs)


def order_by_precedence(media_type_lst):
    """
    Returns a list of sets of media type strings, ordered by precedence.
    Precedence is determined by how specific a media type is:

    3. 'type/subtype; param=val'
    2. 'type/subtype'
    1. 'type/*'
    0. '*/*'
    """
    ret = [set(), set(), set(), set()]
    for media_type in media_type_lst:
        precedence = _MediaType(media_type).precedence
        ret[3 - precedence].add(media_type)
    return [media_types for media_types in ret if media_types]


class _MediaType(object):
    def __init__(self, media_type_str):
        if media_type_str is None:
            media_type_str = ''
        self.orig = media_type_str
        self.full_type, self.params = parse_header(media_type_str.encode(HTTP_HEADER_ENCODING))
        self.main_type, sep, self.sub_type = self.full_type.partition("/")

    def match(self, other):
        """Return true if this MediaType satisfies the given MediaType."""
        for key in self.params.keys():
            if key != "q" and other.params.get(key, None) != self.params.get(key, None):
                return False

        if self.sub_type != "*" and other.sub_type != "*" and other.sub_type != self.sub_type:
            return False

        if self.main_type != "*" and other.main_type != "*" and other.main_type != self.main_type:
            return False

        return True

    @property
    def precedence(self):
        """
        Return a precedence level from 0-3 for the media type given how specific it is.
        """
        if self.main_type == "*":
            return 0
        elif self.sub_type == "*":
            return 1
        elif not self.params or self.params.keys() == ["q"]:
            return 2
        return 3

    def __str__(self):
        return unicode(self).encode("utf-8")

    def __unicode__(self):
        ret = "%s/%s" % (self.main_type, self.sub_type)
        for key, val in self.params.items():
            ret += "; %s=%s" % (key, val)
        return ret
agpl-3.0
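The four precedence buckets above drive content negotiation: more specific Accept entries beat wildcards. The same classification can be illustrated standalone, without the Django parse_header helper or the taiga imports (this is a rough re-implementation for demonstration, not the module itself):

def precedence(media_type):
    """0: */*   1: type/*   2: type/subtype   3: type/subtype; param=value"""
    full_type, _, param_str = media_type.partition(';')
    main_type, _, sub_type = full_type.strip().partition('/')
    params = [p.split('=')[0].strip() for p in param_str.split(';') if p.strip()]
    if main_type == '*':
        return 0
    if sub_type == '*':
        return 1
    if not params or params == ['q']:   # a bare q weight is not more specific
        return 2
    return 3

def order_by_precedence(media_types):
    buckets = [set(), set(), set(), set()]
    for mt in media_types:
        buckets[3 - precedence(mt)].add(mt)
    return [b for b in buckets if b]

print(order_by_precedence(['*/*', 'text/*', 'application/json', 'application/json; indent=4']))
# -> [{'application/json; indent=4'}, {'application/json'}, {'text/*'}, {'*/*'}]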
bennoleslie/gdb
gdb/testsuite/gdb.python/py-section-script.py
23
1961
# Copyright (C) 2010-2012 Free Software Foundation, Inc. # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # This file is part of the GDB testsuite. import re class pp_ss: def __init__(self, val): self.val = val def to_string(self): return "a=<" + str(self.val["a"]) + "> b=<" + str(self.val["b"]) + ">" def lookup_function (val): "Look-up and return a pretty-printer that can print val." # Get the type. type = val.type # If it points to a reference, get the reference. if type.code == gdb.TYPE_CODE_REF: type = type.target () # Get the unqualified type, stripped of typedefs. type = type.unqualified ().strip_typedefs () # Get the type name. typename = type.tag if typename == None: return None # Iterate over local dictionary of types to determine # if a printer is registered for that type. Return an # instantiation of the printer if found. for function in pretty_printers_dict: if function.match (typename): return pretty_printers_dict[function] (val) # Cannot find a pretty printer. Return None. return None def register_pretty_printers (): pretty_printers_dict[re.compile ('^ss$')] = pp_ss pretty_printers_dict = {} register_pretty_printers () gdb.current_progspace().pretty_printers.append (lookup_function)
gpl-2.0
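lookup_function above is regex-keyed dispatch: a dict from compiled patterns to printer classes, scanned against the type name of each value. The gdb module and gdb.Value only exist inside a live gdb session, so the dispatch core can be exercised outside gdb by faking the value object, as in this sketch:

import re

class FakeVal(object):
    """Stand-in for gdb.Value: just carries named fields."""
    def __init__(self, fields):
        self.fields = fields
    def __getitem__(self, key):
        return self.fields[key]

class pp_ss(object):
    def __init__(self, val):
        self.val = val
    def to_string(self):
        return "a=<%s> b=<%s>" % (self.val["a"], self.val["b"])

pretty_printers_dict = {re.compile(r'^ss$'): pp_ss}

def lookup(typename, val):
    # Scan the registry; return an instantiated printer on the first match.
    for pattern, printer in pretty_printers_dict.items():
        if pattern.match(typename):
            return printer(val)
    return None

printer = lookup('ss', FakeVal({'a': 1, 'b': 2}))
print(printer.to_string())   # a=<1> b=<2>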
crasker/scrapy
scrapy/commands/__init__.py
38
3532
""" Base class for Scrapy commands """ import os from optparse import OptionGroup from twisted.python import failure from scrapy.utils.conf import arglist_to_dict from scrapy.exceptions import UsageError class ScrapyCommand(object): requires_project = False crawler_process = None # default settings to be used for this command instead of global defaults default_settings = {} exitcode = 0 def __init__(self): self.settings = None # set in scrapy.cmdline def set_crawler(self, crawler): assert not hasattr(self, '_crawler'), "crawler already set" self._crawler = crawler def syntax(self): """ Command syntax (preferably one-line). Do not include command name. """ return "" def short_desc(self): """ A short description of the command """ return "" def long_desc(self): """A long description of the command. Return short description when not available. It cannot contain newlines, since contents will be formatted by optparser which removes newlines and wraps text. """ return self.short_desc() def help(self): """An extensive help for the command. It will be shown when using the "help" command. It can contain newlines, since not post-formatting will be applied to its contents. """ return self.long_desc() def add_options(self, parser): """ Populate option parse with options available for this command """ group = OptionGroup(parser, "Global Options") group.add_option("--logfile", metavar="FILE", help="log file. if omitted stderr will be used") group.add_option("-L", "--loglevel", metavar="LEVEL", default=None, help="log level (default: %s)" % self.settings['LOG_LEVEL']) group.add_option("--nolog", action="store_true", help="disable logging completely") group.add_option("--profile", metavar="FILE", default=None, help="write python cProfile stats to FILE") group.add_option("--pidfile", metavar="FILE", help="write process ID to FILE") group.add_option("-s", "--set", action="append", default=[], metavar="NAME=VALUE", help="set/override setting (may be repeated)") group.add_option("--pdb", action="store_true", help="enable pdb on failure") parser.add_option_group(group) def process_options(self, args, opts): try: self.settings.setdict(arglist_to_dict(opts.set), priority='cmdline') except ValueError: raise UsageError("Invalid -s value, use -s NAME=VALUE", print_help=False) if opts.logfile: self.settings.set('LOG_ENABLED', True, priority='cmdline') self.settings.set('LOG_FILE', opts.logfile, priority='cmdline') if opts.loglevel: self.settings.set('LOG_ENABLED', True, priority='cmdline') self.settings.set('LOG_LEVEL', opts.loglevel, priority='cmdline') if opts.nolog: self.settings.set('LOG_ENABLED', False, priority='cmdline') if opts.pidfile: with open(opts.pidfile, "w") as f: f.write(str(os.getpid()) + os.linesep) if opts.pdb: failure.startDebugMode() def run(self, args, opts): """ Entry point for running commands """ raise NotImplementedError
bsd-3-clause
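Concrete commands override only a handful of the hooks above. A hedged sketch of a subclass against this optparse-era API (the command takes its name from its module file under scrapy/commands/; the --upper flag and message are illustrative, not a real Scrapy command):

from scrapy.commands import ScrapyCommand

class Command(ScrapyCommand):
    requires_project = False
    default_settings = {'LOG_ENABLED': False}

    def syntax(self):
        return "[options]"

    def short_desc(self):
        return "Print an example message"

    def add_options(self, parser):
        # Keep the global options, then add command-specific ones.
        ScrapyCommand.add_options(self, parser)
        parser.add_option("--upper", action="store_true",
                          help="print the message in upper case")

    def run(self, args, opts):
        msg = "hello from a custom command"
        print(msg.upper() if opts.upper else msg)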
sbt9uc/osf.io
scripts/add_conference.py
61
2995
#!/usr/bin/env python # encoding: utf-8 from website.conferences.model import Conference from website.models import User from modularodm import Q from modularodm.exceptions import ModularOdmException from website.app import init_app import sys import argparse def main(): init_app(set_backends=True, routes=False) args = parse_args() add_conference( endpoint=args.endpoint, name=args.name, active=args.active, info_url=args.info_url, logo_url=args.logo_url, admins=args.admins, public_projects=args.public_projects ) def add_conference(endpoint, name, active, admins, info_url=None, logo_url=None, public_projects=None): try: admin_users = [ User.find_one(Q('username', 'iexact', admin)) for admin in admins ] except ModularOdmException: raise RuntimeError("Admin must be a current registered user on the OSF.") conf = Conference( endpoint=endpoint, name=name, active=active, info_url=info_url, logo_url=logo_url, admins=admin_users ) try: conf.save() except ModularOdmException: raise RuntimeError("Conference already exists.") def parse_args(): parser = argparse.ArgumentParser(description='Create new conference.') parser.add_argument('-e', '--endpoint', dest='endpoint', required=True) parser.add_argument('--n', '--name', dest='name', required=True) parser.add_argument('--active', dest='active', type=bool, default=False) parser.add_argument('--i_url', '--info_url', dest='info_url') parser.add_argument('--l_url', '--logo_url', dest='logo_url') parser.add_argument('--admins', dest='admins', nargs='+') parser.add_argument('--public', '--public_projects', dest='public_projects', type=bool, default=None) return parser.parse_args() from nose.tools import * # noqa from tests.base import OsfTestCase from tests.factories import UserFactory from tests.test_conferences import ConferenceFactory class TestAddConference(OsfTestCase): def test_add_conference(self): user = UserFactory() add_conference('spsp2014', name='SPSP', admins=[user.username], active=True) conf = Conference.find_one(Q('endpoint', 'eq', 'spsp2014')) assert_equal(conf.name, 'SPSP') assert_true(conf.active) assert_in(user, conf.admins) def test_add_conference_when_admin_user_not_registered(self): with assert_raises(RuntimeError): add_conference('spsp2014', name='SPSP', admins=['unregister@hotmail.com'], active=True) def test_add_conference_when_conference_already_exists(self): conf = ConferenceFactory() user = UserFactory() with assert_raises(RuntimeError): add_conference(conf.endpoint, name='SPSP', admins=[user.username], active=True) if __name__ == '__main__': main()
apache-2.0
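One caveat in the argument parsing above: --active and --public use type=bool, and argparse applies the type converter to the raw string, so bool('False') evaluates to True and any non-empty value parses as truthy. A common workaround (a sketch, not part of the script above) is an explicit converter:

import argparse

def str2bool(value):
    value = value.lower()
    if value in ('true', 't', 'yes', '1'):
        return True
    if value in ('false', 'f', 'no', '0'):
        return False
    raise argparse.ArgumentTypeError('expected a boolean, got %r' % value)

parser = argparse.ArgumentParser()
parser.add_argument('--active', type=str2bool, default=False)
print(parser.parse_args(['--active', 'False']).active)   # False, not True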
saxix/django-scheduler
schedule/conf/settings.py
1
2293
from django.utils.translation import ugettext_lazy from django.core.exceptions import ImproperlyConfigured from annoying.functions import get_config fdow_default = 0 # Sunday # Look for FIRST_DAY_OF_WEEK as a locale setting fdow = ugettext_lazy('FIRST_DAY_OF_WEEK') try: FIRST_DAY_OF_WEEK = int(str(fdow)) except ValueError: # Let's try our settings; the fallback conversion is nested because a # second except clause on the same try never catches errors raised here fdow = get_config('FIRST_DAY_OF_WEEK', fdow_default) try: FIRST_DAY_OF_WEEK = int(fdow) except (TypeError, ValueError): raise ImproperlyConfigured("FIRST_DAY_OF_WEEK must be an integer between 0 and 6") AUTH_USER_MODEL = get_config('AUTH_USER_MODEL') # whether to display cancelled occurrences # (if they are displayed then they have a css class "cancelled") # this controls behaviour of the Period.classify_occurrence method SHOW_CANCELLED_OCCURRENCES = get_config('SHOW_CANCELLED_OCCURRENCES', False) # Callable used to check if a user has edit permissions to an event # (and occurrence). Used by the check_edit_permission decorator; # if ob == None we check permission to add an occurrence CHECK_EVENT_PERM_FUNC = get_config('CHECK_EVENT_PERM_FUNC', None) if not CHECK_EVENT_PERM_FUNC: CHECK_EVENT_PERM_FUNC = get_config('CHECK_PERMISSION_FUNC', None) if not CHECK_EVENT_PERM_FUNC: def check_event_permission(ob, user): return user.is_authenticated() CHECK_EVENT_PERM_FUNC = check_event_permission # Callable used to check if a user has edit permissions to a calendar CHECK_CALENDAR_PERM_FUNC = get_config('CHECK_CALENDAR_PERM_FUNC', None) if not CHECK_CALENDAR_PERM_FUNC: def check_calendar_permission(ob, user): return user.is_authenticated() CHECK_CALENDAR_PERM_FUNC = check_calendar_permission # Callable used to customize the event list given for a calendar and user # (e.g. all events on that calendar, those events plus another calendar's events, # or the events filtered based on user permissions) # Imports have to be placed within the function body to avoid circular imports GET_EVENTS_FUNC = get_config('GET_EVENTS_FUNC', None) if not GET_EVENTS_FUNC: def get_events(request, calendar): return calendar.event_set.all() GET_EVENTS_FUNC = get_events # URL to redirect to after an occurrence is canceled OCCURRENCE_CANCEL_REDIRECT = get_config('OCCURRENCE_CANCEL_REDIRECT', None)
bsd-3-clause
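Each hook above is resolved by name through get_config, which reads attributes off the Django settings object and falls back to a permissive default, so a project swaps behaviour by defining a callable of the same signature in its settings module. A hedged sketch of such an override (the staff-only rule and values are illustrative):

# In the project's Django settings (illustrative values):
FIRST_DAY_OF_WEEK = 1            # Monday
SHOW_CANCELLED_OCCURRENCES = True

def CHECK_EVENT_PERM_FUNC(ob, user):
    # ob is the event, or None when checking permission to add an occurrence
    return user.is_authenticated() and user.is_staff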
pathawks/pygments
pygments/styles/vim.py
361
1976
# -*- coding: utf-8 -*- """ pygments.styles.vim ~~~~~~~~~~~~~~~~~~~ A highlighting style for Pygments, inspired by vim. :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ from pygments.style import Style from pygments.token import Keyword, Name, Comment, String, Error, \ Number, Operator, Generic, Whitespace, Token class VimStyle(Style): """ Styles somewhat like vim 7.0 """ background_color = "#000000" highlight_color = "#222222" default_style = "#cccccc" styles = { Token: "#cccccc", Whitespace: "", Comment: "#000080", Comment.Preproc: "", Comment.Special: "bold #cd0000", Keyword: "#cdcd00", Keyword.Declaration: "#00cd00", Keyword.Namespace: "#cd00cd", Keyword.Pseudo: "", Keyword.Type: "#00cd00", Operator: "#3399cc", Operator.Word: "#cdcd00", Name: "", Name.Class: "#00cdcd", Name.Builtin: "#cd00cd", Name.Exception: "bold #666699", Name.Variable: "#00cdcd", String: "#cd0000", Number: "#cd00cd", Generic.Heading: "bold #000080", Generic.Subheading: "bold #800080", Generic.Deleted: "#cd0000", Generic.Inserted: "#00cd00", Generic.Error: "#FF0000", Generic.Emph: "italic", Generic.Strong: "bold", Generic.Prompt: "bold #000080", Generic.Output: "#888", Generic.Traceback: "#04D", Error: "border:#FF0000" }
bsd-2-clause
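Style classes like this are not used directly; they are resolved by name through a formatter. Assuming the stock "vim" style that ships with Pygments (which this file defines), usage looks like:

from pygments import highlight
from pygments.lexers import PythonLexer
from pygments.formatters import HtmlFormatter

code = 'def greet(name):\n    return "hello %s" % name\n'
formatter = HtmlFormatter(style='vim')            # resolves VimStyle by name
print(highlight(code, PythonLexer(), formatter))  # HTML spans per token
print(formatter.get_style_defs('.highlight'))     # matching CSS rules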
CodieCodemonkey/PyCog
tests/pycog/test_statemachine.py
1
1668
"""Test pycog.statemachine""" import sys import os.path as op # Need this so we pick up the code base for which this is a test, not an # installed version. package_dir = op.abspath(op.join('..', 'packages')) if package_dir not in sys.path: sys.path.insert(0, package_dir) example_dir = op.abspath(op.join('..', 'examples')) if example_dir not in sys.path: sys.path.insert(0, example_dir) import unittest from io import StringIO import eight_queens from ps_and_qs import PsAndQs from min_change import MinimalChange class EightQueensTest(unittest.TestCase): def test_eightqueens(self): solver = eight_queens.EightQueens() solver.run() class PsAndQsTest(unittest.TestCase): def test_pppqqqqq(self): test = PsAndQs(StringIO("pppqqqqq")) self.assertTrue(test.run()) def test_qqqqpppp(self): test = PsAndQs(StringIO("qqqqpppp")) self.assertFalse(test.run()) def test_rppppqqqq(self): test = PsAndQs(StringIO("rppppqqqq")) self.assertFalse(test.run()) def test_pppprqqqq(self): test = PsAndQs(StringIO("pppprqqqq")) self.assertFalse(test.run()) def test_ppppqqqqr(self): test = PsAndQs(StringIO("ppppqqqqr")) self.assertFalse(test.run()) def test_qqqqqqq(self): test = PsAndQs(StringIO("qqqqqqq")) self.assertTrue(test.run()) def test_pppppp(self): test = PsAndQs(StringIO("pppppp")) self.assertTrue(test.run()) class MinChange(unittest.TestCase): def test_min_change(self): fsm = MinimalChange(35, [1, 3, 5, 7, 11, 13]) fsm.run() self.assertEqual(fsm.fewest, 3)
lgpl-3.0
ktaneishi/deepchem
contrib/mpnn/mpnn.py
5
5567
# 2017 DeepCrystal Technologies - Patrick Hop # # Message Passing Neural Network SELU [MPNN-S] for Chemical Multigraphs # # MIT License - have fun!! # =========================================================== import math import deepchem as dc from rdkit import Chem, DataStructs from rdkit.Chem import AllChem import torch import torch.nn as nn from torch.autograd import Variable import torch.optim as optim import torch.nn.functional as F from sklearn.metrics import r2_score from sklearn.ensemble import RandomForestRegressor from sklearn import preprocessing import numpy as np import random from collections import OrderedDict from scipy.stats import pearsonr import donkey random.seed(2) torch.manual_seed(2) np.random.seed(2) DATASET = 'az_ppb.csv' print(DATASET) T = 3 BATCH_SIZE = 48 MAXITER = 40000 LIMIT = 0 LR = 5e-4 R = nn.Linear(150, 128) U = {0: nn.Linear(156, 75), 1: nn.Linear(156, 75), 2: nn.Linear(156, 75)} V = {0: nn.Linear(75, 75), 1: nn.Linear(75, 75), 2: nn.Linear(75, 75)} E = nn.Linear(6, 6) def adjust_learning_rate(optimizer, epoch): """Sets the learning rate to the initial LR decayed by .8 every 5 epochs""" lr = LR * (0.9 ** (epoch // 10)) print('new lr [%.5f]' % lr) for param_group in optimizer.param_groups: param_group['lr'] = lr def load_dataset(): train_features, train_labels, val_features, val_labels = donkey.load_dataset(DATASET) scaler = preprocessing.StandardScaler().fit(train_labels) train_labels = scaler.transform(train_labels) val_labels = scaler.transform(val_labels) train_labels = Variable(torch.FloatTensor(train_labels), requires_grad=False) val_labels = Variable(torch.FloatTensor(val_labels), requires_grad=False) return train_features, train_labels, val_features, val_labels def readout(h, h2): catted_reads = map(lambda x: torch.cat([h[x[0]], h2[x[1]]], 1), zip(h2.keys(), h.keys())) activated_reads = map(lambda x: F.selu( R(x) ), catted_reads) readout = Variable(torch.zeros(1, 128)) for read in activated_reads: readout = readout + read return F.tanh( readout ) def message_pass(g, h, k): for v in g.keys(): neighbors = g[v] for neighbor in neighbors: e_vw = neighbor[0] # feature variable w = neighbor[1] m_w = V[k](h[w]) m_e_vw = E(e_vw) reshaped = torch.cat( (h[v], m_w, m_e_vw), 1) h[v] = F.selu(U[k](reshaped)) def construct_multigraph(smile): g = OrderedDict({}) h = OrderedDict({}) molecule = Chem.MolFromSmiles(smile) for i in xrange(0, molecule.GetNumAtoms()): atom_i = molecule.GetAtomWithIdx(i) h[i] = Variable(torch.FloatTensor(dc.feat.graph_features.atom_features(atom_i))).view(1, 75) for j in xrange(0, molecule.GetNumAtoms()): e_ij = molecule.GetBondBetweenAtoms(i, j) if e_ij != None: e_ij = map(lambda x: 1 if x == True else 0, dc.feat.graph_features.bond_features(e_ij)) # ADDED edge feat e_ij = Variable(torch.FloatTensor(e_ij).view(1, 6)) atom_j = molecule.GetAtomWithIdx(j) if i not in g: g[i] = [] g[i].append( (e_ij, j) ) return g, h train_smiles, train_labels, val_smiles, val_labels = load_dataset() linear = nn.Linear(128, 1) params = [{'params': R.parameters()}, {'params': U[0].parameters()}, {'params': U[1].parameters()}, {'params': U[2].parameters()}, {'params': E.parameters()}, {'params': V[0].parameters()}, {'params': V[1].parameters()}, {'params': V[2].parameters()}, {'params': linear.parameters()}] num_epoch = 0 optimizer = optim.Adam(params, lr=LR, weight_decay=1e-4) for i in xrange(0, MAXITER): optimizer.zero_grad() train_loss = Variable(torch.zeros(1, 1)) y_hats_train = [] for j in xrange(0, BATCH_SIZE): sample_index = random.randint(0, 
len(train_smiles) - 2) smile = train_smiles[sample_index] g, h = construct_multigraph(smile) # TODO: cache this g2, h2 = construct_multigraph(smile) for k in xrange(0, T): message_pass(g, h, k) x = readout(h, h2) #x = F.selu( fc(x) ) y_hat = linear(x) y = train_labels[sample_index] y_hats_train.append(y_hat) error = (y_hat - y)*(y_hat - y) / Variable(torch.FloatTensor([BATCH_SIZE])).view(1, 1) train_loss = train_loss + error train_loss.backward() optimizer.step() if i % int(len(train_smiles) / BATCH_SIZE) == 0: val_loss = Variable(torch.zeros(1, 1), requires_grad=False) y_hats_val = [] for j in xrange(0, len(val_smiles)): g, h = construct_multigraph(val_smiles[j]) g2, h2 = construct_multigraph(val_smiles[j]) for k in xrange(0, T): message_pass(g, h, k) x = readout(h, h2) #x = F.selu( fc(x) ) y_hat = linear(x) y = val_labels[j] y_hats_val.append(y_hat) error = (y_hat - y)*(y_hat - y) / Variable(torch.FloatTensor([len(val_smiles)])).view(1, 1) val_loss = val_loss + error y_hats_val = np.array(map(lambda x: x.data.numpy(), y_hats_val)) y_val = np.array(map(lambda x: x.data.numpy(), val_labels)) y_hats_val = y_hats_val.reshape(-1, 1) y_val = y_val.reshape(-1, 1) r2_val_old = r2_score(y_val, y_hats_val) r2_val_new = pearsonr(y_val, y_hats_val)[0]**2 train_loss_ = train_loss.data.numpy()[0] val_loss_ = val_loss.data.numpy()[0] print 'epoch [%i/%i] train_loss [%f] val_loss [%f] r2_val_old [%.4f], r2_val_new [%.4f]' \ % (num_epoch, 100, train_loss_, val_loss_, r2_val_old, r2_val_new) num_epoch += 1
mit
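The script above is Python 2 with pre-0.4 PyTorch idioms (xrange, Variable, print statements). Its core update in message_pass — concatenate a node state with a transformed neighbour state and edge feature, then apply a SELU-activated linear layer — translates to current PyTorch roughly as below. The 75/6 dimensions mirror the atom and bond feature sizes above; the toy graph and everything else is a sketch, not the original model:

import torch
import torch.nn as nn
import torch.nn.functional as F

ATOM_DIM, EDGE_DIM = 75, 6
U = nn.Linear(ATOM_DIM + ATOM_DIM + EDGE_DIM, ATOM_DIM)  # 156 -> 75, as above
V = nn.Linear(ATOM_DIM, ATOM_DIM)
E = nn.Linear(EDGE_DIM, EDGE_DIM)

def message_pass(graph, h):
    """graph: node -> list of (edge_feature, neighbour); h: node -> state tensor."""
    for v, neighbours in graph.items():
        for e_vw, w in neighbours:
            m_w = V(h[w])     # transformed neighbour state
            m_e = E(e_vw)     # transformed edge feature
            h[v] = F.selu(U(torch.cat((h[v], m_w, m_e), dim=1)))
    return h

# Toy two-node graph with one edge in each direction.
h = {0: torch.randn(1, ATOM_DIM), 1: torch.randn(1, ATOM_DIM)}
edge = torch.randn(1, EDGE_DIM)
graph = {0: [(edge, 1)], 1: [(edge, 0)]}
h = message_pass(graph, h)
print(h[0].shape)   # torch.Size([1, 75])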
jeremyfix/pylearn2
pylearn2/sandbox/cuda_convnet/debug.py
41
3019
__authors__ = "Ian Goodfellow" __copyright__ = "Copyright 2010-2012, Universite de Montreal" __credits__ = ["Ian Goodfellow"] __license__ = "3-clause BSD" __maintainer__ = "LISA Lab" __email__ = "pylearn-dev@googlegroups" from pylearn2.testing.skip import skip_if_no_gpu skip_if_no_gpu() import logging import numpy as np from theano import shared from pylearn2.sandbox.cuda_convnet.filter_acts import FilterActs from theano.sandbox.cuda import gpu_from_host from theano.sandbox.cuda import host_from_gpu from theano.tensor.nnet.conv import conv2d from theano import function def main(): logger = logging.getLogger(__name__) # Tests that running FilterActs with no padding is the same as running # theano's conv2D in valid mode rng = np.random.RandomState([2012, 10, 9]) batch_size = 128 rows = 32 cols = 32 channels = 3 filter_rows = 7 filter_cols = filter_rows num_filters = 16 images = shared(rng.uniform(-1., 1., (channels, rows, cols, batch_size)).astype('float32'), name='images') filters = shared(rng.uniform(-1., 1., (channels, filter_rows, filter_cols, num_filters)).astype('float32'), name='filters') gpu_images = gpu_from_host(images) gpu_filters = gpu_from_host(filters) output = FilterActs()(gpu_images, gpu_filters) output = host_from_gpu(output) images_bc01 = images.dimshuffle(3, 0, 1, 2) filters_bc01 = filters.dimshuffle(3, 0, 1, 2) filters_bc01 = filters_bc01[:, :, ::-1, ::-1] output_conv2d = conv2d(images_bc01, filters_bc01, border_mode='valid') output_conv2d = output_conv2d.dimshuffle(1, 2, 3, 0) f = function([], [output, output_conv2d]) def err(): output, output_conv2d = f() diff = output - output_conv2d return np.abs(diff).max() prev_err = err() accepted_steps = 0 while True: logger.debug('Current error: {0}'.format(prev_err)) change_filters = rng.randint(2) if change_filters: target = filters else: target = images old_val = target.get_value() selector = rng.randint(2) if selector == 0: new_val = old_val + rng.uniform(-.1, .1, old_val.shape) else: idx1 = rng.randint(old_val.shape[0]) idx2 = rng.randint(old_val.shape[1]) idx3 = rng.randint(old_val.shape[2]) idx4 = rng.randint(old_val.shape[3]) new_val = old_val.copy() new_val[idx1, idx2, idx3, idx4] += rng.uniform(-1., 1.) new_val = new_val.astype(old_val.dtype) target.set_value(new_val) new_err = err() if new_err <= prev_err: logger.debug( 'Failed to move beyond step {0}'.format(accepted_steps)) target.set_value(old_val) else: prev_err = new_err accepted_steps += 1 if __name__ == "__main__": main()
bsd-3-clause
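Stripped of Theano and the GPU op, the loop above is a random hill climb that perturbs inputs to maximize the discrepancy between two implementations of the same function — a handy way to hunt for numeric divergence. A self-contained numpy sketch of that search (the float32 reduction pair is only an example divergence source):

import numpy as np

def maximize_discrepancy(f, g, x, steps=200, rng=None):
    """Randomly perturb x, keeping changes that increase max |f(x) - g(x)|."""
    rng = rng if rng is not None else np.random.RandomState(0)
    best = np.abs(f(x) - g(x)).max()
    for _ in range(steps):
        candidate = x + rng.uniform(-0.1, 0.1, x.shape)
        err = np.abs(f(candidate) - g(candidate)).max()
        if err > best:                  # accept only steps that grow the gap
            x, best = candidate, err
    return x, best

# Two float32 "implementations" of the same reduction, differing in
# summation order, diverge slightly in the low-order bits.
f = lambda a: a.astype(np.float32).sum(axis=0)
g = lambda a: a.astype(np.float32)[::-1].sum(axis=0)
x, err = maximize_discrepancy(f, g, np.random.RandomState(1).rand(1000, 4))
print(err)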
bradwoo8621/Swift-Study
Instagram/Pods/AVOSCloudCrashReporting/Breakpad/src/tools/gyp/test/hello/gyptest-regyp-output.py
201
1077
#!/usr/bin/env python # Copyright (c) 2013 Google Inc. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """ Verifies that Makefiles get rebuilt when a source gyp file changes and --generator-output is used. """ import TestGyp # Regenerating build files when a gyp file changes is currently only supported # by the make and Android generators, and --generator-output is not supported # by Android and ninja, so we can only test for make. test = TestGyp.TestGyp(formats=['make']) CHDIR='generator-output' test.run_gyp('hello.gyp', '--generator-output=%s' % CHDIR) test.build('hello.gyp', test.ALL, chdir=CHDIR) test.run_built_executable('hello', stdout="Hello, world!\n", chdir=CHDIR) # Sleep so that the changed gyp file will have a newer timestamp than the # previously generated build files. test.sleep() test.write('hello.gyp', test.read('hello2.gyp')) test.build('hello.gyp', test.ALL, chdir=CHDIR) test.run_built_executable('hello', stdout="Hello, two!\n", chdir=CHDIR) test.pass_test()
mit
sadasu/networking-cisco-ml2_ucsm
doc/source/conf.py
1
2473
# -*- coding: utf-8 -*- # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. import os import sys sys.path.insert(0, os.path.abspath('../..')) # -- General configuration ---------------------------------------------------- # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones. extensions = [ 'sphinx.ext.autodoc', #'sphinx.ext.intersphinx', 'oslosphinx' ] # autodoc generation is a bit aggressive and a nuisance when doing heavy # text edit cycles. # execute "export SPHINX_DEBUG=1" in your terminal to disable # The suffix of source filenames. source_suffix = '.rst' # The master toctree document. master_doc = 'index' # General information about the project. project = u'networking-cisco-ml2_ucsm' copyright = u'2013, OpenStack Foundation' # If true, '()' will be appended to :func: etc. cross-reference text. add_function_parentheses = True # If true, the current module name will be prepended to all description # unit titles (such as .. function::). add_module_names = True # The name of the Pygments (syntax highlighting) style to use. pygments_style = 'sphinx' # -- Options for HTML output -------------------------------------------------- # The theme to use for HTML and HTML Help pages. Major themes that come with # Sphinx are currently 'default' and 'sphinxdoc'. # html_theme_path = ["."] # html_theme = '_theme' # html_static_path = ['static'] # Output file base name for HTML help builder. htmlhelp_basename = '%sdoc' % project # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, author, documentclass # [howto/manual]). latex_documents = [ ('index', '%s.tex' % project, u'%s Documentation' % project, u'OpenStack Foundation', 'manual'), ] # Example configuration for intersphinx: refer to the Python standard library. #intersphinx_mapping = {'http://docs.python.org/': None}
apache-2.0
motkeg/Deep-learning
DRL/A2C/agent.py
1
4164
import tensorflow as tf import numpy as np import policy import tensorflow.keras.losses as kls import tensorflow.keras.optimizers as ko class A2CAgent: def __init__(self, model): self.model = model self.params = {'value': 0.5, 'entropy': 0.0001, 'gamma': 0.99} self.model.compile( optimizer=ko.RMSprop(lr=0.0007), # define separate losses for policy logits and value loss=[self._logits_loss, self._value_loss] ) #model.summary() def test(self, env, render=True): obs, done, ep_reward = env.reset(), False, 0 while not done: action, _ = self.model.action_value(obs[None, :]) obs, reward, done, _ = env.step(action) ep_reward += reward if render: env.render() return ep_reward def train(self, env, batch_sz=32, updates=1000): # storage helpers for a single batch of data actions = np.empty((batch_sz,), dtype=np.int32) rewards, dones, values = np.empty((3, batch_sz)) obs_shape = env.observation_space.shape observations = np.empty((batch_sz,) + obs_shape) # collect samples, send to optimizer, repeat updates times ep_rews = [0.0] next_obs = env.reset() for update in range(updates): for step in range(batch_sz): observations[step] = next_obs.copy() a, v = self.model.action_value(next_obs[None, :]) actions[step], values[step] = a, v next_obs, rewards[step], dones[step], _ = env.step(actions[step]) ep_rews[-1] += rewards[step] if dones[step]: ep_rews.append(0.0) next_obs = env.reset() _, next_value = self.model.action_value(next_obs[None, :]) returns, advs = self._returns_advantages(rewards, dones, values, next_value) # a trick to input actions and advantages through same API acts_and_advs = np.concatenate([actions[:, None], advs[:, None]], axis=-1) # performs a full training step on the collected batch # note: no need to mess around with gradients, Keras API handles it losses = self.model.train_on_batch(observations, [acts_and_advs, returns]) return ep_rews def _returns_advantages(self, rewards, dones, values, next_value): # next_value is the bootstrap value estimate of a future state (the critic) returns = np.append(np.zeros_like(rewards), next_value, axis=-1) # returns are calculated as discounted sum of future rewards for t in reversed(range(rewards.shape[0])): returns[t] = rewards[t] + self.params['gamma'] * returns[t+1] * (1-dones[t]) returns = returns[:-1] # advantages are returns - baseline, value estimates in our case advantages = returns - values return returns, advantages def _value_loss(self, returns, value): # value loss as MSE between value estimates and returns return self.params['value']*kls.mean_squared_error(returns, value) def _logits_loss(self, acts_and_advs, logits): # a trick to input actions and advantages through same API actions, advantages = tf.split(acts_and_advs, 2, axis=-1) # polymorphic CE loss fn, supports sparse and weighted # from_logits argument ensures normalized probabilities cross_entropy = kls.CategoricalCrossentropy(from_logits=True) # policy loss is defined by policy gradients, weighted by advantages # note: we only calculate the loss on the actions we've actually taken # thus under the hood a sparse version of CE loss will be executed actions = tf.cast(actions, tf.int32) policy_loss = cross_entropy(actions, logits, sample_weight=advantages) # entropy loss can be calculated via CE over itself entropy_loss = cross_entropy(logits, logits) # here signs are flipped because optimizer minimizes return policy_loss - self.params['entropy']*entropy_loss
apache-2.0
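_returns_advantages is the heart of the agent: discounted returns bootstrapped from the critic's estimate of the next state, with advantages as returns minus the value baseline. The same computation in isolation, numpy only, with gamma matching the 0.99 above:

import numpy as np

def returns_and_advantages(rewards, dones, values, next_value, gamma=0.99):
    returns = np.append(np.zeros_like(rewards), next_value)
    for t in reversed(range(len(rewards))):
        # The future return is cut off wherever an episode ended (dones[t] == 1).
        returns[t] = rewards[t] + gamma * returns[t + 1] * (1 - dones[t])
    returns = returns[:-1]
    return returns, returns - values

rewards = np.array([1.0, 1.0, 1.0])
dones = np.array([0.0, 0.0, 1.0])
values = np.array([0.5, 0.5, 0.5])
returns, advs = returns_and_advantages(rewards, dones, values, next_value=2.0)
print(returns)   # [2.9701 1.99 1.0] -- the episode boundary zeroes the bootstrap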
duphenix/nutrientdb
nutrientdb/download.py
1
3229
#!/usr/bin/env python3 """ download ******** This is the module for downloading the USDA Food and Nutrition database The base url is; https://www.ars.usda.gov/main/site_main.htm?modecode=12-35-45-00 The main database is currently at; https://www.ars.usda.gov/SP2UserFiles/Place/12354500/Data/SR27/dnload/sr27asc.zip The data description file address; https://www.ars.usda.gov/SP2UserFiles/Place/12354500/Data/SR27/sr27_doc.pdf .. Copyright notice is commented out for Sphinx autodoc .. Copyright (C) 2014, 2015 Matthew Carr .. .. This file is part of nutrientdb. .. .. nutrientdb is free software: you can redistribute it and/or modify .. it under the terms of the GNU General Public License as published by .. the Free Software Foundation, either version 3 of the License, or .. (at your option) any later version. .. nutrientdb is distributed in the hope that it will be useful, .. but WITHOUT ANY WARRANTY; without even the implied warranty of .. MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the .. GNU General Public License for more details. .. You should have received a copy of the GNU General Public License .. along with nutrientdb. If not, see <http://www.gnu.org/licenses/>. """ import os import urllib.request import errno import zipfile def make_dir(dir_path): """ Make a directory, unless it already exists. """ if not os.path.exists(dir_path): try: os.makedirs(dir_path) except OSError as exception: if exception.errno != errno.EEXIST: raise def download_sr(file_dir, sr="sr28", overwrite=False): """ Download the Standard Reference, version 28 by default Manually tested for SR25, SR26 and SR27. """ make_dir(file_dir) download_file = os.path.join(file_dir, sr + ".zip") sr_upper = sr.upper() if sr > 'sr26': sr = sr + 'asc' if sr > 'sr27': sr_upper = "SR/" + sr_upper url = ("https://www.ars.usda.gov/SP2UserFiles/Place/12354500/Data/" + sr_upper + "/dnload/" + sr + ".zip") if overwrite or (not os.path.exists(download_file)): """does not overwrite an existing file, unless overwrite == True""" data = urllib.request.urlopen(url) with open(download_file, 'w+b') as file: file.write(data.read()) def expand_files(source_file, expand_dir): """ Extract the data files from the file we downloaded from the website """ if os.path.exists(source_file): make_dir(expand_dir) with zipfile.ZipFile(file=source_file) as zf: zf.extractall(path=expand_dir) if __name__ == "__main__": # This puts the download and data one directory up from # where the script it run parent_dir = os.path.abspath(os.pardir) sr = "sr28" download_files_path = os.path.join(parent_dir, "download_files") download_file = os.path.join(download_files_path, sr + ".zip") raw_data_file_path = os.path.join(parent_dir, "data_files") print(os.getcwd(), "script load") download_sr(download_files_path, sr, overwrite=False) print("file downloaded, or already exists") expand_files(download_file, raw_data_file_path) print("files uncompressed")
gpl-3.0
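The make_dir helper's EEXIST dance predates os.makedirs' exist_ok flag (Python 3.2+), but since the script already targets Python 3 the pattern reduces to a single call; the download-if-absent logic shrinks similarly. A hedged modern equivalent of both helpers (names are mine, not the module's):

import os
import urllib.request

def make_dir(dir_path):
    # exist_ok=True replaces the explicit EEXIST check-and-swallow above.
    os.makedirs(dir_path, exist_ok=True)

def download_if_absent(url, dest, overwrite=False):
    """Fetch url into dest unless it already exists, mirroring download_sr."""
    make_dir(os.path.dirname(dest) or '.')
    if overwrite or not os.path.exists(dest):
        with urllib.request.urlopen(url) as response, open(dest, 'wb') as fh:
            fh.write(response.read())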
allmende/synnefo
snf-stats-app/distribute_setup.py
82
15757
#!python """Bootstrap distribute installation If you want to use setuptools in your package's setup.py, just include this file in the same directory with it, and add this to the top of your setup.py:: from distribute_setup import use_setuptools use_setuptools() If you want to require a specific version of setuptools, set a download mirror, or use an alternate download directory, you can do so by supplying the appropriate options to ``use_setuptools()``. This file can also be run as a script to install or upgrade setuptools. """ import os import sys import time import fnmatch import tempfile import tarfile from distutils import log try: from site import USER_SITE except ImportError: USER_SITE = None try: import subprocess def _python_cmd(*args): args = (sys.executable,) + args return subprocess.call(args) == 0 except ImportError: # will be used for python 2.3 def _python_cmd(*args): args = (sys.executable,) + args # quoting arguments if windows if sys.platform == 'win32': def quote(arg): if ' ' in arg: return '"%s"' % arg return arg args = [quote(arg) for arg in args] return os.spawnl(os.P_WAIT, sys.executable, *args) == 0 DEFAULT_VERSION = "0.6.10" DEFAULT_URL = "http://pypi.python.org/packages/source/d/distribute/" SETUPTOOLS_FAKED_VERSION = "0.6c11" SETUPTOOLS_PKG_INFO = """\ Metadata-Version: 1.0 Name: setuptools Version: %s Summary: xxxx Home-page: xxx Author: xxx Author-email: xxx License: xxx Description: xxx """ % SETUPTOOLS_FAKED_VERSION def _install(tarball): # extracting the tarball tmpdir = tempfile.mkdtemp() log.warn('Extracting in %s', tmpdir) old_wd = os.getcwd() try: os.chdir(tmpdir) tar = tarfile.open(tarball) _extractall(tar) tar.close() # going in the directory subdir = os.path.join(tmpdir, os.listdir(tmpdir)[0]) os.chdir(subdir) log.warn('Now working in %s', subdir) # installing log.warn('Installing Distribute') if not _python_cmd('setup.py', 'install'): log.warn('Something went wrong during the installation.') log.warn('See the error message above.') finally: os.chdir(old_wd) def _build_egg(egg, tarball, to_dir): # extracting the tarball tmpdir = tempfile.mkdtemp() log.warn('Extracting in %s', tmpdir) old_wd = os.getcwd() try: os.chdir(tmpdir) tar = tarfile.open(tarball) _extractall(tar) tar.close() # going in the directory subdir = os.path.join(tmpdir, os.listdir(tmpdir)[0]) os.chdir(subdir) log.warn('Now working in %s', subdir) # building an egg log.warn('Building a Distribute egg in %s', to_dir) _python_cmd('setup.py', '-q', 'bdist_egg', '--dist-dir', to_dir) finally: os.chdir(old_wd) # returning the result log.warn(egg) if not os.path.exists(egg): raise IOError('Could not build the egg.') def _do_download(version, download_base, to_dir, download_delay): egg = os.path.join(to_dir, 'distribute-%s-py%d.%d.egg' % (version, sys.version_info[0], sys.version_info[1])) if not os.path.exists(egg): tarball = download_setuptools(version, download_base, to_dir, download_delay) _build_egg(egg, tarball, to_dir) sys.path.insert(0, egg) import setuptools setuptools.bootstrap_install_from = egg def use_setuptools(version=DEFAULT_VERSION, download_base=DEFAULT_URL, to_dir=os.curdir, download_delay=15, no_fake=True): # making sure we use the absolute path to_dir = os.path.abspath(to_dir) was_imported = 'pkg_resources' in sys.modules or \ 'setuptools' in sys.modules try: try: import pkg_resources if not hasattr(pkg_resources, '_distribute'): if not no_fake: _fake_setuptools() raise ImportError except ImportError: return _do_download(version, download_base, to_dir, download_delay) try: 
pkg_resources.require("distribute>="+version) return except pkg_resources.VersionConflict: e = sys.exc_info()[1] if was_imported: sys.stderr.write( "The required version of distribute (>=%s) is not available,\n" "and can't be installed while this script is running. Please\n" "install a more recent version first, using\n" "'easy_install -U distribute'." "\n\n(Currently using %r)\n" % (version, e.args[0])) sys.exit(2) else: del pkg_resources, sys.modules['pkg_resources'] # reload ok return _do_download(version, download_base, to_dir, download_delay) except pkg_resources.DistributionNotFound: return _do_download(version, download_base, to_dir, download_delay) finally: if not no_fake: _create_fake_setuptools_pkg_info(to_dir) def download_setuptools(version=DEFAULT_VERSION, download_base=DEFAULT_URL, to_dir=os.curdir, delay=15): """Download distribute from a specified location and return its filename `version` should be a valid distribute version number that is available as an egg for download under the `download_base` URL (which should end with a '/'). `to_dir` is the directory where the egg will be downloaded. `delay` is the number of seconds to pause before an actual download attempt. """ # making sure we use the absolute path to_dir = os.path.abspath(to_dir) try: from urllib.request import urlopen except ImportError: from urllib2 import urlopen tgz_name = "distribute-%s.tar.gz" % version url = download_base + tgz_name saveto = os.path.join(to_dir, tgz_name) src = dst = None if not os.path.exists(saveto): # Avoid repeated downloads try: log.warn("Downloading %s", url) src = urlopen(url) # Read/write all in one block, so we don't create a corrupt file # if the download is interrupted. data = src.read() dst = open(saveto, "wb") dst.write(data) finally: if src: src.close() if dst: dst.close() return os.path.realpath(saveto) def _no_sandbox(function): def __no_sandbox(*args, **kw): try: from setuptools.sandbox import DirectorySandbox if not hasattr(DirectorySandbox, '_old'): def violation(*args): pass DirectorySandbox._old = DirectorySandbox._violation DirectorySandbox._violation = violation patched = True else: patched = False except ImportError: patched = False try: return function(*args, **kw) finally: if patched: DirectorySandbox._violation = DirectorySandbox._old del DirectorySandbox._old return __no_sandbox def _patch_file(path, content): """Will backup the file then patch it""" existing_content = open(path).read() if existing_content == content: # already patched log.warn('Already patched.') return False log.warn('Patching...') _rename_path(path) f = open(path, 'w') try: f.write(content) finally: f.close() return True _patch_file = _no_sandbox(_patch_file) def _same_content(path, content): return open(path).read() == content def _rename_path(path): new_name = path + '.OLD.%s' % time.time() log.warn('Renaming %s into %s', path, new_name) os.rename(path, new_name) return new_name def _remove_flat_installation(placeholder): if not os.path.isdir(placeholder): log.warn('Unkown installation at %s', placeholder) return False found = False for file in os.listdir(placeholder): if fnmatch.fnmatch(file, 'setuptools*.egg-info'): found = True break if not found: log.warn('Could not locate setuptools*.egg-info') return log.warn('Removing elements out of the way...') pkg_info = os.path.join(placeholder, file) if os.path.isdir(pkg_info): patched = _patch_egg_dir(pkg_info) else: patched = _patch_file(pkg_info, SETUPTOOLS_PKG_INFO) if not patched: log.warn('%s already patched.', pkg_info) return False # 
now let's move the files out of the way for element in ('setuptools', 'pkg_resources.py', 'site.py'): element = os.path.join(placeholder, element) if os.path.exists(element): _rename_path(element) else: log.warn('Could not find the %s element of the ' 'Setuptools distribution', element) return True _remove_flat_installation = _no_sandbox(_remove_flat_installation) def _after_install(dist): log.warn('After install bootstrap.') placeholder = dist.get_command_obj('install').install_purelib _create_fake_setuptools_pkg_info(placeholder) def _create_fake_setuptools_pkg_info(placeholder): if not placeholder or not os.path.exists(placeholder): log.warn('Could not find the install location') return pyver = '%s.%s' % (sys.version_info[0], sys.version_info[1]) setuptools_file = 'setuptools-%s-py%s.egg-info' % \ (SETUPTOOLS_FAKED_VERSION, pyver) pkg_info = os.path.join(placeholder, setuptools_file) if os.path.exists(pkg_info): log.warn('%s already exists', pkg_info) return log.warn('Creating %s', pkg_info) f = open(pkg_info, 'w') try: f.write(SETUPTOOLS_PKG_INFO) finally: f.close() pth_file = os.path.join(placeholder, 'setuptools.pth') log.warn('Creating %s', pth_file) f = open(pth_file, 'w') try: f.write(os.path.join(os.curdir, setuptools_file)) finally: f.close() _create_fake_setuptools_pkg_info = _no_sandbox(_create_fake_setuptools_pkg_info) def _patch_egg_dir(path): # let's check if it's already patched pkg_info = os.path.join(path, 'EGG-INFO', 'PKG-INFO') if os.path.exists(pkg_info): if _same_content(pkg_info, SETUPTOOLS_PKG_INFO): log.warn('%s already patched.', pkg_info) return False _rename_path(path) os.mkdir(path) os.mkdir(os.path.join(path, 'EGG-INFO')) pkg_info = os.path.join(path, 'EGG-INFO', 'PKG-INFO') f = open(pkg_info, 'w') try: f.write(SETUPTOOLS_PKG_INFO) finally: f.close() return True _patch_egg_dir = _no_sandbox(_patch_egg_dir) def _before_install(): log.warn('Before install bootstrap.') _fake_setuptools() def _under_prefix(location): if 'install' not in sys.argv: return True args = sys.argv[sys.argv.index('install')+1:] for index, arg in enumerate(args): for option in ('--root', '--prefix'): if arg.startswith('%s=' % option): top_dir = arg.split('root=')[-1] return location.startswith(top_dir) elif arg == option: if len(args) > index: top_dir = args[index+1] return location.startswith(top_dir) if arg == '--user' and USER_SITE is not None: return location.startswith(USER_SITE) return True def _fake_setuptools(): log.warn('Scanning installed packages') try: import pkg_resources except ImportError: # we're cool log.warn('Setuptools or Distribute does not seem to be installed.') return ws = pkg_resources.working_set try: setuptools_dist = ws.find(pkg_resources.Requirement.parse('setuptools', replacement=False)) except TypeError: # old distribute API setuptools_dist = ws.find(pkg_resources.Requirement.parse('setuptools')) if setuptools_dist is None: log.warn('No setuptools distribution found') return # detecting if it was already faked setuptools_location = setuptools_dist.location log.warn('Setuptools installation detected at %s', setuptools_location) # if --root or --preix was provided, and if # setuptools is not located in them, we don't patch it if not _under_prefix(setuptools_location): log.warn('Not patching, --root or --prefix is installing Distribute' ' in another location') return # let's see if its an egg if not setuptools_location.endswith('.egg'): log.warn('Non-egg installation') res = _remove_flat_installation(setuptools_location) if not res: return else: log.warn('Egg 
installation') pkg_info = os.path.join(setuptools_location, 'EGG-INFO', 'PKG-INFO') if (os.path.exists(pkg_info) and _same_content(pkg_info, SETUPTOOLS_PKG_INFO)): log.warn('Already patched.') return log.warn('Patching...') # let's create a fake egg replacing setuptools one res = _patch_egg_dir(setuptools_location) if not res: return log.warn('Patched done.') _relaunch() def _relaunch(): log.warn('Relaunching...') # we have to relaunch the process # pip marker to avoid a relaunch bug if sys.argv[:3] == ['-c', 'install', '--single-version-externally-managed']: sys.argv[0] = 'setup.py' args = [sys.executable] + sys.argv sys.exit(subprocess.call(args)) def _extractall(self, path=".", members=None): """Extract all members from the archive to the current working directory and set owner, modification time and permissions on directories afterwards. `path' specifies a different directory to extract to. `members' is optional and must be a subset of the list returned by getmembers(). """ import copy import operator from tarfile import ExtractError directories = [] if members is None: members = self for tarinfo in members: if tarinfo.isdir(): # Extract directories with a safe mode. directories.append(tarinfo) tarinfo = copy.copy(tarinfo) tarinfo.mode = 448 # decimal for oct 0700 self.extract(tarinfo, path) # Reverse sort directories. if sys.version_info < (2, 4): def sorter(dir1, dir2): return cmp(dir1.name, dir2.name) directories.sort(sorter) directories.reverse() else: directories.sort(key=operator.attrgetter('name'), reverse=True) # Set correct owner, mtime and filemode on directories. for tarinfo in directories: dirpath = os.path.join(path, tarinfo.name) try: self.chown(tarinfo, dirpath) self.utime(tarinfo, dirpath) self.chmod(tarinfo, dirpath) except ExtractError: e = sys.exc_info()[1] if self.errorlevel > 1: raise else: self._dbg(1, "tarfile: %s" % e) def main(argv, version=DEFAULT_VERSION): """Install or upgrade setuptools and EasyInstall""" tarball = download_setuptools() _install(tarball) if __name__ == '__main__': main(sys.argv[1:])
gpl-3.0
hcs42/heapkeeper-old
src/test_hkconfig.py
1
5596
#!/usr/bin/python # This file is part of Heapkeeper. # # Heapkeeper is free software: you can redistribute it and/or modify it # under the terms of the GNU General Public License as published by the Free # Software Foundation, either version 3 of the License, or (at your option) any # later version. # # Heapkeeper is distributed in the hope that it will be useful, but # WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or # FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for # more details. # # You should have received a copy of the GNU General Public License along with # Heapkeeper. If not, see <http://www.gnu.org/licenses/>. # Copyright (C) 2010 Csaba Hoch """Tests the hkconfig module. Usage: $ python src/test_hkconfig.py """ import unittest import hkutils import hkconfig class Test__config(unittest.TestCase): """Tests the configuration objects.""" def test__format_3(self): """Tests the following functions: - :func:`hkconfig:unify_format` - :func:`hkconfig:unify_format_3` - :func:`hkconfig:unify_str_to_str_dict` - :func:`hkconfig:unify_server` """ # Specifying only the mandatory fields self.assertEqual( hkconfig.unify_config( {'paths': {'html_dir': '-html_dir'}, 'heaps': {'-heap': {'path': '-path'}}}), {'paths': {'html_dir': '-html_dir'}, 'heaps': {'-heap': {'path': '-path', 'id': '-heap', 'name': '-heap', 'nicknames': {}}}, 'nicknames': {}, 'accounts': {}}) # Specifying all fields self.assertEqual( hkconfig.unify_config( {'paths': {'html_dir': '-html_dir'}, 'heaps': {'-heap': {'path': '-path', 'id': '-id', 'name': '-name', 'server': {'host': '-host2', 'port': '2222', 'imaps': 'true', 'username': '-user2', 'password': '-pw2'}, 'nicknames': {'a': 'b'}}}, 'server': {'host': '-host', 'port': '1111', 'imaps': 'false', 'username': '-username', 'password': '-password'}, 'nicknames': {'c': 'd'}, 'accounts': {'user1': 'pass1', 'user2': 'pass2'}}), {'paths': {'html_dir': '-html_dir'}, 'heaps': {'-heap': {'path': '-path', 'id': '-id', 'name': '-name', 'server': {'host': '-host2', 'port': 2222, 'imaps': 'true', 'username': '-user2', 'password': '-pw2'}, 'nicknames': {'a': 'b'}}}, 'server': {'host': '-host', 'port': 1111, 'imaps': 'false', 'username': '-username', 'password': '-password'}, 'nicknames': {'c': 'd'}, 'accounts': {'user1': 'pass1', 'user2': 'pass2'}}) # Testing when server/password is not specified self.assertEqual( hkconfig.unify_config( {'paths': {'html_dir': '-html_dir'}, 'heaps': {'-heap': {'path': '-path'}}, 'server': {'host': '-host', 'port': '1111', 'username': '-username'}}), {'paths': {'html_dir': '-html_dir'}, 'heaps': {'-heap': {'path': '-path', 'id': '-heap', 'name': '-heap', 'nicknames': {}}}, 'server': {'host': '-host', 'port': 1111, 'username': '-username'}, 'nicknames': {}, 'accounts': {}}) # Testing several heaps self.assertEqual( hkconfig.unify_config( {'paths': {'html_dir': '-html_dir'}, 'heaps': {'-heap1': {'path': '-path1'}, '-heap2': {'path': '-path2'}}}), {'paths': {'html_dir': '-html_dir'}, 'heaps': {'-heap1': {'path': '-path1', 'id': '-heap1', 'name': '-heap1', 'nicknames': {}}, '-heap2': {'path': '-path2', 'id': '-heap2', 'name': '-heap2', 'nicknames': {}}}, 'nicknames': {}, 'accounts': {}}) # Testing several heaps self.assertRaises( KeyError, lambda: hkconfig.unify_config({})) if __name__ == '__main__': hkutils.set_log(False) unittest.main()
gpl-3.0
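The tests pin down the contract of hkconfig.unify_config: mandatory keys must be present (a KeyError otherwise), optional sections get defaults derived from the heap id, and string ports become integers. hkconfig's internals are not shown here, so the following is a generic reimplementation of that contract for illustration, not the real function:

def unify_config(config):
    config.setdefault('nicknames', {})
    config.setdefault('accounts', {})
    config['paths']['html_dir']             # mandatory: KeyError if absent
    for heap_id, heap in config['heaps'].items():
        heap['path']                        # mandatory per heap
        heap.setdefault('id', heap_id)
        heap.setdefault('name', heap_id)
        heap.setdefault('nicknames', {})
        if 'server' in heap:
            heap['server']['port'] = int(heap['server']['port'])
    if 'server' in config:
        config['server']['port'] = int(config['server']['port'])
    return config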
grap/server-tools
__unported__/server_env_base_external_referentials/__openerp__.py
17
1921
# -*- coding: utf-8 -*- ############################################################################## # # Author: Guewen Baconnier # Copyright 2011-2012 Camptocamp SA # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## { "name": "Server environment for base_external_referential", "version": "1.0", "depends": ["base", 'server_environment', 'base_external_referentials'], "author": "Camptocamp", 'license': 'AGPL-3', "description": """This module is based on the server_environment module to use files for configuration. Thus we can have a different file for each environment (dev, test, staging, prod). This module defines the config variables for the base_external_referentials module. In the configuration file, you can configure the URL, login and password of the referentials. Example of the section to put in the configuration file: [external_referential.name_of_my_external_referential] location = http://localhost/magento/ apiusername = my_api_login apipass = my_api_password """, "website": "http://www.camptocamp.com", "category": "Tools", "init_xml": [], "demo_xml": [], "update_xml": [], "installable": False, "active": False, }
agpl-3.0
farhaadila/django-cms
cms/tests/test_nested_plugins.py
18
52220
# -*- coding: utf-8 -*- from __future__ import with_statement import json from djangocms_text_ckeditor.models import Text from cms.api import create_page, add_plugin from cms.constants import PLUGIN_MOVE_ACTION from cms.models import Page from cms.models.placeholdermodel import Placeholder from cms.models.pluginmodel import CMSPlugin from cms.tests.test_plugins import PluginsTestBaseCase from cms.utils.compat.tests import UnittestCompatMixin from cms.utils.copy_plugins import copy_plugins_to URL_CMS_MOVE_PLUGIN = u'/en/admin/cms/page/%d/move-plugin/' URL_CMS_ADD_PLUGIN = u'/en/admin/cms/page/%d/add-plugin/' class NestedPluginsTestCase(PluginsTestBaseCase, UnittestCompatMixin): def reorder_positions(self, plugin=None, parent=None): if parent: parent_id = parent.pk plugin = parent else: parent_id = plugin.parent_id x = 0 for p in CMSPlugin.objects.filter(parent_id=parent_id, language=plugin.language, placeholder_id=plugin.placeholder_id): p.position = x p.save() x += 1 def copy_placeholders_and_check_results(self, placeholders): """ This function is not itself a test; rather, it can be used by any test that has created placeholders. It will check that whatever the plugin structure in the placeholder, it will be copied accurately when they are copied. placeholders is a list of placeholders """ for original_placeholder in placeholders: # get the plugins original_plugins = original_placeholder.get_plugins() # copy them to a new placeholder copied_placeholder = Placeholder.objects.create(slot=original_placeholder.slot) copy_plugins_to( original_placeholder.get_plugins(), copied_placeholder ) copied_plugins = copied_placeholder.get_plugins() # we should find the same number of plugins in both placeholders self.assertEqual( original_plugins.count(), copied_plugins.count() ) # quick check: make sure the two querysets match: for original, copy in zip(original_plugins, copied_plugins): self.assertEqual( Text.objects.get(id=original.id).body, Text.objects.get(id=copy.id).body ) # Now build a *tree* of the plugins, and match those - it's not # enough just to compare querysets as above; we should *also* check # that when we build a tree, the various nodes are assembled as we # would expect. We will pump the trees into a pair of lists: original_plugins_list = [] copied_plugins_list = [] # This function builds the tree of plugins, starting from its roots. # In that respect it's like many of the plugin tree-building # routines elsewhere in the system. def plugin_list_from_tree(roots, plugin_list): for plugin in roots: plugin_list.append(plugin) # recurse over the set of nodes plugin_list_from_tree(plugin.get_children(), plugin_list) # build the tree for each set of plugins plugin_list_from_tree(original_plugins.filter(depth=1), original_plugins_list) plugin_list_from_tree(copied_plugins.filter(depth=1), copied_plugins_list) self.assertEqual(len(original_plugins_list), original_plugins.count()) self.assertEqual(len(copied_plugins_list), copied_plugins.count()) # Check that each pair of items in the two lists match, in lots of # different ways for original, copy in zip(original_plugins_list, copied_plugins_list): original_text_plugin = Text.objects.get(id=original.id) copied_text_plugin = Text.objects.get(id=copy.id) # This first one is a sanity test, just to prove that we aren't # simply comparing *exactly the same items* in all these tests. # It could happen... 
self.assertNotEquals(original.id, copy.id) self.assertEqual( original_text_plugin.body, copied_text_plugin.body ) self.assertEqual( original_text_plugin.depth, copied_text_plugin.depth ) self.assertEqual( original_text_plugin.position, copied_text_plugin.position ) self.assertEqual( original_text_plugin.numchild, copied_text_plugin.numchild ) self.assertEqual( original_text_plugin.get_descendant_count(), copied_text_plugin.get_descendant_count() ) self.assertEqual( original_text_plugin.get_ancestors().count(), copied_text_plugin.get_ancestors().count() ) # just in case the test method that called us wants it: return copied_placeholder def test_plugin_fix_tree(self): """ Tests CMSPlugin.fix_tree by creating a plugin structure, setting the position value to Null for all the plugins and then rebuild the tree. The structure below isn't arbitrary, but has been designed to test various conditions, including: * nodes four levels deep * siblings with and without children 1 2 4 10 8 3 9 5 6 7 """ placeholder = Placeholder(slot=u"some_slot") placeholder.save() # a good idea, if not strictly necessary # plugin in placeholder plugin_1 = add_plugin(placeholder, u"TextPlugin", u"en", body=u"01") # IMPORTANT: plugins must be reloaded, before they can be assigned # as a parent. Otherwise, the Tree structure doesn't seem to rebuild # properly. # child of plugin_1 plugin_1 = self.reload(plugin_1) plugin_2 = add_plugin(placeholder, u"TextPlugin", u"en", # nopyflakes body=u"02", target=plugin_1, ) # create a second child of plugin_1 plugin_1 = self.reload(plugin_1) plugin_3 = add_plugin(placeholder, u"TextPlugin", u"en", # nopyflakes body=u"03", target=plugin_1 ) # child of plugin_2 plugin_2 = self.reload(plugin_2) plugin_4 = add_plugin(placeholder, u"TextPlugin", u"en", # nopyflakes body=u"04", target=plugin_2 ) plugin_1 = self.reload(plugin_1) # nopyflakes # create a second root plugin plugin_5 = add_plugin(placeholder, u"TextPlugin", u"en", body=u"05") left = CMSPlugin.objects.filter(parent__isnull=True).order_by('path')[0] plugin_5 = self.reload(plugin_5) plugin_5 = plugin_5.move(left, pos='right') self.reorder_positions(plugin_5) self.reorder_positions(plugin_2) # child of plugin_5 plugin_5 = self.reload(plugin_5) plugin_6 = add_plugin(placeholder, u"TextPlugin", u"en", # nopyflakes body=u"06", target=plugin_5 ) # child of plugin_6 plugin_5 = self.reload(plugin_5) plugin_7 = add_plugin(placeholder, u"TextPlugin", u"en", # nopyflakes body=u"07", target=plugin_5 ) # another child of plugin_2 plugin_2 = self.reload(plugin_2) plugin_8 = add_plugin(placeholder, u"TextPlugin", u"en", # nopyflakes body=u"08", target=plugin_2 ) # child of plugin_3 plugin_3 = self.reload(plugin_3) plugin_9 = add_plugin(placeholder, u"TextPlugin", u"en", # nopyflakes body=u"09", target=plugin_3 ) # child of plugin_4 plugin_4 = self.reload(plugin_4) plugin_10 = add_plugin(placeholder, u"TextPlugin", u"en", # nopyflakes body=u"10", target=plugin_4 ) original_plugins = (placeholder.get_plugins().order_by('position', 'path')) CMSPlugin.objects.update(position=None) CMSPlugin.fix_tree() new_plugins = list(placeholder.get_plugins().order_by('position', 'path')) self.assertSequenceEqual(original_plugins, new_plugins) def test_plugin_deep_nesting_and_copying(self): """ Create a deeply-nested plugin structure, tests its properties, and tests that it is copied accurately when the placeholder containing them is copied. 
The structure below isn't arbitrary, but has been designed to test various conditions, including: * nodes four levels deep * multiple successive level increases * multiple successive level decreases * successive nodes on the same level followed by level changes * multiple level decreases between successive nodes * siblings with and without children * nodes and branches added to the tree out of sequence First we create the structure: 11 1 2 12 4 10 8 3 9 5 6 7 13 14 and then we move it all around. """ placeholder = Placeholder(slot=u"some_slot") placeholder.save() # a good idea, if not strictly necessary # plugin in placeholder plugin_1 = add_plugin(placeholder, u"TextPlugin", u"en", body=u"01") # IMPORTANT: plugins must be reloaded, before they can be assigned # as a parent. Otherwise, the MPTT structure doesn't seem to rebuild # properly. # child of plugin_1 plugin_1 = self.reload(plugin_1) plugin_2 = add_plugin(placeholder, u"TextPlugin", u"en", body=u"02", target=plugin_1, ) # plugin_2 should be plugin_1's only child # for a single item we use assertSequenceEqual self.assertSequenceEqual( CMSPlugin.objects.get(id=plugin_1.pk).get_children(), [CMSPlugin.objects.get(id=plugin_2.pk)]) # create a second child of plugin_1 plugin_1 = self.reload(plugin_1) plugin_3 = add_plugin(placeholder, u"TextPlugin", u"en", body=u"03", target=plugin_1 ) # plugin_2 & plugin_3 should be plugin_1's children # for multiple items we use assertSequenceEqual, because # assertSequenceEqual may re-order the list without warning self.assertSequenceEqual( CMSPlugin.objects.get(id=plugin_1.pk).get_children(), [ CMSPlugin.objects.get(id=plugin_2.pk), CMSPlugin.objects.get(id=plugin_3.pk), ]) # child of plugin_2 plugin_2 = self.reload(plugin_2) plugin_4 = add_plugin(placeholder, u"TextPlugin", u"en", body=u"04", target=plugin_2 ) # plugin_4 should be plugin_2's child self.assertSequenceEqual( CMSPlugin.objects.get(id=plugin_2.pk).get_children(), [CMSPlugin.objects.get(id=plugin_4.pk)]) # 2,3 & 4 should be descendants of 1 self.assertSequenceEqual( CMSPlugin.objects.get(id=plugin_1.pk).get_descendants(), [ # note path ordering of MP reflected here: CMSPlugin.objects.get(id=plugin_2.pk), CMSPlugin.objects.get(id=plugin_4.pk), CMSPlugin.objects.get(id=plugin_3.pk), ], ) plugin_1 = self.reload(plugin_1) # create a second root plugin plugin_5 = add_plugin(placeholder, u"TextPlugin", u"en", body=u"05") left = CMSPlugin.objects.filter(parent__isnull=True).order_by('path')[0] plugin_5 = self.reload(plugin_5) plugin_5 = plugin_5.move(left, pos='right') self.reorder_positions(plugin_5) self.reorder_positions(plugin_2) # child of plugin_5 plugin_5 = self.reload(plugin_5) plugin_6 = add_plugin(placeholder, u"TextPlugin", u"en", body=u"06", target=plugin_5 ) # plugin_6 should be plugin_5's child self.assertSequenceEqual( CMSPlugin.objects.get(id=plugin_5.pk).get_children(), [CMSPlugin.objects.get(id=plugin_6.pk)]) # child of plugin_6 plugin_5 = self.reload(plugin_5) plugin_7 = add_plugin(placeholder, u"TextPlugin", u"en", body=u"07", target=plugin_5 ) # plugin_7 should be plugin_5's child self.assertSequenceEqual( CMSPlugin.objects.get(id=plugin_5.pk).get_children(), [ CMSPlugin.objects.get(id=plugin_6.pk), CMSPlugin.objects.get(id=plugin_7.pk) ]) # 6 & 7 should be descendants of 5 self.assertSequenceEqual( CMSPlugin.objects.get(id=plugin_5.pk).get_descendants(), [ CMSPlugin.objects.get(id=plugin_6.pk), CMSPlugin.objects.get(id=plugin_7.pk), ]) # another child of plugin_2 plugin_2 = self.reload(plugin_2) plugin_8 = 
add_plugin(placeholder, u"TextPlugin", u"en", body=u"08", target=plugin_2 ) # plugin_4 should be plugin_2's child self.assertSequenceEqual( CMSPlugin.objects.get(id=plugin_2.pk).get_children(), [ CMSPlugin.objects.get(id=plugin_4.pk), CMSPlugin.objects.get(id=plugin_8.pk), ]) # child of plugin_3 plugin_3 = self.reload(plugin_3) plugin_9 = add_plugin(placeholder, u"TextPlugin", u"en", body=u"09", target=plugin_3 ) # plugin_9 should be plugin_3's child self.assertSequenceEqual( CMSPlugin.objects.get(id=plugin_3.pk).get_children(), [CMSPlugin.objects.get(id=plugin_9.pk)]) # child of plugin_4 plugin_4 = self.reload(plugin_4) plugin_10 = add_plugin(placeholder, u"TextPlugin", u"en", body=u"10", target=plugin_4 ) # plugin_10 should be plugin_4's child self.assertSequenceEqual( CMSPlugin.objects.get(id=plugin_4.pk).get_children(), [CMSPlugin.objects.get(id=plugin_10.pk)]) original_plugins = placeholder.get_plugins() self.assertEqual(original_plugins.count(), 10) # elder sibling of plugin_1 plugin_1 = self.reload(plugin_1) plugin_11 = add_plugin(placeholder, u"TextPlugin", u"en", body=u"11", target=plugin_1, position="left" ) self.assertSequenceEqual( CMSPlugin.objects.get(id=plugin_1.pk).get_children(), [ CMSPlugin.objects.get(id=plugin_2.pk), CMSPlugin.objects.get(id=plugin_3.pk) ]) # elder sibling of plugin_4 plugin_4 = self.reload(plugin_4) plugin_12 = add_plugin(placeholder, u"TextPlugin", u"en", body=u"12", target=plugin_4, position="left" ) self.assertSequenceEqual( CMSPlugin.objects.get(id=plugin_2.pk).get_children(), [ CMSPlugin.objects.get(id=plugin_12.pk), CMSPlugin.objects.get(id=plugin_4.pk), CMSPlugin.objects.get(id=plugin_8.pk) ]) # younger sibling of plugin_7 plugin_7 = self.reload(plugin_7) plugin_13 = add_plugin(placeholder, u"TextPlugin", u"en", body=u"13", target=plugin_7, position="right" ) self.assertSequenceEqual( CMSPlugin.objects.get(id=plugin_5.pk).get_children(), [ CMSPlugin.objects.get(id=plugin_6.pk), CMSPlugin.objects.get(id=plugin_7.pk), CMSPlugin.objects.get(id=plugin_13.pk) ]) # new sibling of plugin_5 plugin_5 = self.reload(plugin_5) plugin_14 = add_plugin(placeholder, u"TextPlugin", u"en", body=u"14" ) self.assertSequenceEqual( CMSPlugin.objects.filter(depth=1).order_by('path'), [ CMSPlugin.objects.get(id=plugin_11.pk), CMSPlugin.objects.get(id=plugin_1.pk), CMSPlugin.objects.get(id=plugin_5.pk), CMSPlugin.objects.get(id=plugin_14.pk) ]) self.copy_placeholders_and_check_results([placeholder]) # now let's move plugins around in the tree # move plugin_2 before plugin_11 plugin_2 = self.reload(plugin_2) plugin_1 = self.reload(plugin_1) old_parent = plugin_2.parent plugin_2.parent_id = plugin_1.parent_id plugin_2.save() plugin_2 = plugin_2.move(target=plugin_1, pos="left") self.reorder_positions(parent=old_parent) self.reorder_positions(plugin_2) self.copy_placeholders_and_check_results([placeholder]) # move plugin_6 after plugin_7 plugin_6 = self.reload(plugin_6) plugin_7 = self.reload(plugin_7) old_parent = plugin_6.parent plugin_6.parent_id = plugin_7.parent_id plugin_6.save() plugin_6 = plugin_6.move(target=plugin_7, pos="right") self.reorder_positions(parent=old_parent) self.reorder_positions(plugin_6) self.copy_placeholders_and_check_results([placeholder]) # move plugin_3 before plugin_2 plugin_2 = self.reload(plugin_2) plugin_3 = self.reload(plugin_3) old_parent = plugin_3.parent plugin_3.parent_id = plugin_2.parent_id plugin_3.save() plugin_3 = plugin_3.move(target=plugin_2, pos="left") self.reorder_positions(parent=old_parent) 
self.reorder_positions(plugin_3) self.copy_placeholders_and_check_results([placeholder]) # make plugin_3 plugin_2's first-child plugin_2 = self.reload(plugin_2) plugin_3 = self.reload(plugin_3) old_parent = plugin_3.parent plugin_3.parent_id = plugin_2.pk plugin_3.save() plugin_3 = plugin_3.move(target=plugin_2, pos="first-child") self.reorder_positions(CMSPlugin.objects.filter(placeholder_id=plugin_3.placeholder_id, language=plugin_3.language, depth=1)[0]) self.reorder_positions(plugin_3) self.copy_placeholders_and_check_results([placeholder]) # make plugin_7 plugin_2's first-child plugin_3 = self.reload(plugin_3) plugin_7 = self.reload(plugin_7) old_parent = plugin_7.parent plugin_7.parent_id = plugin_3.parent_id plugin_7.save() plugin_7 = plugin_7.move(target=plugin_3, pos="right") self.reorder_positions(parent=old_parent) self.reorder_positions(plugin_7) self.copy_placeholders_and_check_results([placeholder, ]) def test_nested_plugin_on_page(self): """ Validate a textplugin with a nested link plugin mptt values are correctly showing a parent child relationship of a nested plugin """ with self.settings(CMS_PERMISSION=False): # setup page 1 page_one = create_page(u"Three Placeholder", u"col_three.html", u"en", position=u"last-child", published=True, in_navigation=True) page_one_ph_two = page_one.placeholders.get(slot=u"col_left") # add a plugin pre_nesting_body = u"<p>the nested text plugin with a link inside</p>" text_plugin = add_plugin(page_one_ph_two, u"TextPlugin", u"en", body=pre_nesting_body) # prepare nestin plugin page_one_ph_two = self.reload(page_one_ph_two) text_plugin = self.reload(text_plugin) link_plugin = add_plugin(page_one_ph_two, u"LinkPlugin", u"en", target=text_plugin) link_plugin.name = u"django-cms Link" link_plugin.url = u"https://www.django-cms.org" # as for some reason mptt does not # update the parent child relationship # in the add_plugin method when a target present # but this is not the topic of the test link_plugin.parent = text_plugin link_plugin.save() # reloading needs to be done after every save link_plugin = self.reload(link_plugin) text_plugin = self.reload(text_plugin) # mptt related insertion correct? 
msg = u"parent plugin right is not updated, child not inserted correctly" self.assertTrue(text_plugin.position == link_plugin.position, msg=msg) msg = u"link has no parent" self.assertFalse(link_plugin.parent is None, msg=msg) msg = u"parent plugin path is not updated, child not inserted correctly" self.assertTrue(text_plugin.path == link_plugin.path[:4], msg=msg) msg = u"child level is not bigger than parent level" self.assertTrue(text_plugin.depth < link_plugin.depth, msg=msg) # add the link plugin to the body # emulate the editor in admin that adds some txt for the nested plugin in_txt = u"""<img id="plugin_obj_%s" title="Link" alt="Link" src="/static/cms/img/icons/plugins/link.png">""" nesting_body = u"%s<p>%s</p>" % (text_plugin.body, (in_txt % (link_plugin.id))) text_plugin.body = nesting_body text_plugin.save() text_plugin = self.reload(text_plugin) # none of the descendants should have a placeholder other then my own one self.assertEqual(text_plugin.get_descendants().exclude(placeholder=text_plugin.placeholder).count(), 0) post_add_plugin_count = CMSPlugin.objects.count() self.assertEqual(post_add_plugin_count, 2) def test_copy_page_nested_plugin(self): """ Test to verify that page copy with a nested plugin works page one - 3 placeholder col_sidebar: 1 text plugin col_left: 1 text plugin with nested link plugin col_right: no plugin page two (copy target) Verify copied page, placeholders, plugins and body text """ with self.settings(CMS_PERMISSION=False): # setup page 1 page_one = create_page(u"Three Placeholder", u"col_three.html", u"en", position=u"last-child", published=True, in_navigation=True) page_one_ph_one = page_one.placeholders.get(slot=u"col_sidebar") page_one_ph_two = page_one.placeholders.get(slot=u"col_left") page_one.placeholders.get(slot=u"col_right") # add the text plugin to placeholder one text_plugin_en = add_plugin(page_one_ph_one, u"TextPlugin", u"en", body="Hello World") self.assertEqual(text_plugin_en.id, CMSPlugin.objects.all()[0].id) self.assertEqual(text_plugin_en.get_children().count(), 0) pre_add_plugin_count = CMSPlugin.objects.count() self.assertEqual(pre_add_plugin_count, 1) ### # add a plugin to placeholder two ### pre_nesting_body = u"<p>the nested text plugin with a link inside</p>" text_plugin_two = add_plugin(page_one_ph_two, u"TextPlugin", u"en", body=pre_nesting_body) text_plugin_two = self.reload(text_plugin_two) # prepare nesting plugin page_one_ph_two = self.reload(page_one_ph_two) text_plugin_two = self.reload(text_plugin_two) link_plugin = add_plugin(page_one_ph_two, u"LinkPlugin", u"en", target=text_plugin_two) link_plugin.name = u"django-cms Link" link_plugin.url = u"https://www.django-cms.org" link_plugin.parent = text_plugin_two link_plugin.save() link_plugin = self.reload(link_plugin) text_plugin_two = self.reload(text_plugin_two) in_txt = """<img id="plugin_obj_%s" title="Link" alt="Link" src="/static/cms/img/icons/plugins/link.png">""" nesting_body = "%s<p>%s</p>" % (text_plugin_two.body, (in_txt % (link_plugin.id))) # emulate the editor in admin that adds some txt for the nested plugin text_plugin_two.body = nesting_body text_plugin_two.save() text_plugin_two = self.reload(text_plugin_two) # the link is attached as a child? 
self.assertEqual(text_plugin_two.get_children().count(), 1) post_add_plugin_count = CMSPlugin.objects.filter(placeholder__page__publisher_is_draft=True).count() self.assertEqual(post_add_plugin_count, 3) page_one.save() # get the plugins from the original page page_one = self.reload(page_one) page_one_ph_one = page_one.placeholders.get(slot=u"col_sidebar") page_one_ph_two = page_one.placeholders.get(slot=u"col_left") page_one_ph_three = page_one.placeholders.get(slot=u"col_right") # verify that the plugins got created org_placeholder_one_plugins = page_one_ph_one.get_plugins() self.assertEqual(len(org_placeholder_one_plugins), 1) org_placeholder_two_plugins = page_one_ph_two.get_plugins() self.assertEqual(len(org_placeholder_two_plugins), 2) org_placeholder_three_plugins = page_one_ph_three.get_plugins() self.assertEqual(len(org_placeholder_three_plugins), 0) self.assertEqual(page_one.placeholders.count(), 3) placeholder_count = Placeholder.objects.filter(page__publisher_is_draft=True).count() self.assertEqual(placeholder_count, 3) self.assertEqual(CMSPlugin.objects.filter(placeholder__page__publisher_is_draft=True).count(), 3) ## # setup page_copy_target page ## page_copy_target = create_page("Three Placeholder - page copy target", "col_three.html", "en", position="last-child", published=True, in_navigation=True) all_page_count = Page.objects.drafts().count() pre_copy_placeholder_count = Placeholder.objects.filter(page__publisher_is_draft=True).count() self.assertEqual(pre_copy_placeholder_count, 6) # copy the page superuser = self.get_superuser() with self.login_user_context(superuser): page_two = self.copy_page(page_one, page_copy_target) # validate the expected pages,placeholders,plugins,pluginbodies after_copy_page_plugin_count = CMSPlugin.objects.filter(placeholder__page__publisher_is_draft=True).count() self.assertEqual(after_copy_page_plugin_count, 6) # check the amount of copied stuff after_copy_page_count = Page.objects.drafts().count() after_copy_placeholder_count = Placeholder.objects.filter(page__publisher_is_draft=True).count() self.assertGreater(after_copy_page_count, all_page_count, u"no new page after copy") self.assertGreater(after_copy_page_plugin_count, post_add_plugin_count, u"plugin count is not grown") self.assertGreater(after_copy_placeholder_count, pre_copy_placeholder_count, u"placeholder count is not grown") self.assertEqual(after_copy_page_count, 3, u"no new page after copy") # original placeholder page_one = self.reload(page_one) page_one_ph_one = page_one.placeholders.get(slot=u"col_sidebar") page_one_ph_two = page_one.placeholders.get(slot=u"col_left") page_one_ph_three = page_one.placeholders.get(slot=u"col_right") # check if there are multiple pages assigned to this placeholders found_page = page_one_ph_one.page if page_one_ph_one else None self.assertEqual(found_page, page_one) found_page = page_one_ph_two.page if page_one_ph_two else None self.assertEqual(found_page, page_one) found_page = page_one_ph_three.page if page_one_ph_three else None self.assertEqual(found_page, page_one) page_two = self.reload(page_two) page_two_ph_one = page_two.placeholders.get(slot=u"col_sidebar") page_two_ph_two = page_two.placeholders.get(slot=u"col_left") page_two_ph_three = page_two.placeholders.get(slot=u"col_right") # check if there are multiple pages assigned to this placeholders found_page = page_two_ph_one.page if page_two_ph_one else None self.assertEqual(found_page, page_two) found_page = page_two_ph_two.page if page_two_ph_two else None 
self.assertEqual(found_page, page_two) found_page = page_two_ph_three.page if page_two_ph_three else None self.assertEqual(found_page, page_two) # check the stored placeholders org vs copy msg = 'placehoder ids copy:%s org:%s copied page %s are identical - tree broken' % ( page_two_ph_one.pk, page_one_ph_one.pk, page_two.pk) self.assertNotEquals(page_two_ph_one.pk, page_one_ph_one.pk, msg) msg = 'placehoder ids copy:%s org:%s copied page %s are identical - tree broken' % ( page_two_ph_two.pk, page_one_ph_two.pk, page_two.pk) self.assertNotEquals(page_two_ph_two.pk, page_one_ph_two.pk, msg) msg = 'placehoder ids copy:%s org:%s copied page %s are identical - tree broken' % ( page_two_ph_three.pk, page_one_ph_three.pk, page_two.pk) self.assertNotEquals(page_two_ph_three.pk, page_one_ph_three.pk, msg) # get the plugins from the original page org_placeholder_one_plugins = page_one_ph_one.get_plugins() self.assertEqual(len(org_placeholder_one_plugins), 1) org_placeholder_two_plugins = page_one_ph_two.get_plugins() self.assertEqual(len(org_placeholder_two_plugins), 2) org_placeholder_three_plugins = page_one_ph_three.get_plugins() self.assertEqual(len(org_placeholder_three_plugins), 0) # get the plugins from the copied page copied_placeholder_one_plugins = page_two_ph_one.get_plugins() self.assertEqual(len(copied_placeholder_one_plugins), 1) copied_placeholder_two_plugins = page_two_ph_two.get_plugins() self.assertEqual(len(copied_placeholder_two_plugins), 2) copied_placeholder_three_plugins = page_two_ph_three.get_plugins() self.assertEqual(len(copied_placeholder_three_plugins), 0) # verify the plugins got copied # placeholder 1 count_plugins_copied = len(copied_placeholder_one_plugins) count_plugins_org = len(org_placeholder_one_plugins) msg = u"plugin count %s %s for placeholder one not equal" % (count_plugins_copied, count_plugins_org) self.assertEqual(count_plugins_copied, count_plugins_org, msg) # placeholder 2 count_plugins_copied = len(copied_placeholder_two_plugins) count_plugins_org = len(org_placeholder_two_plugins) msg = u"plugin count %s %s for placeholder two not equal" % (count_plugins_copied, count_plugins_org) self.assertEqual(count_plugins_copied, count_plugins_org, msg) # placeholder 3 count_plugins_copied = len(copied_placeholder_three_plugins) count_plugins_org = len(org_placeholder_three_plugins) msg = u"plugin count %s %s for placeholder three not equal" % (count_plugins_copied, count_plugins_org) self.assertEqual(count_plugins_copied, count_plugins_org, msg) # verify the body of text plugin with nested link plugin # org to copied org_nested_text_plugin = None # do this iteration to find the real text plugin with the attached link # the inheritance mechanism for the cmsplugins works through # (tuple)get_plugin_instance() for x in org_placeholder_two_plugins: if x.plugin_type == u"TextPlugin": instance = x.get_plugin_instance()[0] if instance.body.startswith(pre_nesting_body): org_nested_text_plugin = instance break copied_nested_text_plugin = None for x in copied_placeholder_two_plugins: if x.plugin_type == u"TextPlugin": instance = x.get_plugin_instance()[0] if instance.body.startswith(pre_nesting_body): copied_nested_text_plugin = instance break msg = u"orginal nested text plugin not found" self.assertNotEquals(org_nested_text_plugin, None, msg=msg) msg = u"copied nested text plugin not found" self.assertNotEquals(copied_nested_text_plugin, None, msg=msg) # get the children ids of the texplugin with a nested link # to check if the body of the text is genrated correctly 
org_link_child_plugin = org_nested_text_plugin.get_children()[0] copied_link_child_plugin = copied_nested_text_plugin.get_children()[0] # validate the textplugin body texts msg = u"org plugin and copied plugin are the same" self.assertTrue(org_link_child_plugin.id != copied_link_child_plugin.id, msg) needle = u"plugin_obj_%s" msg = u"child plugin id differs to parent in body plugin_obj_id" # linked child is in body self.assertTrue(org_nested_text_plugin.body.find(needle % (org_link_child_plugin.id)) != -1, msg) msg = u"copy: child plugin id differs to parent in body plugin_obj_id" self.assertTrue(copied_nested_text_plugin.body.find(needle % (copied_link_child_plugin.id)) != -1, msg) # really nothing else msg = u"child link plugin id differs to parent body plugin_obj_id" self.assertTrue(org_nested_text_plugin.body.find(needle % (copied_link_child_plugin.id)) == -1, msg) msg = u"copy: child link plugin id differs to parent body plugin_obj_id" self.assertTrue(copied_nested_text_plugin.body.find(needle % (org_link_child_plugin.id)) == -1, msg) # now reverse lookup the placeholders from the plugins org_placeholder = org_link_child_plugin.placeholder copied_placeholder = copied_link_child_plugin.placeholder msg = u"placeholder of the orginal plugin and copied plugin are the same" ok = ((org_placeholder.id != copied_placeholder.id)) self.assertTrue(ok, msg) def test_copy_page_nested_plugin_moved_parent_plugin(self): """ Test to verify that page copy with a nested plugin works when a plugin with child got moved to another placeholder page one - 3 placeholder col_sidebar: 1 text plugin col_left: 1 text plugin with nested link plugin col_right: no plugin page two (copy target) step2: move the col_left text plugin to col_right col_sidebar: 1 text plugin col_left: no plugin col_right: 1 text plugin with nested link plugin verify the copied page structure """ with self.settings(CMS_PERMISSION=False): # setup page 1 page_one = create_page(u"Three Placeholder", u"col_three.html", u"en", position=u"last-child", published=True, in_navigation=True) page_one_ph_one = page_one.placeholders.get(slot=u"col_sidebar") page_one_ph_two = page_one.placeholders.get(slot=u"col_left") page_one.placeholders.get(slot=u"col_right") # add the text plugin to placeholder one text_plugin_en = add_plugin(page_one_ph_one, u"TextPlugin", u"en", body=u"Hello World") self.assertEqual(text_plugin_en.id, CMSPlugin.objects.all()[0].id) self.assertEqual(text_plugin_en.get_children().count(), 0) pre_add_plugin_count = CMSPlugin.objects.count() self.assertEqual(pre_add_plugin_count, 1) # add a plugin to placeholder twho pre_nesting_body = u"<p>the nested text plugin with a link inside</p>" text_plugin_two = add_plugin(page_one_ph_two, u"TextPlugin", u"en", body=pre_nesting_body) text_plugin_two = self.reload(text_plugin_two) # prepare nestin plugin page_one_ph_two = self.reload(page_one_ph_two) text_plugin_two = self.reload(text_plugin_two) link_plugin = add_plugin(page_one_ph_two, u"LinkPlugin", u"en", target=text_plugin_two) link_plugin.name = u"django-cms Link" link_plugin.url = u"https://www.django-cms.org" link_plugin.parent = text_plugin_two link_plugin.save() # reload after every save link_plugin = self.reload(link_plugin) text_plugin_two = self.reload(text_plugin_two) in_txt = u"""<img id="plugin_obj_%s" title="Link" alt="Link" src="/static/cms/img/icons/plugins/link.png">""" nesting_body = "%s<p>%s</p>" % (text_plugin_two.body, (in_txt % (link_plugin.id))) # emulate the editor in admin that adds some txt for the nested plugin 
text_plugin_two.body = nesting_body text_plugin_two.save() text_plugin_two = self.reload(text_plugin_two) # the link is attached as a child? self.assertEqual(text_plugin_two.get_children().count(), 1) post_add_plugin_count = CMSPlugin.objects.count() self.assertEqual(post_add_plugin_count, 3) page_one.save() # get the plugins from the original page page_one = self.reload(page_one) page_one_ph_one = page_one.placeholders.get(slot=u"col_sidebar") page_one_ph_two = page_one.placeholders.get(slot=u"col_left") page_one_ph_three = page_one.placeholders.get(slot=u"col_right") # verify the plugins got created org_placeholder_one_plugins = page_one_ph_one.get_plugins() self.assertEqual(len(org_placeholder_one_plugins), 1) org_placeholder_two_plugins = page_one_ph_two.get_plugins() self.assertEqual(len(org_placeholder_two_plugins), 2) org_placeholder_three_plugins = page_one_ph_three.get_plugins() self.assertEqual(len(org_placeholder_three_plugins), 0) self.assertEqual(page_one.placeholders.count(), 3) placeholder_count = Placeholder.objects.filter(page__publisher_is_draft=True).count() self.assertEqual(placeholder_count, 3) self.assertEqual(CMSPlugin.objects.count(), 3) # setup page_copy_target page_copy_target = create_page("Three Placeholder - page copy target", "col_three.html", "en", position="last-child", published=True, in_navigation=True) all_page_count = Page.objects.drafts().count() pre_copy_placeholder_count = Placeholder.objects.filter(page__publisher_is_draft=True).count() self.assertEqual(pre_copy_placeholder_count, 6) superuser = self.get_superuser() with self.login_user_context(superuser): # now move the parent text plugin to another placeholder post_data = { 'placeholder_id': page_one_ph_three.id, 'plugin_id': text_plugin_two.id, 'plugin_language':'en', 'plugin_parent':'', } plugin_class = text_plugin_two.get_plugin_class_instance() expected = {'reload': plugin_class.requires_reload(PLUGIN_MOVE_ACTION)} edit_url = URL_CMS_MOVE_PLUGIN % page_one.id response = self.client.post(edit_url, post_data) self.assertEqual(response.status_code, 200) self.assertEqual(json.loads(response.content.decode('utf8')), expected) # check if the plugin got moved page_one = self.reload(page_one) self.reload(text_plugin_two) page_one_ph_one = page_one.placeholders.get(slot=u"col_sidebar") page_one_ph_two = page_one.placeholders.get(slot=u"col_left") page_one_ph_three = page_one.placeholders.get(slot=u"col_right") org_placeholder_one_plugins = page_one_ph_one.get_plugins() self.assertEqual(len(org_placeholder_one_plugins), 1) org_placeholder_two_plugins = page_one_ph_two.get_plugins() # the plugin got moved and child got moved self.assertEqual(len(org_placeholder_two_plugins), 0) org_placeholder_three_plugins = page_one_ph_three.get_plugins() self.assertEqual(len(org_placeholder_three_plugins), 2) # copy the page page_two = self.copy_page(page_one, page_copy_target) # validate the expected pages,placeholders,plugins,pluginbodies after_copy_page_plugin_count = CMSPlugin.objects.count() self.assertEqual(after_copy_page_plugin_count, 6) after_copy_page_count = Page.objects.drafts().count() after_copy_placeholder_count = Placeholder.objects.filter(page__publisher_is_draft=True).count() self.assertGreater(after_copy_page_count, all_page_count, u"no new page after copy") self.assertGreater(after_copy_page_plugin_count, post_add_plugin_count, u"plugin count is not grown") self.assertGreater(after_copy_placeholder_count, pre_copy_placeholder_count, u"placeholder count is not grown") 
self.assertEqual(after_copy_page_count, 3, u"no new page after copy") # validate the structure # orginal placeholder page_one = self.reload(page_one) page_one_ph_one = page_one.placeholders.get(slot=u"col_sidebar") page_one_ph_two = page_one.placeholders.get(slot=u"col_left") page_one_ph_three = page_one.placeholders.get(slot=u"col_right") # check if there are multiple pages assigned to this placeholders found_page = page_one_ph_one.page if page_one_ph_one else None self.assertEqual(found_page, page_one) found_page = page_one_ph_two.page if page_one_ph_two else None self.assertEqual(found_page, page_one) found_page = page_one_ph_three.page if page_one_ph_three else None self.assertEqual(found_page, page_one) page_two = self.reload(page_two) page_two_ph_one = page_two.placeholders.get(slot=u"col_sidebar") page_two_ph_two = page_two.placeholders.get(slot=u"col_left") page_two_ph_three = page_two.placeholders.get(slot=u"col_right") # check if there are multiple pages assigned to this placeholders found_page = page_two_ph_one.page if page_two_ph_one else None self.assertEqual(found_page, page_two) found_page = page_two_ph_two.page if page_two_ph_two else None self.assertEqual(found_page, page_two) found_page = page_two_ph_three.page if page_two_ph_three else None self.assertEqual(found_page, page_two) # check the stored placeholders org vs copy msg = u'placehoder ids copy:%s org:%s copied page %s are identical - tree broken' % ( page_two_ph_one.pk, page_one_ph_one.pk, page_two.pk) self.assertNotEquals(page_two_ph_one.pk, page_one_ph_one.pk, msg) msg = u'placehoder ids copy:%s org:%s copied page %s are identical - tree broken' % ( page_two_ph_two.pk, page_one_ph_two.pk, page_two.pk) self.assertNotEquals(page_two_ph_two.pk, page_one_ph_two.pk, msg) msg = u'placehoder ids copy:%s org:%s copied page %s are identical - tree broken' % ( page_two_ph_three.pk, page_one_ph_three.pk, page_two.pk) self.assertNotEquals(page_two_ph_three.pk, page_one_ph_three.pk, msg) # get the plugins from the original page org_placeholder_one_plugins = page_one_ph_one.get_plugins() self.assertEqual(len(org_placeholder_one_plugins), 1) org_placeholder_two_plugins = page_one_ph_two.get_plugins() self.assertEqual(len(org_placeholder_two_plugins), 0) org_placeholder_three_plugins = page_one_ph_three.get_plugins() self.assertEqual(len(org_placeholder_three_plugins), 2) # get the plugins from the copied page copied_placeholder_one_plugins = page_two_ph_one.get_plugins() self.assertEqual(len(copied_placeholder_one_plugins), 1) copied_placeholder_two_plugins = page_two_ph_two.get_plugins() self.assertEqual(len(copied_placeholder_two_plugins), 0) copied_placeholder_three_plugins = page_two_ph_three.get_plugins() self.assertEqual(len(copied_placeholder_three_plugins), 2) # verify the plugins got copied # placeholder 1 count_plugins_copied = len(copied_placeholder_one_plugins) count_plugins_org = len(org_placeholder_one_plugins) msg = u"plugin count %s %s for placeholder one not equal" % (count_plugins_copied, count_plugins_org) self.assertEqual(count_plugins_copied, count_plugins_org, msg) # placeholder 2 count_plugins_copied = len(copied_placeholder_two_plugins) count_plugins_org = len(org_placeholder_two_plugins) msg = u"plugin count %s %s for placeholder two not equal" % (count_plugins_copied, count_plugins_org) self.assertEqual(count_plugins_copied, count_plugins_org, msg) # placeholder 3 count_plugins_copied = len(copied_placeholder_three_plugins) count_plugins_org = len(org_placeholder_three_plugins) msg = u"plugin count %s 
%s for placeholder three not equal" % (count_plugins_copied, count_plugins_org) self.assertEqual(count_plugins_copied, count_plugins_org, msg) # verify the body of text plugin with nested link plugin # org to copied org_nested_text_plugin = None # do this iteration to find the real text plugin with the attached link # the inheritance mechanism for the cmsplugins works through # (tuple)get_plugin_instance() for x in org_placeholder_three_plugins: if x.plugin_type == u"TextPlugin": instance = x.get_plugin_instance()[0] if instance.body.startswith(pre_nesting_body): org_nested_text_plugin = instance break copied_nested_text_plugin = None for x in copied_placeholder_three_plugins: if x.plugin_type == u"TextPlugin": instance = x.get_plugin_instance()[0] if instance.body.startswith(pre_nesting_body): copied_nested_text_plugin = instance break msg = u"orginal nested text plugin not found" self.assertNotEquals(org_nested_text_plugin, None, msg=msg) msg = u"copied nested text plugin not found" self.assertNotEquals(copied_nested_text_plugin, None, msg=msg) # get the children ids of the texplugin with a nested link # to check if the body of the text is generated correctly org_link_child_plugin = org_nested_text_plugin.get_children()[0] copied_link_child_plugin = copied_nested_text_plugin.get_children()[0] # validate the textplugin body texts msg = u"org plugin and copied plugin are the same" self.assertNotEqual(org_link_child_plugin.id, copied_link_child_plugin.id, msg) needle = u"plugin_obj_%s" msg = u"child plugin id differs to parent in body plugin_obj_id" # linked child is in body self.assertTrue(org_nested_text_plugin.body.find(needle % (org_link_child_plugin.id)) != -1, msg) msg = u"copy: child plugin id differs to parent in body plugin_obj_id" self.assertTrue(copied_nested_text_plugin.body.find(needle % (copied_link_child_plugin.id)) != -1, msg) # really nothing else msg = u"child link plugin id differs to parent body plugin_obj_id" self.assertTrue(org_nested_text_plugin.body.find(needle % (copied_link_child_plugin.id)) == -1, msg) msg = u"copy: child link plugin id differs to parent body plugin_obj_id" self.assertTrue(copied_nested_text_plugin.body.find(needle % (org_link_child_plugin.id)) == -1, msg) # now reverse lookup the placeholders from the plugins org_placeholder = org_link_child_plugin.placeholder copied_placeholder = copied_link_child_plugin.placeholder msg = u"placeholder of the orginal plugin and copied plugin are the same" self.assertNotEqual(org_placeholder.id, copied_placeholder.id, msg) def test_add_child_plugin(self): page_one = create_page(u"Three Placeholder", u"col_three.html", u"en", position=u"last-child", published=True, in_navigation=True) page_one_ph_one = page_one.placeholders.get(slot=u"col_sidebar") # add the text plugin to placeholder one text_plugin_en = add_plugin(page_one_ph_one, u"TextPlugin", u"en", body=u"Hello World") superuser = self.get_superuser() with self.login_user_context(superuser): # now move the parent text plugin to another placeholder post_data = { 'placeholder_id': page_one_ph_one.id, 'plugin_type': 'LinkPlugin', 'plugin_language': 'en', 'plugin_parent': text_plugin_en.pk, } add_url = URL_CMS_ADD_PLUGIN % page_one.pk response = self.client.post(add_url, post_data) self.assertEqual(response.status_code, 200) link_plugin = CMSPlugin.objects.get(parent_id=text_plugin_en.pk) self.assertEqual(link_plugin.parent_id, text_plugin_en.pk) self.assertEqual(link_plugin.path, '00010001')
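The pattern these tests repeat over and over — reload a plugin before passing it as `target` — is easy to get wrong, so a minimal sketch of it outside the test suite may help. This assumes a configured django-cms project; `reload_plugin` is a hypothetical stand-in for the suite's `self.reload()` helper, and `add_child` is an illustrative wrapper, not part of the cms API.

# Minimal sketch of the reload-before-nesting pattern used throughout the
# tests above. Stale tree fields (path/depth/numchild) on a parent plugin
# would corrupt the treebeard structure, so the parent is always re-fetched
# from the database before being used as a target.
from cms.api import add_plugin
from cms.models import CMSPlugin

def reload_plugin(plugin):
    # re-fetch the instance so its tree fields reflect the database state
    return CMSPlugin.objects.get(pk=plugin.pk)

def add_child(placeholder, parent, body):
    parent = reload_plugin(parent)
    return add_plugin(placeholder, "TextPlugin", "en", body=body, target=parent)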
bsd-3-clause
xfournet/intellij-community
python/helpers/pydev/_pydevd_bundle/pydevd_dont_trace.py
23
3696
'''
Support for a tag that allows skipping over functions while debugging.
'''
import linecache
import re

# To suppress tracing a method, add the tag @DontTrace
# to a comment either preceding or on the same line as
# the method definition
#
# E.g.:
# #@DontTrace
# def test1():
#     pass
#
# ... or ...
#
# def test2(): #@DontTrace
#     pass
DONT_TRACE_TAG = '@DontTrace'

# Regular expression to match a decorator (at the beginning
# of a line).
RE_DECORATOR = re.compile(r'^\s*@')

# Mapping from filename to the dict of line numbers ignored in that file.
# If the key exists, the value is the cached result of the @DontTrace scan.
_filename_to_ignored_lines = {}


def default_should_trace_hook(frame, filename):
    '''
    Return True if this frame should be traced, False if tracing should be blocked.
    '''
    # First, check whether this file has a cached scan result
    ignored_lines = _filename_to_ignored_lines.get(filename)
    if ignored_lines is None:
        # Now, look up that line of code and check for a @DontTrace
        # preceding or on the same line as the method.
        # E.g.:
        # #@DontTrace
        # def test():
        #     pass
        # ... or ...
        # def test(): #@DontTrace
        #     pass
        ignored_lines = {}
        lines = linecache.getlines(filename)
        i_line = 0  # Could use enumerate, but not there on all versions...
        for line in lines:
            j = line.find('#')
            if j >= 0:
                comment = line[j:]
                if DONT_TRACE_TAG in comment:
                    ignored_lines[i_line] = 1

                    # Note: when it's found in the comment, mark it up and down
                    # for the decorator lines found.
                    k = i_line - 1
                    while k >= 0:
                        if RE_DECORATOR.match(lines[k]):
                            ignored_lines[k] = 1
                            k -= 1
                        else:
                            break

                    k = i_line + 1
                    while k < len(lines):
                        if RE_DECORATOR.match(lines[k]):
                            ignored_lines[k] = 1
                            k += 1
                        else:
                            break

            i_line += 1

        _filename_to_ignored_lines[filename] = ignored_lines

    func_line = frame.f_code.co_firstlineno - 1  # co_firstlineno is 1-based, so -1 is needed
    return not (
        func_line - 1 in ignored_lines or  # -1 to get line before method
        func_line in ignored_lines)  # method line


should_trace_hook = None


def clear_trace_filter_cache():
    '''
    Clear the trace filter cache.
    Call this after reloading.
    '''
    global should_trace_hook
    try:
        # Need to temporarily disable a hook because otherwise
        # _filename_to_ignored_lines.clear() will never complete.
        old_hook = should_trace_hook
        should_trace_hook = None

        # Clear the linecache
        linecache.clearcache()
        _filename_to_ignored_lines.clear()

    finally:
        should_trace_hook = old_hook


def trace_filter(mode):
    '''
    Set the trace filter mode.

    mode: Whether to enable the trace hook.
      True: Trace filtering on (skipping methods tagged @DontTrace)
      False: Trace filtering off (trace methods tagged @DontTrace)
      None/default: Toggle trace filtering.
    '''
    global should_trace_hook
    if mode is None:
        mode = should_trace_hook is None

    if mode:
        should_trace_hook = default_should_trace_hook
    else:
        should_trace_hook = None

    return mode
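A short usage sketch for the module above, assuming the pydevd bundle is importable and a debugging session is active; the function names are placeholders.

# Tag helpers you never want to step into, then enable filtering.
from _pydevd_bundle import pydevd_dont_trace

#@DontTrace
def noisy_helper():
    pass

def test2():  #@DontTrace
    pass

pydevd_dont_trace.trace_filter(True)   # skip @DontTrace-tagged methods
pydevd_dont_trace.trace_filter(False)  # trace them again
pydevd_dont_trace.trace_filter(None)   # toggle the current mode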
apache-2.0
msimacek/freeipa
ipatests/test_integration/test_topologies.py
5
2450
# Authors:
#   Petr Viktorin <pviktori@redhat.com>
#
# Copyright (C) 2013  Red Hat
# see file 'COPYING' for use and warranty information
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>.

from ipatests.test_integration import tasks


def test_topology_star():
    topo = tasks.get_topo('star')
    assert topo == tasks.star_topo
    assert list(topo('M', [1, 2, 3, 4, 5])) == [
        ('M', 1), ('M', 2), ('M', 3), ('M', 4), ('M', 5),
    ]
    assert list(topo('M', [])) == []


def test_topology_line():
    topo = tasks.get_topo('line')
    assert topo == tasks.line_topo
    assert list(topo('M', [1, 2, 3, 4, 5])) == [
        ('M', 1), (1, 2), (2, 3), (3, 4), (4, 5),
    ]
    assert list(topo('M', [])) == []


def test_topology_tree():
    topo = tasks.get_topo('tree')
    assert topo == tasks.tree_topo
    assert list(topo('M', [1, 2, 3, 4, 5])) == [
        ('M', 1), ('M', 2), (1, 3), (1, 4), (2, 5),
    ]
    assert list(topo('M', [1, 2, 3, 4, 5, 6, 7, 8, 9, 10])) == [
        ('M', 1), ('M', 2), (1, 3), (1, 4), (2, 5), (2, 6), (3, 7), (3, 8),
        (4, 9), (4, 10),
    ]
    assert list(topo('M', [])) == []


def test_topology_tree2():
    topo = tasks.get_topo('tree2')
    assert topo == tasks.tree2_topo
    assert list(topo('M', [1, 2, 3, 4, 5])) == [
        ('M', 1), ('M', 2), (2, 3), (3, 4), (4, 5),
    ]
    assert list(topo('M', [])) == []


def test_topology_complete():
    topo = tasks.get_topo('complete')
    assert topo == tasks.complete_topo
    assert list(topo('M', [1, 2, 3])) == [
        ('M', 1), ('M', 2), ('M', 3),
        (1, 2), (1, 3),
        (2, 3),
    ]
    assert list(topo('M', [])) == []
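The topology functions under test live in ipatests.test_integration.tasks; minimal generators consistent with the assertions above could look like the following. These are illustrative reconstructions, not the actual implementations.

def star_topo(master, replicas):
    # every replica is installed directly from the master
    for replica in replicas:
        yield master, replica

def line_topo(master, replicas):
    # each host is installed from the previous one in the chain
    src = master
    for replica in replicas:
        yield src, replica
        src = replica

# e.g. list(line_topo('M', [1, 2, 3])) == [('M', 1), (1, 2), (2, 3)]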
gpl-3.0
trishnaguha/ansible
lib/ansible/modules/storage/netapp/na_elementsw_cluster_pair.py
14
6385
#!/usr/bin/python
# (c) 2018, NetApp, Inc
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)

from __future__ import absolute_import, division, print_function
__metaclass__ = type

ANSIBLE_METADATA = {'metadata_version': '1.1',
                    'status': ['preview'],
                    'supported_by': 'certified'}

DOCUMENTATION = '''

module: na_elementsw_cluster_pair

short_description: NetApp Element Software Manage Cluster Pair
extends_documentation_fragment:
    - netapp.solidfire
version_added: '2.7'
author: NetApp Ansible Team (@carchi8py) <ng-ansibleteam@netapp.com>
description:
- Create, delete cluster pair

options:

    state:
      description:
      - Whether the specified cluster pair should exist or not.
      choices: ['present', 'absent']
      default: present

    dest_mvip:
      description:
      - Destination IP address of the cluster to be paired.
      required: true

    dest_username:
      description:
      - Destination username for the cluster to be paired.
      - Optional if this is same as source cluster username.

    dest_password:
      description:
      - Destination password for the cluster to be paired.
      - Optional if this is same as source cluster password.

'''

EXAMPLES = """
   - name: Create cluster pair
     na_elementsw_cluster_pair:
       hostname: "{{ src_hostname }}"
       username: "{{ src_username }}"
       password: "{{ src_password }}"
       state: present
       dest_mvip: "{{ dest_hostname }}"

   - name: Delete cluster pair
     na_elementsw_cluster_pair:
       hostname: "{{ src_hostname }}"
       username: "{{ src_username }}"
       password: "{{ src_password }}"
       state: absent
       dest_mvip: "{{ dest_hostname }}"
       dest_username: "{{ dest_username }}"
       dest_password: "{{ dest_password }}"
"""

RETURN = """

"""

from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils._text import to_native
import ansible.module_utils.netapp as netapp_utils
from ansible.module_utils.netapp_elementsw_module import NaElementSWModule
from ansible.module_utils.netapp_module import NetAppModule

HAS_SF_SDK = netapp_utils.has_sf_sdk()
try:
    import solidfire.common
except Exception:
    HAS_SF_SDK = False


class ElementSWClusterPair(object):
    """ class to handle cluster pairing operations """

    def __init__(self):
        """
            Setup Ansible parameters and ElementSW connection
        """
        self.argument_spec = netapp_utils.ontap_sf_host_argument_spec()
        self.argument_spec.update(dict(
            state=dict(required=False, choices=['present', 'absent'],
                       default='present'),
            dest_mvip=dict(required=True, type='str'),
            dest_username=dict(required=False, type='str'),
            dest_password=dict(required=False, type='str', no_log=True)
        ))

        self.module = AnsibleModule(
            argument_spec=self.argument_spec,
            supports_check_mode=True
        )

        if HAS_SF_SDK is False:
            self.module.fail_json(msg="Unable to import the SolidFire Python SDK")
        else:
            self.elem = netapp_utils.create_sf_connection(module=self.module)

        self.elementsw_helper = NaElementSWModule(self.elem)
        self.na_helper = NetAppModule()
        self.parameters = self.na_helper.set_parameters(self.module.params)

        # get element_sw_connection for destination cluster
        # overwrite existing source host, user and password with destination credentials
        self.module.params['hostname'] = self.parameters['dest_mvip']
        # username and password is same as source,
        # if dest_username and dest_password aren't specified
        if self.parameters.get('dest_username'):
            self.module.params['username'] = self.parameters['dest_username']
        if self.parameters.get('dest_password'):
            self.module.params['password'] = self.parameters['dest_password']
        self.dest_elem = netapp_utils.create_sf_connection(module=self.module)
        self.dest_elementsw_helper = NaElementSWModule(self.dest_elem)

    def check_if_already_paired(self):
        """
            Check for idempotency
        """
        # src cluster and dest cluster exist
        paired_clusters = self.elem.list_cluster_pairs()
        for pair in paired_clusters.cluster_pairs:
            if pair.mvip == self.parameters['dest_mvip']:
                return pair.cluster_pair_id
        return None

    def pair_clusters(self):
        """
            Start cluster pairing on source, and complete on target cluster
        """
        try:
            pair_key = self.elem.start_cluster_pairing()
            self.dest_elem.complete_cluster_pairing(
                cluster_pairing_key=pair_key.cluster_pairing_key)
        except solidfire.common.ApiServerError as err:
            self.module.fail_json(msg="Error pairing cluster %s and %s"
                                      % (self.parameters['hostname'],
                                         self.parameters['dest_mvip']),
                                  exception=to_native(err))

    def unpair_clusters(self, pair_id):
        """
            Delete cluster pair
        """
        try:
            self.elem.remove_cluster_pair(cluster_pair_id=pair_id)
            self.dest_elem.remove_cluster_pair(cluster_pair_id=pair_id)
        except solidfire.common.ApiServerError as err:
            self.module.fail_json(msg="Error unpairing cluster %s and %s"
                                      % (self.parameters['hostname'],
                                         self.parameters['dest_mvip']),
                                  exception=to_native(err))

    def apply(self):
        """
            Call create / delete cluster pair methods
        """
        pair_id = self.check_if_already_paired()
        # calling helper to determine action
        cd_action = self.na_helper.get_cd_action(pair_id, self.parameters)
        if cd_action == "create":
            self.pair_clusters()
        elif cd_action == "delete":
            self.unpair_clusters(pair_id)
        self.module.exit_json(changed=self.na_helper.changed)


def main():
    """
        Apply cluster pair actions
    """
    cluster_obj = ElementSWClusterPair()
    cluster_obj.apply()


if __name__ == '__main__':
    main()
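Outside Ansible, the same two-step handshake the module performs can be reproduced directly against the SolidFire Python SDK. A minimal sketch; the MVIPs and credentials are placeholders.

from solidfire.factory import ElementFactory

# connect to both clusters
src = ElementFactory.create("src-mvip", "admin", "password")
dst = ElementFactory.create("dest-mvip", "admin", "password")

# the source issues a pairing key, which the destination then consumes
pair_key = src.start_cluster_pairing()
dst.complete_cluster_pairing(
    cluster_pairing_key=pair_key.cluster_pairing_key)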
gpl-3.0
trishnaguha/ansible
lib/ansible/modules/cloud/google/gce_labels.py
103
12673
#!/usr/bin/python # Copyright 2017 Google Inc. # GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) from __future__ import absolute_import, division, print_function __metaclass__ = type ANSIBLE_METADATA = {'metadata_version': '1.1', 'status': ['preview'], 'supported_by': 'community'} DOCUMENTATION = ''' --- module: gce_labels version_added: '2.4' short_description: Create, Update or Destroy GCE Labels. description: - Create, Update or Destroy GCE Labels on instances, disks, snapshots, etc. When specifying the GCE resource, users may specifiy the full URL for the resource (its 'self_link'), or the individual parameters of the resource (type, location, name). Examples for the two options can be seen in the documentaion. See U(https://cloud.google.com/compute/docs/label-or-tag-resources) for more information about GCE Labels. Labels are gradually being added to more GCE resources, so this module will need to be updated as new resources are added to the GCE (v1) API. requirements: - 'python >= 2.6' - 'google-api-python-client >= 1.6.2' - 'google-auth >= 1.0.0' - 'google-auth-httplib2 >= 0.0.2' notes: - Labels support resources such as instances, disks, images, etc. See U(https://cloud.google.com/compute/docs/labeling-resources) for the list of resources available in the GCE v1 API (not alpha or beta). author: - 'Eric Johnson (@erjohnso) <erjohnso@google.com>' options: labels: description: - A list of labels (key/value pairs) to add or remove for the resource. required: false resource_url: description: - The 'self_link' for the resource (instance, disk, snapshot, etc) required: false resource_type: description: - The type of resource (instances, disks, snapshots, images) required: false resource_location: description: - The location of resource (global, us-central1-f, etc.) required: false resource_name: description: - The name of resource. required: false ''' EXAMPLES = ''' - name: Add labels on an existing instance (using resource_url) gce_labels: service_account_email: "{{ service_account_email }}" credentials_file: "{{ credentials_file }}" project_id: "{{ project_id }}" labels: webserver-frontend: homepage environment: test experiment-name: kennedy resource_url: https://www.googleapis.com/compute/beta/projects/myproject/zones/us-central1-f/instances/example-instance state: present - name: Add labels on an image (using resource params) gce_labels: service_account_email: "{{ service_account_email }}" credentials_file: "{{ credentials_file }}" project_id: "{{ project_id }}" labels: webserver-frontend: homepage environment: test experiment-name: kennedy resource_type: images resource_location: global resource_name: my-custom-image state: present - name: Remove specified labels from the GCE instance gce_labels: service_account_email: "{{ service_account_email }}" credentials_file: "{{ credentials_file }}" project_id: "{{ project_id }}" labels: environment: prod experiment-name: kennedy resource_url: https://www.googleapis.com/compute/beta/projects/myproject/zones/us-central1-f/instances/example-instance state: absent ''' RETURN = ''' labels: description: List of labels that exist on the resource. returned: Always. type: dict sample: [ { 'webserver-frontend': 'homepage', 'environment': 'test', 'environment-name': 'kennedy' } ] resource_url: description: The 'self_link' of the GCE resource. returned: Always. 
type: str sample: 'https://www.googleapis.com/compute/beta/projects/myproject/zones/us-central1-f/instances/example-instance' resource_type: description: The type of the GCE resource. returned: Always. type: str sample: instances resource_location: description: The location of the GCE resource. returned: Always. type: str sample: us-central1-f resource_name: description: The name of the GCE resource. returned: Always. type: str sample: my-happy-little-instance state: description: state of the labels returned: Always. type: str sample: present ''' try: from ast import literal_eval HAS_PYTHON26 = True except ImportError: HAS_PYTHON26 = False from ansible.module_utils.basic import AnsibleModule from ansible.module_utils.gcp import check_params, get_google_api_client, GCPUtils UA_PRODUCT = 'ansible-gce_labels' UA_VERSION = '0.0.1' GCE_API_VERSION = 'v1' # TODO(all): As Labels are added to more GCE resources, this list will need to # be updated (along with some code changes below). The list can *only* include # resources from the 'v1' GCE API and will *not* work with 'beta' or 'alpha'. KNOWN_RESOURCES = ['instances', 'disks', 'snapshots', 'images'] def _fetch_resource(client, module): params = module.params if params['resource_url']: if not params['resource_url'].startswith('https://www.googleapis.com/compute'): module.fail_json( msg='Invalid self_link url: %s' % params['resource_url']) else: parts = params['resource_url'].split('/')[8:] if len(parts) == 2: resource_type, resource_name = parts resource_location = 'global' else: resource_location, resource_type, resource_name = parts else: if not params['resource_type'] or not params['resource_location'] \ or not params['resource_name']: module.fail_json(msg='Missing required resource params.') resource_type = params['resource_type'].lower() resource_name = params['resource_name'].lower() resource_location = params['resource_location'].lower() if resource_type not in KNOWN_RESOURCES: module.fail_json(msg='Unsupported resource_type: %s' % resource_type) # TODO(all): See the comment above for KNOWN_RESOURCES. As labels are # added to the v1 GCE API for more resources, some minor code work will # need to be added here. if resource_type == 'instances': resource = client.instances().get(project=params['project_id'], zone=resource_location, instance=resource_name).execute() elif resource_type == 'disks': resource = client.disks().get(project=params['project_id'], zone=resource_location, disk=resource_name).execute() elif resource_type == 'snapshots': resource = client.snapshots().get(project=params['project_id'], snapshot=resource_name).execute() elif resource_type == 'images': resource = client.images().get(project=params['project_id'], image=resource_name).execute() else: module.fail_json(msg='Unsupported resource type: %s' % resource_type) return resource.get('labelFingerprint', ''), { 'resource_name': resource.get('name'), 'resource_url': resource.get('selfLink'), 'resource_type': resource_type, 'resource_location': resource_location, 'labels': resource.get('labels', {}) } def _set_labels(client, new_labels, module, ri, fingerprint): params = module.params result = err = None labels = { 'labels': new_labels, 'labelFingerprint': fingerprint } # TODO(all): See the comment above for KNOWN_RESOURCES. As labels are # added to the v1 GCE API for more resources, some minor code work will # need to be added here. 
if ri['resource_type'] == 'instances': req = client.instances().setLabels(project=params['project_id'], instance=ri['resource_name'], zone=ri['resource_location'], body=labels) elif ri['resource_type'] == 'disks': req = client.disks().setLabels(project=params['project_id'], zone=ri['resource_location'], resource=ri['resource_name'], body=labels) elif ri['resource_type'] == 'snapshots': req = client.snapshots().setLabels(project=params['project_id'], resource=ri['resource_name'], body=labels) elif ri['resource_type'] == 'images': req = client.images().setLabels(project=params['project_id'], resource=ri['resource_name'], body=labels) else: module.fail_json(msg='Unsupported resource type: %s' % ri['resource_type']) # TODO(erjohnso): Once Labels goes GA, we'll be able to use the GCPUtils # method to poll for the async request/operation to complete before # returning. However, during 'beta', we are in an odd state where # API requests must be sent to the 'compute/beta' API, but the python # client library only allows for *Operations.get() requests to be # sent to 'compute/v1' API. The response operation is in the 'beta' # API-scope, but the client library cannot find the operation (404). # result = GCPUtils.execute_api_client_req(req, client=client, raw=False) # return result, err result = req.execute() return True, err def main(): module = AnsibleModule( argument_spec=dict( state=dict(choices=['absent', 'present'], default='present'), service_account_email=dict(), service_account_permissions=dict(type='list'), pem_file=dict(), credentials_file=dict(), labels=dict(required=False, type='dict', default={}), resource_url=dict(required=False, type='str'), resource_name=dict(required=False, type='str'), resource_location=dict(required=False, type='str'), resource_type=dict(required=False, type='str'), project_id=dict() ), required_together=[ ['resource_name', 'resource_location', 'resource_type'] ], mutually_exclusive=[ ['resource_url', 'resource_name'], ['resource_url', 'resource_location'], ['resource_url', 'resource_type'] ] ) if not HAS_PYTHON26: module.fail_json( msg="GCE module requires python's 'ast' module, python v2.6+") client, cparams = get_google_api_client(module, 'compute', user_agent_product=UA_PRODUCT, user_agent_version=UA_VERSION, api_version=GCE_API_VERSION) # Get current resource info including labelFingerprint fingerprint, resource_info = _fetch_resource(client, module) new_labels = resource_info['labels'].copy() update_needed = False if module.params['state'] == 'absent': for k, v in module.params['labels'].items(): if k in new_labels: if new_labels[k] == v: update_needed = True new_labels.pop(k, None) else: module.fail_json(msg="Could not remove unmatched label pair '%s':'%s'" % (k, v)) else: for k, v in module.params['labels'].items(): if k not in new_labels: update_needed = True new_labels[k] = v changed = False json_output = {'state': module.params['state']} if update_needed: changed, err = _set_labels(client, new_labels, module, resource_info, fingerprint) json_output['changed'] = changed # TODO(erjohnso): probably want to re-fetch the resource to return the # new labelFingerprint, check that desired labels match updated labels. # BUT! Will need to wait for setLabels() to hit v1 API so we can use the # GCPUtils feature to poll for the operation to be complete. For now, # we'll just update the output with what we have from the original # state of the resource. 
    json_output.update(resource_info)
    json_output.update(module.params)
    module.exit_json(**json_output)


if __name__ == '__main__':
    main()
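The module's labelFingerprint handling is the standard GCE optimistic-concurrency pattern: read the resource, then send its fingerprint back with the write so concurrent label edits are rejected rather than silently overwritten. A minimal sketch of that cycle in isolation, assuming `client` is an authenticated google-api-python-client compute handle; the project, zone, and instance names are placeholders.

# read-modify-write on instance labels, guarded by labelFingerprint
instance = client.instances().get(
    project='my-project', zone='us-central1-f', instance='my-vm').execute()

labels = instance.get('labels', {})
labels['environment'] = 'test'

client.instances().setLabels(
    project='my-project', zone='us-central1-f', instance='my-vm',
    body={'labels': labels,
          'labelFingerprint': instance['labelFingerprint']}).execute()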
gpl-3.0
fidomason/kbengine
kbe/src/lib/python/Tools/msi/uisample.py
36
147972
import msilib,os;dirname=os.path.dirname(__file__) AdminExecuteSequence = [ (u'InstallValidate', None, 1400), (u'InstallInitialize', None, 1500), (u'InstallFinalize', None, 6600), (u'InstallFiles', None, 4000), (u'InstallAdminPackage', None, 3900), (u'FileCost', None, 900), (u'CostInitialize', None, 800), (u'CostFinalize', None, 1000), ] AdminUISequence = [ (u'AdminWelcomeDlg', None, 1230), (u'FileCost', None, 900), (u'CostInitialize', None, 800), (u'CostFinalize', None, 1000), (u'ExecuteAction', None, 1300), (u'ExitDialog', None, -1), (u'FatalError', None, -3), (u'PrepareDlg', None, 140), (u'ProgressDlg', None, 1280), (u'UserExit', None, -2), ] AdvtExecuteSequence = [ (u'InstallValidate', None, 1400), (u'InstallInitialize', None, 1500), (u'InstallFinalize', None, 6600), (u'CostInitialize', None, 800), (u'CostFinalize', None, 1000), (u'CreateShortcuts', None, 4500), (u'PublishComponents', None, 6200), (u'PublishFeatures', None, 6300), (u'PublishProduct', None, 6400), (u'RegisterClassInfo', None, 4600), (u'RegisterExtensionInfo', None, 4700), (u'RegisterMIMEInfo', None, 4900), (u'RegisterProgIdInfo', None, 4800), ] BBControl = [ ] Billboard = [ ] Binary = [ (u'bannrbmp', msilib.Binary(os.path.join(dirname,"bannrbmp.bin"))), (u'completi', msilib.Binary(os.path.join(dirname,"completi.bin"))), (u'custicon', msilib.Binary(os.path.join(dirname,"custicon.bin"))), (u'dlgbmp', msilib.Binary(os.path.join(dirname,"dlgbmp.bin"))), (u'exclamic', msilib.Binary(os.path.join(dirname,"exclamic.bin"))), (u'info', msilib.Binary(os.path.join(dirname,"info.bin"))), (u'insticon', msilib.Binary(os.path.join(dirname,"insticon.bin"))), (u'New', msilib.Binary(os.path.join(dirname,"New.bin"))), (u'removico', msilib.Binary(os.path.join(dirname,"removico.bin"))), (u'repairic', msilib.Binary(os.path.join(dirname,"repairic.bin"))), (u'Up', msilib.Binary(os.path.join(dirname,"Up.bin"))), ] CheckBox = [ ] Property = [ (u'BannerBitmap', u'bannrbmp'), (u'IAgree', u'No'), (u'ProductID', u'none'), (u'ARPHELPLINK', u'http://www.microsoft.com/management'), (u'ButtonText_Back', u'< &Back'), (u'ButtonText_Browse', u'Br&owse'), (u'ButtonText_Cancel', u'Cancel'), (u'ButtonText_Exit', u'&Exit'), (u'ButtonText_Finish', u'&Finish'), (u'ButtonText_Ignore', u'&Ignore'), (u'ButtonText_Install', u'&Install'), (u'ButtonText_Next', u'&Next >'), (u'ButtonText_No', u'&No'), (u'ButtonText_OK', u'OK'), (u'ButtonText_Remove', u'&Remove'), (u'ButtonText_Repair', u'&Repair'), (u'ButtonText_Reset', u'&Reset'), (u'ButtonText_Resume', u'&Resume'), (u'ButtonText_Retry', u'&Retry'), (u'ButtonText_Return', u'&Return'), (u'ButtonText_Yes', u'&Yes'), (u'CompleteSetupIcon', u'completi'), (u'ComponentDownload', u'ftp://anonymous@microsoft.com/components/'), (u'CustomSetupIcon', u'custicon'), (u'DefaultUIFont', u'DlgFont8'), (u'DialogBitmap', u'dlgbmp'), (u'DlgTitleFont', u'{&DlgFontBold8}'), (u'ErrorDialog', u'ErrorDlg'), (u'ExclamationIcon', u'exclamic'), (u'InfoIcon', u'info'), (u'InstallerIcon', u'insticon'), (u'INSTALLLEVEL', u'3'), (u'InstallMode', u'Typical'), (u'PIDTemplate', u'12345<###-%%%%%%%>@@@@@'), #(u'ProductLanguage', u'1033'), (u'Progress1', u'Installing'), (u'Progress2', u'installs'), (u'PROMPTROLLBACKCOST', u'P'), (u'RemoveIcon', u'removico'), (u'RepairIcon', u'repairic'), (u'Setup', u'Setup'), (u'ShowUserRegistrationDlg', u'1'), (u'Wizard', u'Setup Wizard'), ] ComboBox = [ ] Control = [ (u'AdminWelcomeDlg', u'Bitmap', u'Bitmap', 0, 0, 370, 234, 1, None, u'[DialogBitmap]', u'Back', None), (u'AdminWelcomeDlg', u'BottomLine', u'Line', 0, 
Control = [
(u'AdminWelcomeDlg', u'Bitmap', u'Bitmap', 0, 0, 370, 234, 1, None, u'[DialogBitmap]', u'Back', None),
(u'AdminWelcomeDlg', u'BottomLine', u'Line', 0, 234, 374, 0, 1, None, None, None, None),
(u'AdminWelcomeDlg', u'Cancel', u'PushButton', 304, 243, 56, 17, 3, None, u'[ButtonText_Cancel]', u'Bitmap', None),
(u'AdminWelcomeDlg', u'Description', u'Text', 135, 70, 220, 30, 196611, None, u'The [Wizard] will create a server image of [ProductName], at a specified network location. Click Next to continue or Cancel to exit the [Wizard].', None, None),
(u'AdminWelcomeDlg', u'Title', u'Text', 135, 20, 220, 60, 196611, None, u'{\\VerdanaBold13}Welcome to the [ProductName] [Wizard]', None, None),
(u'AdminWelcomeDlg', u'Back', u'PushButton', 180, 243, 56, 17, 1, None, u'[ButtonText_Back]', u'Next', None),
(u'AdminWelcomeDlg', u'Next', u'PushButton', 236, 243, 56, 17, 3, None, u'[ButtonText_Next]', u'Cancel', None),
(u'ExitDialog', u'Bitmap', u'Bitmap', 0, 0, 370, 234, 1, None, u'[DialogBitmap]', u'Back', None),
(u'ExitDialog', u'BottomLine', u'Line', 0, 234, 374, 0, 1, None, None, None, None),
(u'ExitDialog', u'Cancel', u'PushButton', 304, 243, 56, 17, 1, None, u'[ButtonText_Cancel]', u'Bitmap', None),
(u'ExitDialog', u'Description', u'Text', 135, 70, 220, 20, 196611, None, u'Click the Finish button to exit the [Wizard].', None, None),
(u'ExitDialog', u'Title', u'Text', 135, 20, 220, 60, 196611, None, u'{\\VerdanaBold13}Completing the [ProductName] [Wizard]', None, None),
(u'ExitDialog', u'Back', u'PushButton', 180, 243, 56, 17, 1, None, u'[ButtonText_Back]', u'Finish', None),
(u'ExitDialog', u'Finish', u'PushButton', 236, 243, 56, 17, 3, None, u'[ButtonText_Finish]', u'Cancel', None),
(u'FatalError', u'Bitmap', u'Bitmap', 0, 0, 370, 234, 1, None, u'[DialogBitmap]', u'Back', None),
(u'FatalError', u'BottomLine', u'Line', 0, 234, 374, 0, 1, None, None, None, None),
(u'FatalError', u'Cancel', u'PushButton', 304, 243, 56, 17, 1, None, u'[ButtonText_Cancel]', u'Bitmap', None),
(u'FatalError', u'Title', u'Text', 135, 20, 220, 60, 196611, None, u'{\\VerdanaBold13}[ProductName] [Wizard] ended prematurely', None, None),
(u'FatalError', u'Back', u'PushButton', 180, 243, 56, 17, 1, None, u'[ButtonText_Back]', u'Finish', None),
(u'FatalError', u'Finish', u'PushButton', 236, 243, 56, 17, 3, None, u'[ButtonText_Finish]', u'Cancel', None),
(u'FatalError', u'Description1', u'Text', 135, 70, 220, 40, 196611, None, u'[ProductName] setup ended prematurely because of an error. Your system has not been modified. To install this program at a later time, please run the installation again.', None, None),
(u'FatalError', u'Description2', u'Text', 135, 115, 220, 20, 196611, None, u'Click the Finish button to exit the [Wizard].', None, None),
(u'PrepareDlg', u'Bitmap', u'Bitmap', 0, 0, 370, 234, 1, None, u'[DialogBitmap]', u'Cancel', None),
(u'PrepareDlg', u'BottomLine', u'Line', 0, 234, 374, 0, 1, None, None, None, None),
(u'PrepareDlg', u'Cancel', u'PushButton', 304, 243, 56, 17, 3, None, u'[ButtonText_Cancel]', u'Bitmap', None),
(u'PrepareDlg', u'Description', u'Text', 135, 70, 220, 20, 196611, None, u'Please wait while the [Wizard] prepares to guide you through the installation.', None, None),
(u'PrepareDlg', u'Title', u'Text', 135, 20, 220, 60, 196611, None, u'{\\VerdanaBold13}Welcome to the [ProductName] [Wizard]', None, None),
(u'PrepareDlg', u'Back', u'PushButton', 180, 243, 56, 17, 1, None, u'[ButtonText_Back]', None, None),
(u'PrepareDlg', u'Next', u'PushButton', 236, 243, 56, 17, 1, None, u'[ButtonText_Next]', None, None),
(u'PrepareDlg', u'ActionData', u'Text', 135, 125, 220, 30, 196611, None, None, None, None),
(u'PrepareDlg', u'ActionText', u'Text', 135, 100, 220, 20, 196611, None, None, None, None),
(u'ProgressDlg', u'Text', u'Text', 35, 65, 300, 20, 3, None, u'Please wait while the [Wizard] [Progress2] [ProductName]. This may take several minutes.', None, None),
(u'ProgressDlg', u'BannerBitmap', u'Bitmap', 0, 0, 374, 44, 1, None, u'[BannerBitmap]', u'Back', None),
(u'ProgressDlg', u'BannerLine', u'Line', 0, 44, 374, 0, 1, None, None, None, None),
(u'ProgressDlg', u'BottomLine', u'Line', 0, 234, 374, 0, 1, None, None, None, None),
(u'ProgressDlg', u'Cancel', u'PushButton', 304, 243, 56, 17, 3, None, u'[ButtonText_Cancel]', u'BannerBitmap', None),
(u'ProgressDlg', u'Title', u'Text', 20, 15, 200, 15, 196611, None, u'[DlgTitleFont][Progress1] [ProductName]', None, None),
(u'ProgressDlg', u'Back', u'PushButton', 180, 243, 56, 17, 1, None, u'[ButtonText_Back]', u'Next', None),
(u'ProgressDlg', u'Next', u'PushButton', 236, 243, 56, 17, 1, None, u'[ButtonText_Next]', u'Cancel', None),
(u'ProgressDlg', u'ActionText', u'Text', 70, 100, 265, 10, 3, None, None, None, None),
(u'ProgressDlg', u'ProgressBar', u'ProgressBar', 35, 115, 300, 10, 65537, None, u'Progress done', None, None),
(u'ProgressDlg', u'StatusLabel', u'Text', 35, 100, 35, 10, 3, None, u'Status:', None, None),
(u'UserExit', u'Bitmap', u'Bitmap', 0, 0, 370, 234, 1, None, u'[DialogBitmap]', u'Back', None),
(u'UserExit', u'BottomLine', u'Line', 0, 234, 374, 0, 1, None, None, None, None),
(u'UserExit', u'Cancel', u'PushButton', 304, 243, 56, 17, 1, None, u'[ButtonText_Cancel]', u'Bitmap', None),
(u'UserExit', u'Title', u'Text', 135, 20, 220, 60, 196611, None, u'{\\VerdanaBold13}[ProductName] [Wizard] was interrupted', None, None),
(u'UserExit', u'Back', u'PushButton', 180, 243, 56, 17, 1, None, u'[ButtonText_Back]', u'Finish', None),
(u'UserExit', u'Finish', u'PushButton', 236, 243, 56, 17, 3, None, u'[ButtonText_Finish]', u'Cancel', None),
(u'UserExit', u'Description1', u'Text', 135, 70, 220, 40, 196611, None, u'[ProductName] setup was interrupted. Your system has not been modified. To install this program at a later time, please run the installation again.', None, None),
(u'UserExit', u'Description2', u'Text', 135, 115, 220, 20, 196611, None, u'Click the Finish button to exit the [Wizard].', None, None),
(u'AdminBrowseDlg', u'Up', u'PushButton', 298, 55, 19, 19, 3670019, None, u'Up', u'NewFolder', u'Up One Level|'),
(u'AdminBrowseDlg', u'BannerBitmap', u'Bitmap', 0, 0, 374, 44, 1, None, u'[BannerBitmap]', u'PathEdit', None),
(u'AdminBrowseDlg', u'PathEdit', u'PathEdit', 84, 202, 261, 17, 3, u'TARGETDIR', None, u'OK', None),
(u'AdminBrowseDlg', u'BannerLine', u'Line', 0, 44, 374, 0, 1, None, None, None, None),
(u'AdminBrowseDlg', u'BottomLine', u'Line', 0, 234, 374, 0, 1, None, None, None, None),
(u'AdminBrowseDlg', u'Cancel', u'PushButton', 240, 243, 56, 17, 3, None, u'[ButtonText_Cancel]', u'ComboLabel', None),
(u'AdminBrowseDlg', u'ComboLabel', u'Text', 25, 58, 44, 10, 3, None, u'&Look in:', u'DirectoryCombo', None),
(u'AdminBrowseDlg', u'DirectoryCombo', u'DirectoryCombo', 70, 55, 220, 80, 458755, u'TARGETDIR', None, u'Up', None),
(u'AdminBrowseDlg', u'Description', u'Text', 25, 23, 280, 15, 196611, None, u'Browse to the destination folder', None, None),
(u'AdminBrowseDlg', u'DirectoryList', u'DirectoryList', 25, 83, 320, 110, 7, u'TARGETDIR', None, u'PathLabel', None),
(u'AdminBrowseDlg', u'PathLabel', u'Text', 25, 205, 59, 10, 3, None, u'&Folder name:', u'BannerBitmap', None),
(u'AdminBrowseDlg', u'NewFolder', u'PushButton', 325, 55, 19, 19, 3670019, None, u'New', u'DirectoryList', u'Create A New Folder|'),
(u'AdminBrowseDlg', u'OK', u'PushButton', 304, 243, 56, 17, 3, None, u'[ButtonText_OK]', u'Cancel', None),
(u'AdminBrowseDlg', u'Title', u'Text', 15, 6, 200, 15, 196611, None, u'[DlgTitleFont]Change current destination folder', None, None),
(u'AdminInstallPointDlg', u'Text', u'Text', 25, 80, 320, 10, 3, None, u'&Enter a new network location or click Browse to browse to one.', u'PathEdit', None),
(u'AdminInstallPointDlg', u'BannerBitmap', u'Bitmap', 0, 0, 374, 44, 1, None, u'[BannerBitmap]', u'Text', None),
(u'AdminInstallPointDlg', u'PathEdit', u'PathEdit', 25, 93, 320, 18, 3, u'TARGETDIR', None, u'Browse', None),
(u'AdminInstallPointDlg', u'BannerLine', u'Line', 0, 44, 374, 0, 1, None, None, None, None),
(u'AdminInstallPointDlg', u'BottomLine', u'Line', 0, 234, 374, 0, 1, None, None, None, None),
(u'AdminInstallPointDlg', u'Cancel', u'PushButton', 304, 243, 56, 17, 3, None, u'[ButtonText_Cancel]', u'BannerBitmap', None),
(u'AdminInstallPointDlg', u'Description', u'Text', 25, 20, 280, 20, 196611, None, u'Please specify a network location for the server image of [ProductName] product', None, None),
(u'AdminInstallPointDlg', u'Title', u'Text', 15, 6, 200, 15, 196611, None, u'[DlgTitleFont]Network Location', None, None),
(u'AdminInstallPointDlg', u'Back', u'PushButton', 180, 243, 56, 17, 3, None, u'[ButtonText_Back]', u'Next', None),
(u'AdminInstallPointDlg', u'Next', u'PushButton', 236, 243, 56, 17, 3, None, u'[ButtonText_Next]', u'Cancel', None),
(u'AdminInstallPointDlg', u'Browse', u'PushButton', 289, 119, 56, 17, 3, None, u'[ButtonText_Browse]', u'Back', None),
(u'AdminRegistrationDlg', u'BannerBitmap', u'Bitmap', 0, 0, 374, 44, 1, None, u'[BannerBitmap]', u'OrganizationLabel', None),
(u'AdminRegistrationDlg', u'BannerLine', u'Line', 0, 44, 374, 0, 1, None, None, None, None),
(u'AdminRegistrationDlg', u'BottomLine', u'Line', 0, 234, 374, 0, 1, None, None, None, None),
(u'AdminRegistrationDlg', u'Cancel', u'PushButton', 304, 243, 56, 17, 3, None, u'[ButtonText_Cancel]', u'BannerBitmap', None),
(u'AdminRegistrationDlg', u'Description', u'Text', 25, 23, 280, 15, 196611, None, u'Please enter your company information', None, None),
(u'AdminRegistrationDlg', u'Title', u'Text', 15, 6, 200, 15, 196611, None, u'[DlgTitleFont]Company Information', None, None),
(u'AdminRegistrationDlg', u'Back', u'PushButton', 180, 243, 56, 17, 65539, None, u'[ButtonText_Back]', u'Next', None),
(u'AdminRegistrationDlg', u'Next', u'PushButton', 236, 243, 56, 17, 3, None, u'[ButtonText_Next]', u'Cancel', None),
(u'AdminRegistrationDlg', u'OrganizationLabel', u'Text', 45, 71, 285, 30, 3, None, u'&Please enter the name of your organization in the box below. This will be used as default company name for subsequent installations of [ProductName]:', u'OrganizationEdit', None),
(u'AdminRegistrationDlg', u'CDKeyEdit', u'MaskedEdit', 45, 143, 250, 16, 3, u'PIDKEY', u'[PIDTemplate]', u'Back', None),
(u'AdminRegistrationDlg', u'CDKeyLabel', u'Text', 45, 130, 50, 10, 3, None, u'CD &Key:', u'CDKeyEdit', None),
(u'AdminRegistrationDlg', u'OrganizationEdit', u'Edit', 45, 105, 220, 18, 3, u'COMPANYNAME', u'{80}', u'CDKeyLabel', None),
(u'BrowseDlg', u'Up', u'PushButton', 298, 55, 19, 19, 3670019, None, u'Up', u'NewFolder', u'Up One Level|'),
(u'BrowseDlg', u'BannerBitmap', u'Bitmap', 0, 0, 374, 44, 1, None, u'[BannerBitmap]', u'PathEdit', None),
(u'BrowseDlg', u'PathEdit', u'PathEdit', 84, 202, 261, 18, 11, u'_BrowseProperty', None, u'OK', None),
(u'BrowseDlg', u'BannerLine', u'Line', 0, 44, 374, 0, 1, None, None, None, None),
(u'BrowseDlg', u'BottomLine', u'Line', 0, 234, 374, 0, 1, None, None, None, None),
(u'BrowseDlg', u'Cancel', u'PushButton', 240, 243, 56, 17, 3, None, u'[ButtonText_Cancel]', u'ComboLabel', None),
(u'BrowseDlg', u'ComboLabel', u'Text', 25, 58, 44, 10, 3, None, u'&Look in:', u'DirectoryCombo', None),
(u'BrowseDlg', u'DirectoryCombo', u'DirectoryCombo', 70, 55, 220, 80, 393227, u'_BrowseProperty', None, u'Up', None),
(u'BrowseDlg', u'Description', u'Text', 25, 23, 280, 15, 196611, None, u'Browse to the destination folder', None, None),
(u'BrowseDlg', u'DirectoryList', u'DirectoryList', 25, 83, 320, 110, 15, u'_BrowseProperty', None, u'PathLabel', None),
(u'BrowseDlg', u'PathLabel', u'Text', 25, 205, 59, 10, 3, None, u'&Folder name:', u'BannerBitmap', None),
(u'BrowseDlg', u'NewFolder', u'PushButton', 325, 55, 19, 19, 3670019, None, u'New', u'DirectoryList', u'Create A New Folder|'),
(u'BrowseDlg', u'OK', u'PushButton', 304, 243, 56, 17, 3, None, u'[ButtonText_OK]', u'Cancel', None),
(u'BrowseDlg', u'Title', u'Text', 15, 6, 200, 15, 196611, None, u'[DlgTitleFont]Change current destination folder', None, None),
(u'CancelDlg', u'Text', u'Text', 48, 15, 194, 30, 3, None, u'Are you sure you want to cancel [ProductName] installation?', None, None),
(u'CancelDlg', u'Icon', u'Icon', 15, 15, 24, 24, 5242881, None, u'[InfoIcon]', None, u'Information icon|'),
(u'CancelDlg', u'No', u'PushButton', 132, 57, 56, 17, 3, None, u'[ButtonText_No]', u'Yes', None),
(u'CancelDlg', u'Yes', u'PushButton', 72, 57, 56, 17, 3, None, u'[ButtonText_Yes]', u'No', None),
(u'CustomizeDlg', u'Text', u'Text', 25, 55, 320, 20, 3, None, u'Click on the icons in the tree below to change the way features will be installed.', None, None),
(u'CustomizeDlg', u'BannerBitmap', u'Bitmap', 0, 0, 374, 44, 1, None, u'[BannerBitmap]', u'Tree', None),
(u'CustomizeDlg', u'BannerLine', u'Line', 0, 44, 374, 0, 1, None, None, None, None),
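# Note: the ninth field (Property) binds an input control to an installer
# property -- e.g. the PathEdit controls above write TARGETDIR, and the
# MaskedEdit writes PIDKEY using the [PIDTemplate] mask.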
(u'CustomizeDlg', u'BottomLine', u'Line', 0, 234, 374, 0, 1, None, None, None, None),
(u'CustomizeDlg', u'Cancel', u'PushButton', 304, 243, 56, 17, 3, None, u'[ButtonText_Cancel]', u'BannerBitmap', None),
(u'CustomizeDlg', u'Description', u'Text', 25, 23, 280, 15, 196611, None, u'Select the way you want features to be installed.', None, None),
(u'CustomizeDlg', u'Title', u'Text', 15, 6, 200, 15, 196611, None, u'[DlgTitleFont]Custom Setup', None, None),
(u'CustomizeDlg', u'Back', u'PushButton', 180, 243, 56, 17, 3, None, u'[ButtonText_Back]', u'Next', None),
(u'CustomizeDlg', u'Next', u'PushButton', 236, 243, 56, 17, 3, None, u'[ButtonText_Next]', u'Cancel', None),
(u'CustomizeDlg', u'Browse', u'PushButton', 304, 200, 56, 17, 3, None, u'[ButtonText_Browse]', u'Reset', None),
(u'CustomizeDlg', u'Tree', u'SelectionTree', 25, 85, 175, 95, 7, u'_BrowseProperty', u'Tree of selections', u'Browse', None),
(u'CustomizeDlg', u'Box', u'GroupBox', 210, 81, 140, 98, 1, None, None, None, None),
(u'CustomizeDlg', u'Reset', u'PushButton', 42, 243, 56, 17, 3, None, u'[ButtonText_Reset]', u'DiskCost', None),
(u'CustomizeDlg', u'DiskCost', u'PushButton', 111, 243, 56, 17, 3, None, u'Disk &Usage', u'Back', None),
(u'CustomizeDlg', u'ItemDescription', u'Text', 215, 90, 131, 30, 3, None, u'Multiline description of the currently selected item.', None, None),
(u'CustomizeDlg', u'ItemSize', u'Text', 215, 130, 131, 45, 3, None, u'The size of the currently selected item.', None, None),
(u'CustomizeDlg', u'Location', u'Text', 75, 200, 215, 20, 3, None, u"<The selection's path>", None, None),
(u'CustomizeDlg', u'LocationLabel', u'Text', 25, 200, 50, 10, 3, None, u'Location:', None, None),
(u'DiskCostDlg', u'Text', u'Text', 20, 53, 330, 40, 3, None, u'The highlighted volumes (if any) do not have enough disk space available for the currently selected features. You can either remove some files from the highlighted volumes, or choose to install less features onto local drive(s), or select different destination drive(s).', None, None),
(u'DiskCostDlg', u'BannerBitmap', u'Bitmap', 0, 0, 374, 44, 1, None, u'[BannerBitmap]', u'OK', None),
(u'DiskCostDlg', u'BannerLine', u'Line', 0, 44, 374, 0, 1, None, None, None, None),
(u'DiskCostDlg', u'BottomLine', u'Line', 0, 234, 374, 0, 1, None, None, None, None),
(u'DiskCostDlg', u'Description', u'Text', 20, 20, 280, 20, 196611, None, u'The disk space required for the installation of the selected features.', None, None),
(u'DiskCostDlg', u'OK', u'PushButton', 304, 243, 56, 17, 3, None, u'[ButtonText_OK]', u'BannerBitmap', None),
(u'DiskCostDlg', u'Title', u'Text', 15, 6, 200, 15, 196611, None, u'[DlgTitleFont]Disk Space Requirements', None, None),
(u'DiskCostDlg', u'VolumeList', u'VolumeCostList', 20, 100, 330, 120, 393223, None, u'{120}{70}{70}{70}{70}', None, None),
(u'ErrorDlg', u'Y', u'PushButton', 100, 80, 56, 17, 3, None, u'[ButtonText_Yes]', None, None),
(u'ErrorDlg', u'A', u'PushButton', 100, 80, 56, 17, 3, None, u'[ButtonText_Cancel]', None, None),
(u'ErrorDlg', u'C', u'PushButton', 100, 80, 56, 17, 3, None, u'[ButtonText_Cancel]', None, None),
(u'ErrorDlg', u'ErrorIcon', u'Icon', 15, 15, 24, 24, 5242881, None, u'[InfoIcon]', None, u'Information icon|'),
(u'ErrorDlg', u'ErrorText', u'Text', 48, 15, 205, 60, 3, None, u'Information text', None, None),
(u'ErrorDlg', u'I', u'PushButton', 100, 80, 56, 17, 3, None, u'[ButtonText_Ignore]', None, None),
(u'ErrorDlg', u'N', u'PushButton', 100, 80, 56, 17, 3, None, u'[ButtonText_No]', None, None),
(u'ErrorDlg', u'O', u'PushButton', 100, 80, 56, 17, 3, None, u'[ButtonText_OK]', None, None),
(u'ErrorDlg', u'R', u'PushButton', 100, 80, 56, 17, 3, None, u'[ButtonText_Retry]', None, None),
(u'FilesInUse', u'Text', u'Text', 20, 55, 330, 30, 3, None, u'The following applications are using files that need to be updated by this setup. Close these applications and then click Retry to continue the installation or Cancel to exit it.', None, None),
(u'FilesInUse', u'BannerBitmap', u'Bitmap', 0, 0, 374, 44, 1, None, u'[BannerBitmap]', u'Retry', None),
(u'FilesInUse', u'BannerLine', u'Line', 0, 44, 374, 0, 1, None, None, None, None),
(u'FilesInUse', u'BottomLine', u'Line', 0, 234, 374, 0, 1, None, None, None, None),
(u'FilesInUse', u'Description', u'Text', 20, 23, 280, 20, 196611, None, u'Some files that need to be updated are currently in use.', None, None),
(u'FilesInUse', u'Title', u'Text', 15, 6, 200, 15, 196611, None, u'[DlgTitleFont]Files in Use', None, None),
(u'FilesInUse', u'Retry', u'PushButton', 304, 243, 56, 17, 3, None, u'[ButtonText_Retry]', u'Ignore', None),
(u'FilesInUse', u'Exit', u'PushButton', 166, 243, 56, 17, 3, None, u'[ButtonText_Exit]', u'BannerBitmap', None),
(u'FilesInUse', u'Ignore', u'PushButton', 235, 243, 56, 17, 3, None, u'[ButtonText_Ignore]', u'Exit', None),
(u'FilesInUse', u'List', u'ListBox', 20, 87, 330, 130, 7, u'FileInUseProcess', None, None, None),
(u'LicenseAgreementDlg', u'BannerBitmap', u'Bitmap', 0, 0, 374, 44, 1, None, u'[BannerBitmap]', u'AgreementText', None),
(u'LicenseAgreementDlg', u'BannerLine', u'Line', 0, 44, 374, 0, 1, None, None, None, None),
(u'LicenseAgreementDlg', u'BottomLine', u'Line', 0, 234, 374, 0, 1, None, None, None, None),
(u'LicenseAgreementDlg', u'Cancel', u'PushButton', 304, 243, 56, 17, 3, None, u'[ButtonText_Cancel]', u'BannerBitmap', None),
(u'LicenseAgreementDlg', u'Description', u'Text', 25, 23, 280, 15, 196611, None, u'Please read the following license agreement carefully', None, None),
(u'LicenseAgreementDlg', u'Title', u'Text', 15, 6, 200, 15, 196611, None, u'[DlgTitleFont]End-User License Agreement', None, None),
(u'LicenseAgreementDlg', u'Back', u'PushButton', 180, 243, 56, 17, 3, None, u'[ButtonText_Back]', u'Next', None),
(u'LicenseAgreementDlg', u'Next', u'PushButton', 236, 243, 56, 17, 3, None, u'[ButtonText_Next]', u'Cancel', None),
(u'LicenseAgreementDlg', u'AgreementText', u'ScrollableText', 20, 60, 330, 120, 7, None, u'{\\rtf1\\ansi\\ansicpg1252\\deff0\\deftab720{\\fonttbl{\\f0\\froman\\fprq2 Times New Roman;}}{\\colortbl\\red0\\green0\\blue0;} \\deflang1033\\horzdoc{\\*\\fchars }{\\*\\lchars }\\pard\\plain\\f0\\fs20 <Your license agreement should go here.>\\par }', u'Buttons', None),
(u'LicenseAgreementDlg', u'Buttons', u'RadioButtonGroup', 20, 187, 330, 40, 3, u'IAgree', None, u'Back', None),
(u'MaintenanceTypeDlg', u'BannerBitmap', u'Bitmap', 0, 0, 374, 44, 1, None, u'[BannerBitmap]', u'ChangeLabel', None),
(u'MaintenanceTypeDlg', u'BannerLine', u'Line', 0, 44, 374, 0, 1, None, None, None, None),
(u'MaintenanceTypeDlg', u'BottomLine', u'Line', 0, 234, 374, 0, 1, None, None, None, None),
(u'MaintenanceTypeDlg', u'Cancel', u'PushButton', 304, 243, 56, 17, 3, None, u'[ButtonText_Cancel]', u'BannerBitmap', None),
(u'MaintenanceTypeDlg', u'Description', u'Text', 25, 23, 280, 20, 196611, None, u'Select the operation you wish to perform.', None, None),
(u'MaintenanceTypeDlg', u'Title', u'Text', 15, 6, 240, 15, 196611, None, u'[DlgTitleFont]Modify, Repair or Remove installation', None, None),
(u'MaintenanceTypeDlg', u'Back', u'PushButton', 180, 243, 56, 17, 3, None, u'[ButtonText_Back]', u'Next', None),
(u'MaintenanceTypeDlg', u'Next', u'PushButton', 236, 243, 56, 17, 1, None, u'[ButtonText_Next]', u'Cancel', None),
(u'MaintenanceTypeDlg', u'ChangeLabel', u'Text', 105, 65, 100, 10, 3, None, u'[DlgTitleFont]&Modify', u'ChangeButton', None),
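# The Attributes values are Windows Installer control attribute bit fields:
# 1 = Visible and 2 = Enabled, so 3 is a plain visible, enabled control.
# Text controls typically also set 0x10000 (Transparent) and 0x20000
# (NoPrefix), which yields the recurring 196611 (0x30003); the larger
# values on PushButton/Icon controls add type-specific flags.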
(u'MaintenanceTypeDlg', u'ChangeButton', u'PushButton', 50, 65, 38, 38, 5767171, None, u'[CustomSetupIcon]', u'RepairLabel', u'Modify Installation|'),
(u'MaintenanceTypeDlg', u'RepairLabel', u'Text', 105, 114, 100, 10, 3, None, u'[DlgTitleFont]Re&pair', u'RepairButton', None),
(u'MaintenanceTypeDlg', u'ChangeText', u'Text', 105, 78, 230, 20, 3, None, u'Allows users to change the way features are installed.', None, None),
(u'MaintenanceTypeDlg', u'RemoveButton', u'PushButton', 50, 163, 38, 38, 5767171, None, u'[RemoveIcon]', u'Back', u'Remove Installation|'),
(u'MaintenanceTypeDlg', u'RemoveLabel', u'Text', 105, 163, 100, 10, 3, None, u'[DlgTitleFont]&Remove', u'RemoveButton', None),
(u'MaintenanceTypeDlg', u'RemoveText', u'Text', 105, 176, 230, 20, 3, None, u'Removes [ProductName] from your computer.', None, None),
(u'MaintenanceTypeDlg', u'RepairButton', u'PushButton', 50, 114, 38, 38, 5767171, None, u'[RepairIcon]', u'RemoveLabel', u'Repair Installation|'),
(u'MaintenanceTypeDlg', u'RepairText', u'Text', 105, 127, 230, 30, 3, None, u'Repairs errors in the most recent installation state - fixes missing or corrupt files, shortcuts and registry entries.', None, None),
(u'MaintenanceWelcomeDlg', u'Bitmap', u'Bitmap', 0, 0, 370, 234, 1, None, u'[DialogBitmap]', u'Back', None),
(u'MaintenanceWelcomeDlg', u'BottomLine', u'Line', 0, 234, 374, 0, 1, None, None, None, None),
(u'MaintenanceWelcomeDlg', u'Cancel', u'PushButton', 304, 243, 56, 17, 3, None, u'[ButtonText_Cancel]', u'Bitmap', None),
(u'MaintenanceWelcomeDlg', u'Description', u'Text', 135, 70, 220, 60, 196611, None, u'The [Wizard] will allow you to change the way [ProductName] features are installed on your computer or even to remove [ProductName] from your computer. Click Next to continue or Cancel to exit the [Wizard].', None, None),
(u'MaintenanceWelcomeDlg', u'Title', u'Text', 135, 20, 220, 60, 196611, None, u'{\\VerdanaBold13}Welcome to the [ProductName] [Wizard]', None, None),
(u'MaintenanceWelcomeDlg', u'Back', u'PushButton', 180, 243, 56, 17, 1, None, u'[ButtonText_Back]', u'Next', None),
(u'MaintenanceWelcomeDlg', u'Next', u'PushButton', 236, 243, 56, 17, 3, None, u'[ButtonText_Next]', u'Cancel', None),
(u'OutOfDiskDlg', u'Text', u'Text', 20, 53, 330, 40, 3, None, u'The highlighted volumes do not have enough disk space available for the currently selected features. You can either remove some files from the highlighted volumes, or choose to install less features onto local drive(s), or select different destination drive(s).', None, None),
(u'OutOfDiskDlg', u'BannerBitmap', u'Bitmap', 0, 0, 374, 44, 1, None, u'[BannerBitmap]', u'OK', None),
(u'OutOfDiskDlg', u'BannerLine', u'Line', 0, 44, 374, 0, 1, None, None, None, None),
(u'OutOfDiskDlg', u'BottomLine', u'Line', 0, 234, 374, 0, 1, None, None, None, None),
(u'OutOfDiskDlg', u'Description', u'Text', 20, 20, 280, 20, 196611, None, u'Disk space required for the installation exceeds available disk space.', None, None),
(u'OutOfDiskDlg', u'OK', u'PushButton', 304, 243, 56, 17, 3, None, u'[ButtonText_OK]', u'BannerBitmap', None),
(u'OutOfDiskDlg', u'Title', u'Text', 15, 6, 200, 15, 196611, None, u'[DlgTitleFont]Out of Disk Space', None, None),
(u'OutOfDiskDlg', u'VolumeList', u'VolumeCostList', 20, 100, 330, 120, 393223, None, u'{120}{70}{70}{70}{70}', None, None),
(u'OutOfRbDiskDlg', u'Text', u'Text', 20, 53, 330, 40, 3, None, u'The highlighted volumes do not have enough disk space available for the currently selected features. You can either remove some files from the highlighted volumes, or choose to install less features onto local drive(s), or select different destination drive(s).', None, None),
(u'OutOfRbDiskDlg', u'BannerBitmap', u'Bitmap', 0, 0, 374, 44, 1, None, u'[BannerBitmap]', u'No', None),
(u'OutOfRbDiskDlg', u'BannerLine', u'Line', 0, 44, 374, 0, 1, None, None, None, None),
(u'OutOfRbDiskDlg', u'BottomLine', u'Line', 0, 234, 374, 0, 1, None, None, None, None),
(u'OutOfRbDiskDlg', u'Description', u'Text', 20, 20, 280, 20, 196611, None, u'Disk space required for the installation exceeds available disk space.', None, None),
(u'OutOfRbDiskDlg', u'Title', u'Text', 15, 6, 200, 15, 196611, None, u'[DlgTitleFont]Out of Disk Space', None, None),
(u'OutOfRbDiskDlg', u'No', u'PushButton', 304, 243, 56, 17, 3, None, u'[ButtonText_No]', u'Yes', None),
(u'OutOfRbDiskDlg', u'Yes', u'PushButton', 240, 243, 56, 17, 3, None, u'[ButtonText_Yes]', u'BannerBitmap', None),
(u'OutOfRbDiskDlg', u'VolumeList', u'VolumeCostList', 20, 140, 330, 80, 4587527, None, u'{120}{70}{70}{70}{70}', None, None),
(u'OutOfRbDiskDlg', u'Text2', u'Text', 20, 94, 330, 40, 3, None, u"Alternatively, you may choose to disable the installer's rollback functionality. This allows the installer to restore your computer's original state should the installation be interrupted in any way. Click Yes if you wish to take the risk to disable rollback.", None, None),
(u'ResumeDlg', u'Bitmap', u'Bitmap', 0, 0, 370, 234, 1, None, u'[DialogBitmap]', u'Back', None),
(u'ResumeDlg', u'BottomLine', u'Line', 0, 234, 374, 0, 1, None, None, None, None),
(u'ResumeDlg', u'Cancel', u'PushButton', 304, 243, 56, 17, 3, None, u'[ButtonText_Cancel]', u'Bitmap', None),
(u'ResumeDlg', u'Description', u'Text', 135, 70, 220, 30, 196611, None, u'The [Wizard] will complete the installation of [ProductName] on your computer. Click Install to continue or Cancel to exit the [Wizard].', None, None),
(u'ResumeDlg', u'Title', u'Text', 135, 20, 220, 60, 196611, None, u'{\\VerdanaBold13}Resuming the [ProductName] [Wizard]', None, None),
(u'ResumeDlg', u'Back', u'PushButton', 180, 243, 56, 17, 1, None, u'[ButtonText_Back]', u'Install', None),
(u'ResumeDlg', u'Install', u'PushButton', 236, 243, 56, 17, 3, None, u'[ButtonText_Install]', u'Cancel', None),
(u'SetupTypeDlg', u'BannerBitmap', u'Bitmap', 0, 0, 374, 44, 1, None, u'[BannerBitmap]', u'TypicalLabel', None),
(u'SetupTypeDlg', u'BannerLine', u'Line', 0, 44, 374, 0, 1, None, None, None, None),
(u'SetupTypeDlg', u'BottomLine', u'Line', 0, 234, 374, 0, 1, None, None, None, None),
(u'SetupTypeDlg', u'Cancel', u'PushButton', 304, 243, 56, 17, 3, None, u'[ButtonText_Cancel]', u'BannerBitmap', None),
(u'SetupTypeDlg', u'Description', u'Text', 25, 23, 280, 15, 196611, None, u'Choose the setup type that best suits your needs', None, None),
(u'SetupTypeDlg', u'Title', u'Text', 15, 6, 200, 15, 196611, None, u'[DlgTitleFont]Choose Setup Type', None, None),
(u'SetupTypeDlg', u'Back', u'PushButton', 180, 243, 56, 17, 3, None, u'[ButtonText_Back]', u'Next', None),
(u'SetupTypeDlg', u'Next', u'PushButton', 236, 243, 56, 17, 1, None, u'[ButtonText_Next]', u'Cancel', None),
(u'SetupTypeDlg', u'TypicalLabel', u'Text', 105, 65, 100, 10, 3, None, u'[DlgTitleFont]&Typical', u'TypicalButton', None),
(u'SetupTypeDlg', u'CompleteButton', u'PushButton', 50, 171, 38, 38, 5767171, None, u'[CompleteSetupIcon]', u'Back', u'Complete Installation|'),
(u'SetupTypeDlg', u'CompleteLabel', u'Text', 105, 171, 100, 10, 3, None, u'[DlgTitleFont]C&omplete', u'CompleteButton', None),
(u'SetupTypeDlg', u'CompleteText', u'Text', 105, 184, 230, 20, 3, None, u'All program features will be installed. (Requires most disk space)', None, None),
(u'SetupTypeDlg', u'CustomButton', u'PushButton', 50, 118, 38, 38, 5767171, None, u'[CustomSetupIcon]', u'CompleteLabel', u'Custom Installation|'),
(u'SetupTypeDlg', u'CustomLabel', u'Text', 105, 118, 100, 10, 3, None, u'[DlgTitleFont]C&ustom', u'CustomButton', None),
(u'SetupTypeDlg', u'CustomText', u'Text', 105, 131, 230, 30, 3, None, u'Allows users to choose which program features will be installed and where they will be installed. Recommended for advanced users.', None, None),
(u'SetupTypeDlg', u'TypicalButton', u'PushButton', 50, 65, 38, 38, 5767171, None, u'[InstallerIcon]', u'CustomLabel', u'Typical Installation|'),
(u'SetupTypeDlg', u'TypicalText', u'Text', 105, 78, 230, 20, 3, None, u'Installs the most common program features. Recommended for most users.', None, None),
(u'UserRegistrationDlg', u'BannerBitmap', u'Bitmap', 0, 0, 374, 44, 1, None, u'[BannerBitmap]', u'NameLabel', None),
(u'UserRegistrationDlg', u'BannerLine', u'Line', 0, 44, 374, 0, 1, None, None, None, None),
(u'UserRegistrationDlg', u'BottomLine', u'Line', 0, 234, 374, 0, 1, None, None, None, None),
(u'UserRegistrationDlg', u'Cancel', u'PushButton', 304, 243, 56, 17, 3, None, u'[ButtonText_Cancel]', u'BannerBitmap', None),
(u'UserRegistrationDlg', u'Description', u'Text', 25, 23, 280, 15, 196611, None, u'Please enter your customer information', None, None),
(u'UserRegistrationDlg', u'Title', u'Text', 15, 6, 200, 15, 196611, None, u'[DlgTitleFont]Customer Information', None, None),
(u'UserRegistrationDlg', u'Back', u'PushButton', 180, 243, 56, 17, 3, None, u'[ButtonText_Back]', u'Next', None),
(u'UserRegistrationDlg', u'Next', u'PushButton', 236, 243, 56, 17, 3, None, u'[ButtonText_Next]', u'Cancel', None),
(u'UserRegistrationDlg', u'OrganizationLabel', u'Text', 45, 110, 100, 15, 3, None, u'&Organization:', u'OrganizationEdit', None),
(u'UserRegistrationDlg', u'CDKeyEdit', u'MaskedEdit', 45, 159, 250, 16, 3, u'PIDKEY', u'[PIDTemplate]', u'Back', None),
(u'UserRegistrationDlg', u'CDKeyLabel', u'Text', 45, 147, 50, 10, 3, None, u'CD &Key:', u'CDKeyEdit', None),
(u'UserRegistrationDlg', u'OrganizationEdit', u'Edit', 45, 122, 220, 18, 3, u'COMPANYNAME', u'{80}', u'CDKeyLabel', None),
(u'UserRegistrationDlg', u'NameLabel', u'Text', 45, 73, 100, 15, 3, None, u'&User Name:', u'NameEdit', None),
(u'UserRegistrationDlg', u'NameEdit', u'Edit', 45, 85, 220, 18, 3, u'USERNAME', u'{80}', u'OrganizationLabel', None),
(u'VerifyReadyDlg', u'Text', u'Text', 25, 70, 320, 20, 3, None, u'Click Install to begin the installation. If you want to review or change any of your installation settings, click Back. Click Cancel to exit the wizard.', None, None),
(u'VerifyReadyDlg', u'BannerBitmap', u'Bitmap', 0, 0, 374, 44, 1, None, u'[BannerBitmap]', u'Back', None),
(u'VerifyReadyDlg', u'BannerLine', u'Line', 0, 44, 374, 0, 1, None, None, None, None),
(u'VerifyReadyDlg', u'BottomLine', u'Line', 0, 234, 374, 0, 1, None, None, None, None),
(u'VerifyReadyDlg', u'Cancel', u'PushButton', 304, 243, 56, 17, 3, None, u'[ButtonText_Cancel]', u'BannerBitmap', None),
(u'VerifyReadyDlg', u'Description', u'Text', 25, 23, 280, 15, 196611, None, u'The [Wizard] is ready to begin the [InstallMode] installation', None, None),
(u'VerifyReadyDlg', u'Title', u'Text', 15, 6, 200, 15, 196611, None, u'[DlgTitleFont]Ready to Install', None, None),
(u'VerifyReadyDlg', u'Back', u'PushButton', 180, 243, 56, 17, 3, None, u'[ButtonText_Back]', u'Install', None),
(u'VerifyReadyDlg', u'Install', u'PushButton', 236, 243, 56, 17, 3, None, u'[ButtonText_Install]', u'Cancel', None),
(u'VerifyRemoveDlg', u'Text', u'Text', 25, 70, 320, 30, 3, None, u'Click Remove to remove [ProductName] from your computer. If you want to review or change any of your installation settings, click Back. Click Cancel to exit the wizard.', None, None),
(u'VerifyRemoveDlg', u'BannerBitmap', u'Bitmap', 0, 0, 374, 44, 1, None, u'[BannerBitmap]', u'Back', None),
(u'VerifyRemoveDlg', u'BannerLine', u'Line', 0, 44, 374, 0, 1, None, None, None, None),
(u'VerifyRemoveDlg', u'BottomLine', u'Line', 0, 234, 374, 0, 1, None, None, None, None),
(u'VerifyRemoveDlg', u'Cancel', u'PushButton', 304, 243, 56, 17, 3, None, u'[ButtonText_Cancel]', u'BannerBitmap', None),
(u'VerifyRemoveDlg', u'Description', u'Text', 25, 23, 280, 15, 196611, None, u'You have chosen to remove the program from your computer.', None, None),
(u'VerifyRemoveDlg', u'Title', u'Text', 15, 6, 200, 15, 196611, None, u'[DlgTitleFont]Remove [ProductName]', None, None),
(u'VerifyRemoveDlg', u'Back', u'PushButton', 180, 243, 56, 17, 3, None, u'[ButtonText_Back]', u'Remove', None),
(u'VerifyRemoveDlg', u'Remove', u'PushButton', 236, 243, 56, 17, 3, None, u'[ButtonText_Remove]', u'Cancel', None),
(u'VerifyRepairDlg', u'Text', u'Text', 25, 70, 320, 30, 3, None, u'Click Repair to repair the installation of [ProductName]. If you want to review or change any of your installation settings, click Back. Click Cancel to exit the wizard.', None, None),
(u'VerifyRepairDlg', u'BannerBitmap', u'Bitmap', 0, 0, 374, 44, 1, None, u'[BannerBitmap]', u'Back', None),
(u'VerifyRepairDlg', u'BannerLine', u'Line', 0, 44, 374, 0, 1, None, None, None, None),
(u'VerifyRepairDlg', u'BottomLine', u'Line', 0, 234, 374, 0, 1, None, None, None, None),
(u'VerifyRepairDlg', u'Cancel', u'PushButton', 304, 243, 56, 17, 3, None, u'[ButtonText_Cancel]', u'BannerBitmap', None),
(u'VerifyRepairDlg', u'Description', u'Text', 25, 23, 280, 15, 196611, None, u'The [Wizard] is ready to begin the repair of [ProductName].', None, None),
(u'VerifyRepairDlg', u'Title', u'Text', 15, 6, 200, 15, 196611, None, u'[DlgTitleFont]Repair [ProductName]', None, None),
(u'VerifyRepairDlg', u'Back', u'PushButton', 180, 243, 56, 17, 3, None, u'[ButtonText_Back]', u'Repair', None),
(u'VerifyRepairDlg', u'Repair', u'PushButton', 236, 243, 56, 17, 3, None, u'[ButtonText_Repair]', u'Cancel', None),
(u'WaitForCostingDlg', u'Text', u'Text', 48, 15, 194, 30, 3, None, u'Please wait while the installer finishes determining your disk space requirements.', None, None),
(u'WaitForCostingDlg', u'Icon', u'Icon', 15, 15, 24, 24, 5242881, None, u'[ExclamationIcon]', None, u'Exclamation icon|'),
(u'WaitForCostingDlg', u'Return', u'PushButton', 102, 57, 56, 17, 3, None, u'[ButtonText_Return]', None, None),
(u'WelcomeDlg', u'Bitmap', u'Bitmap', 0, 0, 370, 234, 1, None, u'[DialogBitmap]', u'Back', None),
(u'WelcomeDlg', u'BottomLine', u'Line', 0, 234, 374, 0, 1, None, None, None, None),
(u'WelcomeDlg', u'Cancel', u'PushButton', 304, 243, 56, 17, 3, None, u'[ButtonText_Cancel]', u'Bitmap', None),
(u'WelcomeDlg', u'Description', u'Text', 135, 70, 220, 30, 196611, None, u'The [Wizard] will install [ProductName] on your computer. Click Next to continue or Cancel to exit the [Wizard].', None, None),
(u'WelcomeDlg', u'Title', u'Text', 135, 20, 220, 60, 196611, None, u'{\\VerdanaBold13}Welcome to the [ProductName] [Wizard]', None, None),
(u'WelcomeDlg', u'Back', u'PushButton', 180, 243, 56, 17, 1, None, u'[ButtonText_Back]', u'Next', None),
(u'WelcomeDlg', u'Next', u'PushButton', 236, 243, 56, 17, 3, None, u'[ButtonText_Next]', u'Cancel', None),
]

ListBox = [
]

ActionText = [
(u'InstallValidate', u'Validating install', None),
(u'InstallFiles', u'Copying new files', u'File: [1], Directory: [9], Size: [6]'),
(u'InstallAdminPackage', u'Copying network install files', u'File: [1], Directory: [9], Size: [6]'),
(u'FileCost', u'Computing space requirements', None),
(u'CostInitialize', u'Computing space requirements', None),
(u'CostFinalize', u'Computing space requirements', None),
(u'CreateShortcuts', u'Creating shortcuts', u'Shortcut: [1]'),
(u'PublishComponents', u'Publishing Qualified Components', u'Component ID: [1], Qualifier: [2]'),
(u'PublishFeatures', u'Publishing Product Features', u'Feature: [1]'),
(u'PublishProduct', u'Publishing product information', None),
(u'RegisterClassInfo', u'Registering Class servers', u'Class Id: [1]'),
(u'RegisterExtensionInfo', u'Registering extension servers', u'Extension: [1]'),
(u'RegisterMIMEInfo', u'Registering MIME info', u'MIME Content Type: [1], Extension: [2]'),
(u'RegisterProgIdInfo', u'Registering program identifiers', u'ProgId: [1]'),
(u'AllocateRegistrySpace', u'Allocating registry space', u'Free space: [1]'),
(u'AppSearch', u'Searching for installed applications', u'Property: [1], Signature: [2]'),
(u'BindImage', u'Binding executables', u'File: [1]'),
(u'CCPSearch', u'Searching for qualifying products', None),
(u'CreateFolders', u'Creating folders', u'Folder: [1]'),
(u'DeleteServices', u'Deleting services', u'Service: [1]'),
(u'DuplicateFiles', u'Creating duplicate files', u'File: [1], Directory: [9], Size: [6]'),
(u'FindRelatedProducts', u'Searching for related applications', u'Found application: [1]'),
(u'InstallODBC', u'Installing ODBC components', None),
(u'InstallServices', u'Installing new services', u'Service: [2]'),
(u'LaunchConditions', u'Evaluating launch conditions', None),
(u'MigrateFeatureStates', u'Migrating feature states from related applications', u'Application: [1]'),
(u'MoveFiles', u'Moving files', u'File: [1], Directory: [9], Size: [6]'),
(u'PatchFiles', u'Patching files', u'File: [1], Directory: [2], Size: [3]'),
(u'ProcessComponents', u'Updating component registration', None),
(u'RegisterComPlus', u'Registering COM+ Applications and Components', u'AppId: [1]{{, AppType: [2], Users: [3], RSN: [4]}}'),
(u'RegisterFonts', u'Registering fonts', u'Font: [1]'),
(u'RegisterProduct', u'Registering product', u'[1]'),
(u'RegisterTypeLibraries', u'Registering type libraries', u'LibID: [1]'),
(u'RegisterUser', u'Registering user', u'[1]'),
(u'RemoveDuplicateFiles', u'Removing duplicated files', u'File: [1], Directory: [9]'),
(u'RemoveEnvironmentStrings', u'Updating environment strings', u'Name: [1], Value: [2], Action [3]'),
(u'RemoveExistingProducts', u'Removing applications', u'Application: [1], Command line: [2]'),
(u'RemoveFiles', u'Removing files', u'File: [1], Directory: [9]'),
(u'RemoveFolders', u'Removing folders', u'Folder: [1]'),
(u'RemoveIniValues', u'Removing INI files entries', u'File: [1], Section: [2], Key: [3], Value: [4]'),
(u'RemoveODBC', u'Removing ODBC components', None),
(u'RemoveRegistryValues', u'Removing system registry values', u'Key: [1], Name: [2]'),
(u'RemoveShortcuts', u'Removing shortcuts', u'Shortcut: [1]'),
(u'RMCCPSearch', u'Searching for qualifying products', None),
(u'SelfRegModules', u'Registering modules', u'File: [1], Folder: [2]'),
(u'SelfUnregModules', u'Unregistering modules', u'File: [1], Folder: [2]'),
(u'SetODBCFolders', u'Initializing ODBC directories', None),
(u'StartServices', u'Starting services', u'Service: [1]'),
(u'StopServices', u'Stopping services', u'Service: [1]'),
(u'UnpublishComponents', u'Unpublishing Qualified Components', u'Component ID: [1], Qualifier: [2]'),
(u'UnpublishFeatures', u'Unpublishing Product Features', u'Feature: [1]'),
(u'UnregisterClassInfo', u'Unregister Class servers', u'Class Id: [1]'),
(u'UnregisterComPlus', u'Unregistering COM+ Applications and Components', u'AppId: [1]{{, AppType: [2]}}'),
(u'UnregisterExtensionInfo', u'Unregistering extension servers', u'Extension: [1]'),
(u'UnregisterFonts', u'Unregistering fonts', u'Font: [1]'),
(u'UnregisterMIMEInfo', u'Unregistering MIME info', u'MIME Content Type: [1], Extension: [2]'),
(u'UnregisterProgIdInfo', u'Unregistering program identifiers', u'ProgId: [1]'),
(u'UnregisterTypeLibraries', u'Unregistering type libraries', u'LibID: [1]'),
(u'WriteEnvironmentStrings', u'Updating environment strings', u'Name: [1], Value: [2], Action [3]'),
(u'WriteIniValues', u'Writing INI files values', u'File: [1], Section: [2], Key: [3], Value: [4]'),
(u'WriteRegistryValues', u'Writing system registry values', u'Key: [1], Name: [2], Value: [3]'),
(u'Advertise', u'Advertising application', None),
(u'GenerateScript', u'Generating script operations for action:', u'[1]'),
(u'InstallSFPCatalogFile', u'Installing system catalog', u'File: [1], Dependencies: [2]'),
(u'MsiPublishAssemblies', u'Publishing assembly information', u'Application Context:[1], Assembly Name:[2]'),
(u'MsiUnpublishAssemblies', u'Unpublishing assembly information', u'Application Context:[1], Assembly Name:[2]'),
(u'Rollback', u'Rolling back action:', u'[1]'),
(u'RollbackCleanup', u'Removing backup files', u'File: [1]'),
(u'UnmoveFiles', u'Removing moved files', u'File: [1], Directory: [9]'),
(u'UnpublishProduct', u'Unpublishing product information', None),
]

ControlCondition = [
(u'CustomizeDlg', u'Browse', u'Hide', u'Installed'),
(u'CustomizeDlg', u'Location', u'Hide', u'Installed'),
(u'CustomizeDlg', u'LocationLabel', u'Hide', u'Installed'),
(u'LicenseAgreementDlg', u'Next', u'Disable', u'IAgree <> "Yes"'),
(u'LicenseAgreementDlg', u'Next', u'Enable', u'IAgree = "Yes"'),
]
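# Illustrative sketch, not part of the original table dump: modules like this
# are normally consumed by feeding each table to msilib.add_data(), which
# inserts one database row per tuple. The helper name below is hypothetical;
# it assumes `db` is an MSI database handle obtained via msilib (Windows only).
def _add_ui_tables(db):
    # Each module-level global named like an MSI table is a list of row tuples.
    for table in (u'Property', u'Binary', u'Dialog', u'Control',
                  u'ControlCondition', u'ControlEvent', u'EventMapping',
                  u'RadioButton', u'TextStyle', u'UIText', u'ActionText'):
        msilib.add_data(db, table, globals()[table])
    db.Commit()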
ControlEvent = [
(u'AdminWelcomeDlg', u'Cancel', u'SpawnDialog', u'CancelDlg', u'1', None),
(u'AdminWelcomeDlg', u'Next', u'NewDialog', u'AdminRegistrationDlg', u'1', 2),
(u'AdminWelcomeDlg', u'Next', u'[InstallMode]', u'Server Image', u'1', 1),
(u'ExitDialog', u'Finish', u'EndDialog', u'Return', u'1', None),
(u'FatalError', u'Finish', u'EndDialog', u'Exit', u'1', None),
(u'PrepareDlg', u'Cancel', u'SpawnDialog', u'CancelDlg', u'1', None),
(u'ProgressDlg', u'Cancel', u'SpawnDialog', u'CancelDlg', u'1', None),
(u'UserExit', u'Finish', u'EndDialog', u'Exit', u'1', None),
(u'AdminBrowseDlg', u'Up', u'DirectoryListUp', u'0', u'1', None),
(u'AdminBrowseDlg', u'Cancel', u'Reset', u'0', u'1', 1),
(u'AdminBrowseDlg', u'Cancel', u'EndDialog', u'Return', u'1', 2),
(u'AdminBrowseDlg', u'NewFolder', u'DirectoryListNew', u'0', u'1', None),
(u'AdminBrowseDlg', u'OK', u'EndDialog', u'Return', u'1', 2),
(u'AdminBrowseDlg', u'OK', u'SetTargetPath', u'TARGETDIR', u'1', 1),
(u'AdminInstallPointDlg', u'Cancel', u'SpawnDialog', u'CancelDlg', u'1', None),
(u'AdminInstallPointDlg', u'Back', u'NewDialog', u'AdminRegistrationDlg', u'1', None),
(u'AdminInstallPointDlg', u'Next', u'SetTargetPath', u'TARGETDIR', u'1', 1),
(u'AdminInstallPointDlg', u'Next', u'NewDialog', u'VerifyReadyDlg', u'1', 2),
(u'AdminInstallPointDlg', u'Browse', u'SpawnDialog', u'AdminBrowseDlg', u'1', None),
(u'AdminRegistrationDlg', u'Cancel', u'SpawnDialog', u'CancelDlg', u'1', None),
(u'AdminRegistrationDlg', u'Back', u'NewDialog', u'AdminWelcomeDlg', u'1', None),
(u'AdminRegistrationDlg', u'Next', u'NewDialog', u'AdminInstallPointDlg', u'ProductID', 2),
(u'AdminRegistrationDlg', u'Next', u'ValidateProductID', u'0', u'0', 1),
(u'BrowseDlg', u'Up', u'DirectoryListUp', u'0', u'1', None),
(u'BrowseDlg', u'Cancel', u'Reset', u'0', u'1', 1),
(u'BrowseDlg', u'Cancel', u'EndDialog', u'Return', u'1', 2),
(u'BrowseDlg', u'NewFolder', u'DirectoryListNew', u'0', u'1', None),
(u'BrowseDlg', u'OK', u'EndDialog', u'Return', u'1', 2),
(u'BrowseDlg', u'OK', u'SetTargetPath', u'[_BrowseProperty]', u'1', 1),
(u'CancelDlg', u'No', u'EndDialog', u'Return', u'1', None),
(u'CancelDlg', u'Yes', u'EndDialog', u'Exit', u'1', None),
(u'CustomizeDlg', u'Cancel', u'SpawnDialog', u'CancelDlg', u'1', None),
(u'CustomizeDlg', u'Back', u'NewDialog', u'MaintenanceTypeDlg', u'InstallMode = "Change"', None),
(u'CustomizeDlg', u'Back', u'NewDialog', u'SetupTypeDlg', u'InstallMode = "Custom"', None),
(u'CustomizeDlg', u'Next', u'NewDialog', u'VerifyReadyDlg', u'1', None),
(u'CustomizeDlg', u'Browse', u'SelectionBrowse', u'BrowseDlg', u'1', None),
(u'CustomizeDlg', u'Reset', u'Reset', u'0', u'1', None),
(u'CustomizeDlg', u'DiskCost', u'SpawnDialog', u'DiskCostDlg', u'1', 2),
(u'DiskCostDlg', u'OK', u'EndDialog', u'Return', u'1', None),
(u'ErrorDlg', u'Y', u'EndDialog', u'ErrorYes', u'1', None),
(u'ErrorDlg', u'A', u'EndDialog', u'ErrorAbort', u'1', None),
(u'ErrorDlg', u'C', u'EndDialog', u'ErrorCancel', u'1', None),
(u'ErrorDlg', u'I', u'EndDialog', u'ErrorIgnore', u'1', None),
(u'ErrorDlg', u'N', u'EndDialog', u'ErrorNo', u'1', None),
(u'ErrorDlg', u'O', u'EndDialog', u'ErrorOk', u'1', None),
(u'ErrorDlg', u'R', u'EndDialog', u'ErrorRetry', u'1', None),
(u'FilesInUse', u'Retry', u'EndDialog', u'Retry', u'1', None),
(u'FilesInUse', u'Exit', u'EndDialog', u'Exit', u'1', None),
(u'FilesInUse', u'Ignore', u'EndDialog', u'Ignore', u'1', None),
(u'LicenseAgreementDlg', u'Cancel', u'SpawnDialog', u'CancelDlg', u'1', None),
(u'LicenseAgreementDlg', u'Back', u'NewDialog', u'WelcomeDlg', u'1', None),
(u'LicenseAgreementDlg', u'Next', u'NewDialog', u'SetupTypeDlg', u'IAgree = "Yes" AND ShowUserRegistrationDlg <> 1', 3),
(u'LicenseAgreementDlg', u'Next', u'NewDialog', u'UserRegistrationDlg', u'IAgree = "Yes" AND ShowUserRegistrationDlg = 1', 1),
(u'LicenseAgreementDlg', u'Next', u'SpawnWaitDialog', u'WaitForCostingDlg', u'CostingComplete = 1', 2),
(u'MaintenanceTypeDlg', u'Cancel', u'SpawnDialog', u'CancelDlg', u'1', None),
(u'MaintenanceTypeDlg', u'Back', u'NewDialog', u'MaintenanceWelcomeDlg', u'1', None),
(u'MaintenanceTypeDlg', u'ChangeButton', u'NewDialog', u'CustomizeDlg', u'1', 4),
(u'MaintenanceTypeDlg', u'ChangeButton', u'[InstallMode]', u'Change', u'1', 1),
(u'MaintenanceTypeDlg', u'ChangeButton', u'[Progress1]', u'Changing', u'1', 2),
(u'MaintenanceTypeDlg', u'ChangeButton', u'[Progress2]', u'changes', u'1', 3),
(u'MaintenanceTypeDlg', u'RemoveButton', u'NewDialog', u'VerifyRemoveDlg', u'1', 4),
(u'MaintenanceTypeDlg', u'RemoveButton', u'[InstallMode]', u'Remove', u'1', 1),
(u'MaintenanceTypeDlg', u'RemoveButton', u'[Progress1]', u'Removing', u'1', 2),
(u'MaintenanceTypeDlg', u'RemoveButton', u'[Progress2]', u'removes', u'1', 3),
(u'MaintenanceTypeDlg', u'RepairButton', u'NewDialog', u'VerifyRepairDlg', u'1', 4),
(u'MaintenanceTypeDlg', u'RepairButton', u'[InstallMode]', u'Repair', u'1', 1),
(u'MaintenanceTypeDlg', u'RepairButton', u'[Progress1]', u'Repairing', u'1', 2),
(u'MaintenanceTypeDlg', u'RepairButton', u'[Progress2]', u'repairs', u'1', 3),
(u'MaintenanceWelcomeDlg', u'Cancel', u'SpawnDialog', u'CancelDlg', u'1', None),
(u'MaintenanceWelcomeDlg', u'Next', u'NewDialog', u'MaintenanceTypeDlg', u'1', 2),
(u'MaintenanceWelcomeDlg', u'Next', u'SpawnWaitDialog', u'WaitForCostingDlg', u'CostingComplete = 1', 1),
(u'OutOfDiskDlg', u'OK', u'EndDialog', u'Return', u'1', None),
(u'OutOfRbDiskDlg', u'No', u'EndDialog', u'Return', u'1', None),
(u'OutOfRbDiskDlg', u'Yes', u'EndDialog', u'Return', u'1', 2),
(u'OutOfRbDiskDlg', u'Yes', u'EnableRollback', u'False', u'1', 1),
(u'ResumeDlg', u'Cancel', u'SpawnDialog', u'CancelDlg', u'1', None),
(u'ResumeDlg', u'Install', u'EndDialog', u'Return', u'OutOfDiskSpace = 1 AND OutOfNoRbDiskSpace = 0 AND PROMPTROLLBACKCOST="D"', 4),
(u'ResumeDlg', u'Install', u'EndDialog', u'Return', u'OutOfDiskSpace <> 1', 2),
(u'ResumeDlg', u'Install', u'SpawnDialog', u'OutOfDiskDlg', u'(OutOfDiskSpace = 1 AND OutOfNoRbDiskSpace = 1) OR (OutOfDiskSpace = 1 AND PROMPTROLLBACKCOST="F")', 6),
(u'ResumeDlg', u'Install', u'SpawnDialog', u'OutOfRbDiskDlg', u'OutOfDiskSpace = 1 AND OutOfNoRbDiskSpace = 0 AND (PROMPTROLLBACKCOST="P" OR NOT PROMPTROLLBACKCOST)', 3),
(u'ResumeDlg', u'Install', u'SpawnWaitDialog', u'WaitForCostingDlg', u'CostingComplete = 1', 1),
(u'ResumeDlg', u'Install', u'EnableRollback', u'False', u'OutOfDiskSpace = 1 AND OutOfNoRbDiskSpace = 0 AND PROMPTROLLBACKCOST="D"', 5),
(u'SetupTypeDlg', u'Cancel', u'SpawnDialog', u'CancelDlg', u'1', None),
(u'SetupTypeDlg', u'Back', u'NewDialog', u'LicenseAgreementDlg', u'ShowUserRegistrationDlg <> 1', None),
(u'SetupTypeDlg', u'Back', u'NewDialog', u'UserRegistrationDlg', u'ShowUserRegistrationDlg = 1', None),
(u'SetupTypeDlg', u'CompleteButton', u'NewDialog', u'VerifyReadyDlg', u'1', 3),
(u'SetupTypeDlg', u'CompleteButton', u'[InstallMode]', u'Complete', u'1', 1),
(u'SetupTypeDlg', u'CompleteButton', u'SetInstallLevel', u'1000', u'1', 2),
(u'SetupTypeDlg', u'CustomButton', u'NewDialog', u'CustomizeDlg', u'1', 2),
(u'SetupTypeDlg', u'CustomButton', u'[InstallMode]', u'Custom', u'1', 1),
(u'SetupTypeDlg', u'TypicalButton', u'NewDialog', u'VerifyReadyDlg', u'1', 3),
(u'SetupTypeDlg', u'TypicalButton', u'[InstallMode]', u'Typical', u'1', 1),
(u'SetupTypeDlg', u'TypicalButton', u'SetInstallLevel', u'3', u'1', 2),
(u'UserRegistrationDlg', u'Cancel', u'SpawnDialog', u'CancelDlg', u'1', None),
(u'UserRegistrationDlg', u'Back', u'NewDialog', u'LicenseAgreementDlg', u'1', None),
(u'UserRegistrationDlg', u'Next', u'NewDialog', u'SetupTypeDlg', u'ProductID', 3),
(u'UserRegistrationDlg', u'Next', u'ValidateProductID', u'0', u'0', 1),
(u'UserRegistrationDlg', u'Next', u'SpawnWaitDialog', u'WaitForCostingDlg', u'CostingComplete = 1', 2),
(u'VerifyReadyDlg', u'Cancel', u'SpawnDialog', u'CancelDlg', u'1', None),
(u'VerifyReadyDlg', u'Back', u'NewDialog', u'AdminInstallPointDlg', u'InstallMode = "Server Image"', None),
(u'VerifyReadyDlg', u'Back', u'NewDialog', u'CustomizeDlg', u'InstallMode = "Custom" OR InstallMode = "Change"', None),
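# ControlEvent rows are (Dialog_, Control_, Event, Argument, Condition,
# Ordering): activating the control fires every row whose Condition holds,
# in ascending Ordering; an Event written as [PropertyName] sets that
# property to Argument instead of running a built-in event.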
(u'VerifyReadyDlg', u'Back', u'NewDialog', u'MaintenanceTypeDlg', u'InstallMode = "Repair"', None),
(u'VerifyReadyDlg', u'Back', u'NewDialog', u'SetupTypeDlg', u'InstallMode = "Typical" OR InstallMode = "Complete"', None),
(u'VerifyReadyDlg', u'Install', u'EndDialog', u'Return', u'OutOfDiskSpace = 1 AND OutOfNoRbDiskSpace = 0 AND PROMPTROLLBACKCOST="D"', 3),
(u'VerifyReadyDlg', u'Install', u'EndDialog', u'Return', u'OutOfDiskSpace <> 1', 1),
(u'VerifyReadyDlg', u'Install', u'SpawnDialog', u'OutOfDiskDlg', u'(OutOfDiskSpace = 1 AND OutOfNoRbDiskSpace = 1) OR (OutOfDiskSpace = 1 AND PROMPTROLLBACKCOST="F")', 5),
(u'VerifyReadyDlg', u'Install', u'SpawnDialog', u'OutOfRbDiskDlg', u'OutOfDiskSpace = 1 AND OutOfNoRbDiskSpace = 0 AND (PROMPTROLLBACKCOST="P" OR NOT PROMPTROLLBACKCOST)', 2),
(u'VerifyReadyDlg', u'Install', u'EnableRollback', u'False', u'OutOfDiskSpace = 1 AND OutOfNoRbDiskSpace = 0 AND PROMPTROLLBACKCOST="D"', 4),
(u'VerifyRemoveDlg', u'Cancel', u'SpawnDialog', u'CancelDlg', u'1', None),
(u'VerifyRemoveDlg', u'Back', u'NewDialog', u'MaintenanceTypeDlg', u'1', None),
(u'VerifyRemoveDlg', u'Remove', u'Remove', u'All', u'OutOfDiskSpace <> 1', 1),
(u'VerifyRemoveDlg', u'Remove', u'EndDialog', u'Return', u'OutOfDiskSpace = 1 AND OutOfNoRbDiskSpace = 0 AND PROMPTROLLBACKCOST="D"', 4),
(u'VerifyRemoveDlg', u'Remove', u'EndDialog', u'Return', u'OutOfDiskSpace <> 1', 2),
(u'VerifyRemoveDlg', u'Remove', u'SpawnDialog', u'OutOfDiskDlg', u'(OutOfDiskSpace = 1 AND OutOfNoRbDiskSpace = 1) OR (OutOfDiskSpace = 1 AND PROMPTROLLBACKCOST="F")', 6),
(u'VerifyRemoveDlg', u'Remove', u'SpawnDialog', u'OutOfRbDiskDlg', u'OutOfDiskSpace = 1 AND OutOfNoRbDiskSpace = 0 AND (PROMPTROLLBACKCOST="P" OR NOT PROMPTROLLBACKCOST)', 3),
(u'VerifyRemoveDlg', u'Remove', u'EnableRollback', u'False', u'OutOfDiskSpace = 1 AND OutOfNoRbDiskSpace = 0 AND PROMPTROLLBACKCOST="D"', 5),
(u'VerifyRepairDlg', u'Cancel', u'SpawnDialog', u'CancelDlg', u'1', None),
(u'VerifyRepairDlg', u'Back', u'NewDialog', u'MaintenanceTypeDlg', u'1', None),
(u'VerifyRepairDlg', u'Repair', u'EndDialog', u'Return', u'OutOfDiskSpace = 1 AND OutOfNoRbDiskSpace = 0 AND PROMPTROLLBACKCOST="D"', 5),
(u'VerifyRepairDlg', u'Repair', u'EndDialog', u'Return', u'OutOfDiskSpace <> 1', 3),
(u'VerifyRepairDlg', u'Repair', u'SpawnDialog', u'OutOfDiskDlg', u'(OutOfDiskSpace = 1 AND OutOfNoRbDiskSpace = 1) OR (OutOfDiskSpace = 1 AND PROMPTROLLBACKCOST="F")', 7),
(u'VerifyRepairDlg', u'Repair', u'SpawnDialog', u'OutOfRbDiskDlg', u'OutOfDiskSpace = 1 AND OutOfNoRbDiskSpace = 0 AND (PROMPTROLLBACKCOST="P" OR NOT PROMPTROLLBACKCOST)', 4),
(u'VerifyRepairDlg', u'Repair', u'EnableRollback', u'False', u'OutOfDiskSpace = 1 AND OutOfNoRbDiskSpace = 0 AND PROMPTROLLBACKCOST="D"', 6),
(u'VerifyRepairDlg', u'Repair', u'Reinstall', u'All', u'OutOfDiskSpace <> 1', 2),
(u'VerifyRepairDlg', u'Repair', u'ReinstallMode', u'ecmus', u'OutOfDiskSpace <> 1', 1),
(u'WaitForCostingDlg', u'Return', u'EndDialog', u'Exit', u'1', None),
(u'WelcomeDlg', u'Cancel', u'SpawnDialog', u'CancelDlg', u'1', None),
(u'WelcomeDlg', u'Next', u'NewDialog', u'LicenseAgreementDlg', u'1', None),
]

Dialog = [
(u'AdminWelcomeDlg', 50, 50, 370, 270, 3, u'[ProductName] [Setup]', u'Next', u'Next', u'Cancel'),
(u'ExitDialog', 50, 50, 370, 270, 3, u'[ProductName] [Setup]', u'Finish', u'Finish', u'Finish'),
(u'FatalError', 50, 50, 370, 270, 3, u'[ProductName] [Setup]', u'Finish', u'Finish', u'Finish'),
(u'PrepareDlg', 50, 50, 370, 270, 1, u'[ProductName] [Setup]', u'Cancel', u'Cancel', u'Cancel'),
(u'ProgressDlg', 50, 50, 370, 270, 1, u'[ProductName] [Setup]', u'Cancel', u'Cancel', u'Cancel'),
(u'UserExit', 50, 50, 370, 270, 3, u'[ProductName] [Setup]', u'Finish', u'Finish', u'Finish'),
(u'AdminBrowseDlg', 50, 50, 370, 270, 3, u'[ProductName] [Setup]', u'PathEdit', u'OK', u'Cancel'),
(u'AdminInstallPointDlg', 50, 50, 370, 270, 3, u'[ProductName] [Setup]', u'Text', u'Next', u'Cancel'),
(u'AdminRegistrationDlg', 50, 50, 370, 270, 3, u'[ProductName] [Setup]', u'OrganizationLabel', u'Next', u'Cancel'),
(u'BrowseDlg', 50, 50, 370, 270, 3, u'[ProductName] [Setup]', u'PathEdit', u'OK', u'Cancel'),
(u'CancelDlg', 50, 10, 260, 85, 3, u'[ProductName] [Setup]', u'No', u'No', u'No'),
(u'CustomizeDlg', 50, 50, 370, 270, 35, u'[ProductName] [Setup]', u'Tree', u'Next', u'Cancel'),
(u'DiskCostDlg', 50, 50, 370, 270, 3, u'[ProductName] [Setup]', u'OK', u'OK', u'OK'),
(u'ErrorDlg', 50, 10, 270, 105, 65539, u'Installer Information', u'ErrorText', None, None),
(u'FilesInUse', 50, 50, 370, 270, 19, u'[ProductName] [Setup]', u'Retry', u'Retry', u'Retry'),
(u'LicenseAgreementDlg', 50, 50, 370, 270, 3, u'[ProductName] License Agreement', u'Buttons', u'Next', u'Cancel'),
(u'MaintenanceTypeDlg', 50, 50, 370, 270, 3, u'[ProductName] [Setup]', u'ChangeLabel', u'ChangeButton', u'Cancel'),
(u'MaintenanceWelcomeDlg', 50, 50, 370, 270, 3, u'[ProductName] [Setup]', u'Next', u'Next', u'Cancel'),
(u'OutOfDiskDlg', 50, 50, 370, 270, 3, u'[ProductName] [Setup]', u'OK', u'OK', u'OK'),
(u'OutOfRbDiskDlg', 50, 50, 370, 270, 3, u'[ProductName] [Setup]', u'No', u'No', u'No'),
(u'ResumeDlg', 50, 50, 370, 270, 3, u'[ProductName] [Setup]', u'Install', u'Install', u'Cancel'),
(u'SetupTypeDlg', 50, 50, 370, 270, 3, u'[ProductName] [Setup]', u'TypicalLabel', u'TypicalButton', u'Cancel'),
(u'UserRegistrationDlg', 50, 50, 370, 270, 3, u'[ProductName] [Setup]', u'NameLabel', u'Next', u'Cancel'),
(u'VerifyReadyDlg', 50, 50, 370, 270, 35, u'[ProductName] [Setup]', u'Install', u'Install', u'Cancel'),
(u'VerifyRemoveDlg', 50, 50, 370, 270, 35, u'[ProductName] [Setup]', u'Back', u'Back', u'Cancel'),
(u'VerifyRepairDlg', 50, 50, 370, 270, 35, u'[ProductName] [Setup]', u'Repair', u'Repair', u'Cancel'),
(u'WaitForCostingDlg', 50, 10, 260, 85, 3, u'[ProductName] [Setup]', u'Return', u'Return', u'Return'),
(u'WelcomeDlg', 50, 50, 370, 270, 3, u'[ProductName] [Setup]', u'Next', u'Next', u'Cancel'),
]

EventMapping = [
(u'PrepareDlg', u'ActionData', u'ActionData', u'Text'),
(u'PrepareDlg', u'ActionText', u'ActionText', u'Text'),
(u'ProgressDlg', u'ActionText', u'ActionText', u'Text'),
(u'ProgressDlg', u'ProgressBar', u'SetProgress', u'Progress'),
(u'AdminBrowseDlg', u'DirectoryCombo', u'IgnoreChange', u'IgnoreChange'),
(u'BrowseDlg', u'DirectoryCombo', u'IgnoreChange', u'IgnoreChange'),
(u'CustomizeDlg', u'Next', u'SelectionNoItems', u'Enabled'),
(u'CustomizeDlg', u'Reset', u'SelectionNoItems', u'Enabled'),
(u'CustomizeDlg', u'DiskCost', u'SelectionNoItems', u'Enabled'),
(u'CustomizeDlg', u'ItemDescription', u'SelectionDescription', u'Text'),
(u'CustomizeDlg', u'ItemSize', u'SelectionSize', u'Text'),
(u'CustomizeDlg', u'Location', u'SelectionPath', u'Text'),
(u'CustomizeDlg', u'Location', u'SelectionPathOn', u'Visible'),
(u'CustomizeDlg', u'LocationLabel', u'SelectionPathOn', u'Visible'),
]

InstallExecuteSequence = [
(u'InstallValidate', None, 1400),
(u'InstallInitialize', None, 1500),
(u'InstallFinalize', None, 6600),
(u'InstallFiles', None, 4000),
(u'FileCost', None, 900),
(u'CostInitialize', None, 800),
(u'CostFinalize', None, 1000),
(u'CreateShortcuts', None, 4500),
(u'PublishComponents', None, 6200),
(u'PublishFeatures', None, 6300),
(u'PublishProduct', None, 6400),
(u'RegisterClassInfo', None, 4600),
(u'RegisterExtensionInfo', None, 4700),
(u'RegisterMIMEInfo', None, 4900),
(u'RegisterProgIdInfo', None, 4800),
(u'ValidateProductID', None, 700),
(u'AllocateRegistrySpace', u'NOT Installed', 1550),
(u'AppSearch', None, 400),
(u'BindImage', None, 4300),
(u'CCPSearch', u'NOT Installed', 500),
(u'CreateFolders', None, 3700),
(u'DeleteServices', u'VersionNT', 2000),
(u'DuplicateFiles', None, 4210),
(u'FindRelatedProducts', None, 200),
(u'InstallODBC', None, 5400),
(u'InstallServices', u'VersionNT', 5800),
(u'LaunchConditions', None, 100),
(u'MigrateFeatureStates', None, 1200),
(u'MoveFiles', None, 3800),
(u'PatchFiles', None, 4090),
(u'ProcessComponents', None, 1600),
(u'RegisterComPlus', None, 5700),
(u'RegisterFonts', None, 5300),
(u'RegisterProduct', None, 6100),
(u'RegisterTypeLibraries', None, 5500),
(u'RegisterUser', None, 6000),
(u'RemoveDuplicateFiles', None, 3400),
(u'RemoveEnvironmentStrings', None, 3300),
(u'RemoveExistingProducts', None, 6700),
(u'RemoveFiles', None, 3500),
(u'RemoveFolders', None, 3600),
(u'RemoveIniValues', None, 3100),
(u'RemoveODBC', None, 2400),
(u'RemoveRegistryValues', None, 2600),
(u'RemoveShortcuts', None, 3200),
(u'RMCCPSearch', u'NOT Installed', 600),
(u'SelfRegModules', None, 5600),
(u'SelfUnregModules', None, 2200),
(u'SetODBCFolders', None, 1100),
(u'StartServices', u'VersionNT', 5900),
(u'StopServices', u'VersionNT', 1900),
(u'UnpublishComponents', None, 1700),
(u'UnpublishFeatures', None, 1800),
(u'UnregisterClassInfo', None, 2700),
(u'UnregisterComPlus', None, 2100),
(u'UnregisterExtensionInfo', None, 2800),
(u'UnregisterFonts', None, 2500),
(u'UnregisterMIMEInfo', None, 3000),
(u'UnregisterProgIdInfo', None, 2900),
(u'UnregisterTypeLibraries', None, 2300),
(u'WriteEnvironmentStrings', None, 5200),
(u'WriteIniValues', None, 5100),
(u'WriteRegistryValues', None, 5000),
]

InstallUISequence = [
#(u'FileCost', None, 900),
#(u'CostInitialize', None, 800),
#(u'CostFinalize', None, 1000),
#(u'ExecuteAction', None, 1300),
#(u'ExitDialog', None, -1),
#(u'FatalError', None, -3),
(u'PrepareDlg', None, 140),
(u'ProgressDlg', None, 1280),
#(u'UserExit', None, -2),
(u'MaintenanceWelcomeDlg', u'Installed AND NOT RESUME AND NOT Preselected', 1250),
(u'ResumeDlg', u'Installed AND (RESUME OR Preselected)', 1240),
(u'WelcomeDlg', u'NOT Installed', 1230),
#(u'AppSearch', None, 400),
#(u'CCPSearch', u'NOT Installed', 500),
#(u'FindRelatedProducts', None, 200),
#(u'LaunchConditions', None, 100),
#(u'MigrateFeatureStates', None, 1200),
#(u'RMCCPSearch', u'NOT Installed', 600),
]

ListView = [
]

RadioButton = [
(u'IAgree', 1, u'Yes', 5, 0, 250, 15, u'{\\DlgFont8}I &accept the terms in the License Agreement', None),
(u'IAgree', 2, u'No', 5, 20, 250, 15, u'{\\DlgFont8}I &do not accept the terms in the License Agreement', None),
]

TextStyle = [
(u'DlgFont8', u'Tahoma', 8, None, 0),
(u'DlgFontBold8', u'Tahoma', 8, None, 1),
(u'VerdanaBold13', u'Verdana', 13, None, 1),
]
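# UIText supplies the short strings substituted into the selection and
# costing dialogs; the _Validation table further below documents each
# table column (nullability, ranges, key references) for validation tools.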
    (u'MenuLocal', u'Will be installed on local hard drive'),
    (u'MenuNetwork', u'Will be installed to run from network'),
    (u'ScriptInProgress', u'Gathering required information...'),
    (u'SelAbsentAbsent', u'This feature will remain uninstalled'),
    (u'SelAbsentAdvertise', u'This feature will be set to be installed when required'),
    (u'SelAbsentCD', u'This feature will be installed to run from CD'),
    (u'SelAbsentLocal', u'This feature will be installed on the local hard drive'),
    (u'SelAbsentNetwork', u'This feature will be installed to run from the network'),
    (u'SelAdvertiseAbsent', u'This feature will become unavailable'),
    (u'SelAdvertiseAdvertise', u'Will be installed when required'),
    (u'SelAdvertiseCD', u'This feature will be available to run from CD'),
    (u'SelAdvertiseLocal', u'This feature will be installed on your local hard drive'),
    (u'SelAdvertiseNetwork', u'This feature will be available to run from the network'),
    (u'SelCDAbsent', u"This feature will be uninstalled completely, you won't be able to run it from CD"),
    (u'SelCDAdvertise', u'This feature will change from run from CD state to set to be installed when required'),
    (u'SelCDCD', u'This feature will remain to be run from CD'),
    (u'SelCDLocal', u'This feature will change from run from CD state to be installed on the local hard drive'),
    (u'SelChildCostNeg', u'This feature frees up [1] on your hard drive.'),
    (u'SelChildCostPos', u'This feature requires [1] on your hard drive.'),
    (u'SelCostPending', u'Compiling cost for this feature...'),
    (u'SelLocalAbsent', u'This feature will be completely removed'),
    (u'SelLocalAdvertise', u'This feature will be removed from your local hard drive, but will be set to be installed when required'),
    (u'SelLocalCD', u'This feature will be removed from your local hard drive, but will be still available to run from CD'),
    (u'SelLocalLocal', u'This feature will remain on your local hard drive'),
    (u'SelLocalNetwork', u'This feature will be removed from your local hard drive, but will be still available to run from the network'),
    (u'SelNetworkAbsent', u"This feature will be uninstalled completely, you won't be able to run it from the network"),
    (u'SelNetworkAdvertise', u'This feature will change from run from network state to set to be installed when required'),
    (u'SelNetworkLocal', u'This feature will change from run from network state to be installed on the local hard drive'),
    (u'SelNetworkNetwork', u'This feature will remain to be run from the network'),
    (u'SelParentCostNegNeg', u'This feature frees up [1] on your hard drive. It has [2] of [3] subfeatures selected. The subfeatures free up [4] on your hard drive.'),
    (u'SelParentCostNegPos', u'This feature frees up [1] on your hard drive. It has [2] of [3] subfeatures selected. The subfeatures require [4] on your hard drive.'),
    (u'SelParentCostPosNeg', u'This feature requires [1] on your hard drive. It has [2] of [3] subfeatures selected. The subfeatures free up [4] on your hard drive.'),
    (u'SelParentCostPosPos', u'This feature requires [1] on your hard drive. It has [2] of [3] subfeatures selected. The subfeatures require [4] on your hard drive.'),
    (u'TimeRemaining', u'Time remaining: {[1] minutes }{[2] seconds}'),
    (u'VolumeCostAvailable', u'Available'),
    (u'VolumeCostDifference', u'Difference'),
    (u'VolumeCostRequired', u'Required'),
    (u'VolumeCostSize', u'Disk Size'),
    (u'VolumeCostVolume', u'Volume'),
]

_Validation = [
    (u'AdminExecuteSequence', u'Action', u'N', None, None, None, None, u'Identifier', None, u'Name of action to invoke, either in the engine or the handler DLL.'),
    (u'AdminExecuteSequence', u'Sequence', u'Y', -4, 32767, None, None, None, None, u'Number that determines the sort order in which the actions are to be executed. Leave blank to suppress action.'),
    (u'AdminExecuteSequence', u'Condition', u'Y', None, None, None, None, u'Condition', None, u'Optional expression which skips the action if it evaluates to expFalse. If the expression syntax is invalid, the engine will terminate, returning iesBadActionData.'),
    (u'AdminUISequence', u'Action', u'N', None, None, None, None, u'Identifier', None, u'Name of action to invoke, either in the engine or the handler DLL.'),
    (u'AdminUISequence', u'Sequence', u'Y', -4, 32767, None, None, None, None, u'Number that determines the sort order in which the actions are to be executed. Leave blank to suppress action.'),
    (u'AdminUISequence', u'Condition', u'Y', None, None, None, None, u'Condition', None, u'Optional expression which skips the action if it evaluates to expFalse. If the expression syntax is invalid, the engine will terminate, returning iesBadActionData.'),
    (u'Condition', u'Condition', u'Y', None, None, None, None, u'Condition', None, u'Expression evaluated to determine if Level in the Feature table is to change.'),
    (u'Condition', u'Feature_', u'N', None, None, u'Feature', 1, u'Identifier', None, u'Reference to a Feature entry in Feature table.'),
    (u'Condition', u'Level', u'N', 0, 32767, None, None, None, None, u'New selection Level to set in Feature table if Condition evaluates to TRUE.'),
    (u'AdvtExecuteSequence', u'Action', u'N', None, None, None, None, u'Identifier', None, u'Name of action to invoke, either in the engine or the handler DLL.'),
    (u'AdvtExecuteSequence', u'Sequence', u'Y', -4, 32767, None, None, None, None, u'Number that determines the sort order in which the actions are to be executed. Leave blank to suppress action.'),
    (u'AdvtExecuteSequence', u'Condition', u'Y', None, None, None, None, u'Condition', None, u'Optional expression which skips the action if it evaluates to expFalse. If the expression syntax is invalid, the engine will terminate, returning iesBadActionData.'),
    (u'BBControl', u'Type', u'N', None, None, None, None, u'Identifier', None, u'The type of the control.'),
    (u'BBControl', u'BBControl', u'N', None, None, None, None, u'Identifier', None, u'Name of the control. This name must be unique within a billboard, but can repeat on different billboards.'),
    (u'BBControl', u'Billboard_', u'N', None, None, u'Billboard', 1, u'Identifier', None, u'External key to the Billboard table, name of the billboard.'),
    (u'BBControl', u'X', u'N', 0, 32767, None, None, None, None, u'Horizontal coordinate of the upper left corner of the bounding rectangle of the control.'),
    (u'BBControl', u'Y', u'N', 0, 32767, None, None, None, None, u'Vertical coordinate of the upper left corner of the bounding rectangle of the control.'),
    (u'BBControl', u'Width', u'N', 0, 32767, None, None, None, None, u'Width of the bounding rectangle of the control.'),
    (u'BBControl', u'Height', u'N', 0, 32767, None, None, None, None, u'Height of the bounding rectangle of the control.'),
    (u'BBControl', u'Attributes', u'Y', 0, 2147483647, None, None, None, None, u'A 32-bit word that specifies the attribute flags to be applied to this control.'),
    (u'BBControl', u'Text', u'Y', None, None, None, None, u'Text', None, u'A string used to set the initial text contained within a control (if appropriate).'),
    (u'Billboard', u'Action', u'Y', None, None, None, None, u'Identifier', None, u'The name of an action. The billboard is displayed during the progress messages received from this action.'),
    (u'Billboard', u'Billboard', u'N', None, None, None, None, u'Identifier', None, u'Name of the billboard.'),
    (u'Billboard', u'Feature_', u'N', None, None, u'Feature', 1, u'Identifier', None, u'An external key to the Feature Table. The billboard is shown only if this feature is being installed.'),
    (u'Billboard', u'Ordering', u'Y', 0, 32767, None, None, None, None, u'A positive integer. If there is more than one billboard corresponding to an action they will be shown in the order defined by this column.'),
    (u'Binary', u'Name', u'N', None, None, None, None, u'Identifier', None, u'Unique key identifying the binary data.'),
    (u'Binary', u'Data', u'N', None, None, None, None, u'Binary', None, u'The unformatted binary data.'),
    (u'CheckBox', u'Property', u'N', None, None, None, None, u'Identifier', None, u'A named property to be tied to the item.'),
    (u'CheckBox', u'Value', u'Y', None, None, None, None, u'Formatted', None, u'The value string associated with the item.'),
    (u'Property', u'Property', u'N', None, None, None, None, u'Identifier', None, u'Name of property, uppercase if settable by launcher or loader.'),
    (u'Property', u'Value', u'N', None, None, None, None, u'Text', None, u'String value for property. Never null or empty.'),
    (u'ComboBox', u'Text', u'Y', None, None, None, None, u'Formatted', None, u'The visible text to be assigned to the item. Optional. If this entry or the entire column is missing, the text is the same as the value.'),
    (u'ComboBox', u'Property', u'N', None, None, None, None, u'Identifier', None, u'A named property to be tied to this item. All the items tied to the same property become part of the same combobox.'),
    (u'ComboBox', u'Value', u'N', None, None, None, None, u'Formatted', None, u'The value string associated with this item. Selecting the line will set the associated property to this value.'),
    (u'ComboBox', u'Order', u'N', 1, 32767, None, None, None, None, u'A positive integer used to determine the ordering of the items within one list.\tThe integers do not have to be consecutive.'),
    (u'Control', u'Type', u'N', None, None, None, None, u'Identifier', None, u'The type of the control.'),
    (u'Control', u'X', u'N', 0, 32767, None, None, None, None, u'Horizontal coordinate of the upper left corner of the bounding rectangle of the control.'),
    (u'Control', u'Y', u'N', 0, 32767, None, None, None, None, u'Vertical coordinate of the upper left corner of the bounding rectangle of the control.'),
    (u'Control', u'Width', u'N', 0, 32767, None, None, None, None, u'Width of the bounding rectangle of the control.'),
    (u'Control', u'Height', u'N', 0, 32767, None, None, None, None, u'Height of the bounding rectangle of the control.'),
    (u'Control', u'Attributes', u'Y', 0, 2147483647, None, None, None, None, u'A 32-bit word that specifies the attribute flags to be applied to this control.'),
    (u'Control', u'Text', u'Y', None, None, None, None, u'Formatted', None, u'A string used to set the initial text contained within a control (if appropriate).'),
    (u'Control', u'Property', u'Y', None, None, None, None, u'Identifier', None, u'The name of a defined property to be linked to this control.'),
    (u'Control', u'Control', u'N', None, None, None, None, u'Identifier', None, u'Name of the control. This name must be unique within a dialog, but can repeat on different dialogs.'),
    (u'Control', u'Dialog_', u'N', None, None, u'Dialog', 1, u'Identifier', None, u'External key to the Dialog table, name of the dialog.'),
    (u'Control', u'Control_Next', u'Y', None, None, u'Control', 2, u'Identifier', None, u'The name of another control on the same dialog. This link defines the tab order of the controls. The links have to form one or more cycles!'),
    (u'Control', u'Help', u'Y', None, None, None, None, u'Text', None, u'The help strings used with the button. The text is optional.'),
    (u'Icon', u'Name', u'N', None, None, None, None, u'Identifier', None, u'Primary key. Name of the icon file.'),
    (u'Icon', u'Data', u'N', None, None, None, None, u'Binary', None, u'Binary stream. The binary icon data in PE (.DLL or .EXE) or icon (.ICO) format.'),
    (u'ListBox', u'Text', u'Y', None, None, None, None, u'Text', None, u'The visible text to be assigned to the item. Optional. If this entry or the entire column is missing, the text is the same as the value.'),
    (u'ListBox', u'Property', u'N', None, None, None, None, u'Identifier', None, u'A named property to be tied to this item. All the items tied to the same property become part of the same listbox.'),
    (u'ListBox', u'Value', u'N', None, None, None, None, u'Formatted', None, u'The value string associated with this item. Selecting the line will set the associated property to this value.'),
    (u'ListBox', u'Order', u'N', 1, 32767, None, None, None, None, u'A positive integer used to determine the ordering of the items within one list. The integers do not have to be consecutive.'),
    (u'ActionText', u'Action', u'N', None, None, None, None, u'Identifier', None, u'Name of action to be described.'),
    (u'ActionText', u'Description', u'Y', None, None, None, None, u'Text', None, u'Localized description displayed in progress dialog and log when action is executing.'),
    (u'ActionText', u'Template', u'Y', None, None, None, None, u'Template', None, u'Optional localized format template used to format action data records for display during action execution.'),
    (u'ControlCondition', u'Action', u'N', None, None, None, None, None, u'Default;Disable;Enable;Hide;Show', u'The desired action to be taken on the specified control.'),
    (u'ControlCondition', u'Condition', u'N', None, None, None, None, u'Condition', None, u'A standard conditional statement that specifies under which conditions the action should be triggered.'),
    (u'ControlCondition', u'Dialog_', u'N', None, None, u'Dialog', 1, u'Identifier', None, u'A foreign key to the Dialog table, name of the dialog.'),
    (u'ControlCondition', u'Control_', u'N', None, None, u'Control', 2, u'Identifier', None, u'A foreign key to the Control table, name of the control.'),
    (u'ControlEvent', u'Condition', u'Y', None, None, None, None, u'Condition', None, u'A standard conditional statement that specifies under which conditions an event should be triggered.'),
    (u'ControlEvent', u'Ordering', u'Y', 0, 2147483647, None, None, None, None, u'An integer used to order several events tied to the same control. Can be left blank.'),
    (u'ControlEvent', u'Dialog_', u'N', None, None, u'Dialog', 1, u'Identifier', None, u'A foreign key to the Dialog table, name of the dialog.'),
    (u'ControlEvent', u'Control_', u'N', None, None, u'Control', 2, u'Identifier', None, u'A foreign key to the Control table, name of the control.'),
    (u'ControlEvent', u'Event', u'N', None, None, None, None, u'Formatted', None, u'An identifier that specifies the type of the event that should take place when the user interacts with the control specified by the first two entries.'),
    (u'ControlEvent', u'Argument', u'N', None, None, None, None, u'Formatted', None, u'A value to be used as a modifier when triggering a particular event.'),
    (u'Dialog', u'Width', u'N', 0, 32767, None, None, None, None, u'Width of the bounding rectangle of the dialog.'),
    (u'Dialog', u'Height', u'N', 0, 32767, None, None, None, None, u'Height of the bounding rectangle of the dialog.'),
    (u'Dialog', u'Attributes', u'Y', 0, 2147483647, None, None, None, None, u'A 32-bit word that specifies the attribute flags to be applied to this dialog.'),
    (u'Dialog', u'Title', u'Y', None, None, None, None, u'Formatted', None, u"A text string specifying the title to be displayed in the title bar of the dialog's window."),
    (u'Dialog', u'Dialog', u'N', None, None, None, None, u'Identifier', None, u'Name of the dialog.'),
    (u'Dialog', u'HCentering', u'N', 0, 100, None, None, None, None, u'Horizontal position of the dialog on a 0-100 scale. 0 means left end, 100 means right end of the screen, 50 center.'),
    (u'Dialog', u'VCentering', u'N', 0, 100, None, None, None, None, u'Vertical position of the dialog on a 0-100 scale. 0 means top end, 100 means bottom end of the screen, 50 center.'),
    (u'Dialog', u'Control_First', u'N', None, None, u'Control', 2, u'Identifier', None, u'Defines the control that has the focus when the dialog is created.'),
    (u'Dialog', u'Control_Default', u'Y', None, None, u'Control', 2, u'Identifier', None, u'Defines the default control. Hitting return is equivalent to pushing this button.'),
    (u'Dialog', u'Control_Cancel', u'Y', None, None, u'Control', 2, u'Identifier', None, u'Defines the cancel control. Hitting escape or clicking on the close icon on the dialog is equivalent to pushing this button.'),
    (u'EventMapping', u'Dialog_', u'N', None, None, u'Dialog', 1, u'Identifier', None, u'A foreign key to the Dialog table, name of the Dialog.'),
    (u'EventMapping', u'Control_', u'N', None, None, u'Control', 2, u'Identifier', None, u'A foreign key to the Control table, name of the control.'),
    (u'EventMapping', u'Event', u'N', None, None, None, None, u'Identifier', None, u'An identifier that specifies the type of the event that the control subscribes to.'),
    (u'EventMapping', u'Attribute', u'N', None, None, None, None, u'Identifier', None, u'The name of the control attribute that is set when this event is received.'),
    (u'InstallExecuteSequence', u'Action', u'N', None, None, None, None, u'Identifier', None, u'Name of action to invoke, either in the engine or the handler DLL.'),
    (u'InstallExecuteSequence', u'Sequence', u'Y', -4, 32767, None, None, None, None, u'Number that determines the sort order in which the actions are to be executed. Leave blank to suppress action.'),
    (u'InstallExecuteSequence', u'Condition', u'Y', None, None, None, None, u'Condition', None, u'Optional expression which skips the action if it evaluates to expFalse. If the expression syntax is invalid, the engine will terminate, returning iesBadActionData.'),
    (u'AppSearch', u'Property', u'N', None, None, None, None, u'Identifier', None, u'The property associated with a Signature.'),
    (u'AppSearch', u'Signature_', u'N', None, None, u'Signature;RegLocator;IniLocator;DrLocator;CompLocator', 1, u'Identifier', None, u'The Signature_ represents a unique file signature and is also the foreign key in the Signature, RegLocator, IniLocator, CompLocator and the DrLocator tables.'),
    (u'BindImage', u'File_', u'N', None, None, u'File', 1, u'Identifier', None, u'The index into the File table. This must be an executable file.'),
    (u'BindImage', u'Path', u'Y', None, None, None, None, u'Paths', None, u'A list of ;-delimited paths that represent the paths to be searched for the import DLLS. The list is usually a list of properties, each enclosed within square brackets [].'),
    (u'CCPSearch', u'Signature_', u'N', None, None, u'Signature;RegLocator;IniLocator;DrLocator;CompLocator', 1, u'Identifier', None, u'The Signature_ represents a unique file signature and is also the foreign key in the Signature, RegLocator, IniLocator, CompLocator and the DrLocator tables.'),
    (u'InstallUISequence', u'Action', u'N', None, None, None, None, u'Identifier', None, u'Name of action to invoke, either in the engine or the handler DLL.'),
    (u'InstallUISequence', u'Sequence', u'Y', -4, 32767, None, None, None, None, u'Number that determines the sort order in which the actions are to be executed. Leave blank to suppress action.'),
    (u'InstallUISequence', u'Condition', u'Y', None, None, None, None, u'Condition', None, u'Optional expression which skips the action if it evaluates to expFalse. If the expression syntax is invalid, the engine will terminate, returning iesBadActionData.'),
    (u'ListView', u'Text', u'Y', None, None, None, None, u'Text', None, u'The visible text to be assigned to the item. Optional. If this entry or the entire column is missing, the text is the same as the value.'),
    (u'ListView', u'Property', u'N', None, None, None, None, u'Identifier', None, u'A named property to be tied to this item. All the items tied to the same property become part of the same listview.'),
    (u'ListView', u'Value', u'N', None, None, None, None, u'Identifier', None, u'The value string associated with this item. Selecting the line will set the associated property to this value.'),
    (u'ListView', u'Order', u'N', 1, 32767, None, None, None, None, u'A positive integer used to determine the ordering of the items within one list. The integers do not have to be consecutive.'),
    (u'ListView', u'Binary_', u'Y', None, None, u'Binary', 1, u'Identifier', None, u'The name of the icon to be displayed with the item. The binary information is looked up from the Binary Table.'),
    (u'RadioButton', u'X', u'N', 0, 32767, None, None, None, None, u'The horizontal coordinate of the upper left corner of the bounding rectangle of the radio button.'),
    (u'RadioButton', u'Y', u'N', 0, 32767, None, None, None, None, u'The vertical coordinate of the upper left corner of the bounding rectangle of the radio button.'),
    (u'RadioButton', u'Width', u'N', 0, 32767, None, None, None, None, u'The width of the button.'),
    (u'RadioButton', u'Height', u'N', 0, 32767, None, None, None, None, u'The height of the button.'),
    (u'RadioButton', u'Text', u'Y', None, None, None, None, u'Text', None, u'The visible title to be assigned to the radio button.'),
    (u'RadioButton', u'Property', u'N', None, None, None, None, u'Identifier', None, u'A named property to be tied to this radio button. All the buttons tied to the same property become part of the same group.'),
    (u'RadioButton', u'Value', u'N', None, None, None, None, u'Formatted', None, u'The value string associated with this button. Selecting the button will set the associated property to this value.'),
    (u'RadioButton', u'Order', u'N', 1, 32767, None, None, None, None, u'A positive integer used to determine the ordering of the items within one list. The integers do not have to be consecutive.'),
    (u'RadioButton', u'Help', u'Y', None, None, None, None, u'Text', None, u'The help strings used with the button. The text is optional.'),
    (u'TextStyle', u'TextStyle', u'N', None, None, None, None, u'Identifier', None, u'Name of the style. The primary key of this table. This name is embedded in the texts to indicate a style change.'),
    (u'TextStyle', u'FaceName', u'N', None, None, None, None, u'Text', None, u'A string indicating the name of the font used. Required. The string must be at most 31 characters long.'),
    (u'TextStyle', u'Size', u'N', 0, 32767, None, None, None, None, u'The size of the font used. This size is given in our units (1/12 of the system font height). Assuming that the system font is set to 12 point size, this is equivalent to the point size.'),
    (u'TextStyle', u'Color', u'Y', 0, 16777215, None, None, None, None, u'A long integer indicating the color of the string in the RGB format (Red, Green, Blue each 0-255, RGB = R + 256*G + 256^2*B).'),
    (u'TextStyle', u'StyleBits', u'Y', 0, 15, None, None, None, None, u'A combination of style bits.'),
    (u'UIText', u'Text', u'Y', None, None, None, None, u'Text', None, u'The localized version of the string.'),
    (u'UIText', u'Key', u'N', None, None, None, None, u'Identifier', None, u'A unique key that identifies the particular string.'),
    (u'_Validation', u'Table', u'N', None, None, None, None, u'Identifier', None, u'Name of table'),
    (u'_Validation', u'Description', u'Y', None, None, None, None, u'Text', None, u'Description of column'),
    (u'_Validation', u'Column', u'N', None, None, None, None, u'Identifier', None, u'Name of column'),
    (u'_Validation', u'Nullable', u'N', None, None, None, None, None, u'Y;N;@', u'Whether the column is nullable'),
    (u'_Validation', u'MinValue', u'Y', -2147483647, 2147483647, None, None, None, None, u'Minimum value allowed'),
    (u'_Validation', u'MaxValue', u'Y', -2147483647, 2147483647, None, None, None, None, u'Maximum value allowed'),
    (u'_Validation', u'KeyTable', u'Y', None, None, None, None, u'Identifier', None, u'For foreign key, Name of table to which data must link'),
    (u'_Validation', u'KeyColumn', u'Y', 1, 32, None, None, None, None, u'Column to which foreign key connects'),
    (u'_Validation', u'Category', u'Y', None, None, None, None, None, u'Text;Formatted;Template;Condition;Guid;Path;Version;Language;Identifier;Binary;UpperCase;LowerCase;Filename;Paths;AnyPath;WildCardFilename;RegPath;KeyFormatted;CustomSource;Property;Cabinet;Shortcut;URL', u'String category'),
    (u'_Validation', u'Set', u'Y', None, None, None, None, u'Text', None, u'Set of values that are permitted'),
    (u'AdvtUISequence', u'Action', u'N', None, None, None, None, u'Identifier', None, u'Name of action to invoke, either in the engine or the handler DLL.'),
    (u'AdvtUISequence', u'Sequence', u'Y', -4, 32767, None, None, None, None, u'Number that determines the sort order in which the actions are to be executed. Leave blank to suppress action.'),
    (u'AdvtUISequence', u'Condition', u'Y', None, None, None, None, u'Condition', None, u'Optional expression which skips the action if it evaluates to expFalse. If the expression syntax is invalid, the engine will terminate, returning iesBadActionData.'),
    (u'AppId', u'AppId', u'N', None, None, None, None, u'Guid', None, None),
    (u'AppId', u'ActivateAtStorage', u'Y', 0, 1, None, None, None, None, None),
    (u'AppId', u'DllSurrogate', u'Y', None, None, None, None, u'Text', None, None),
    (u'AppId', u'LocalService', u'Y', None, None, None, None, u'Text', None, None),
    (u'AppId', u'RemoteServerName', u'Y', None, None, None, None, u'Formatted', None, None),
    (u'AppId', u'RunAsInteractiveUser', u'Y', 0, 1, None, None, None, None, None),
    (u'AppId', u'ServiceParameters', u'Y', None, None, None, None, u'Text', None, None),
    (u'Feature', u'Attributes', u'N', None, None, None, None, None, u'0;1;2;4;5;6;8;9;10;16;17;18;20;21;22;24;25;26;32;33;34;36;37;38;48;49;50;52;53;54', u'Feature attributes'),
    (u'Feature', u'Description', u'Y', None, None, None, None, u'Text', None, u'Longer descriptive text describing a visible feature item.'),
    (u'Feature', u'Title', u'Y', None, None, None, None, u'Text', None, u'Short text identifying a visible feature item.'),
    (u'Feature', u'Feature', u'N', None, None, None, None, u'Identifier', None, u'Primary key used to identify a particular feature record.'),
    (u'Feature', u'Directory_', u'Y', None, None, u'Directory', 1, u'UpperCase', None, u'The name of the Directory that can be configured by the UI. A non-null value will enable the browse button.'),
    (u'Feature', u'Level', u'N', 0, 32767, None, None, None, None, u'The install level at which record will be initially selected. An install level of 0 will disable an item and prevent its display.'),
    (u'Feature', u'Display', u'Y', 0, 32767, None, None, None, None, u'Numeric sort order, used to force a specific display ordering.'),
    (u'Feature', u'Feature_Parent', u'Y', None, None, u'Feature', 1, u'Identifier', None, u'Optional key of a parent record in the same table. If the parent is not selected, then the record will not be installed. Null indicates a root item.'),
    (u'File', u'Sequence', u'N', 1, 32767, None, None, None, None, u'Sequence with respect to the media images; order must track cabinet order.'),
    (u'File', u'Attributes', u'Y', 0, 32767, None, None, None, None, u'Integer containing bit flags representing file attributes (with the decimal value of each bit position in parentheses)'),
    (u'File', u'File', u'N', None, None, None, None, u'Identifier', None, u'Primary key, non-localized token, must match identifier in cabinet. For uncompressed files, this field is ignored.'),
    (u'File', u'Component_', u'N', None, None, u'Component', 1, u'Identifier', None, u'Foreign key referencing Component that controls the file.'),
    (u'File', u'FileName', u'N', None, None, None, None, u'Filename', None, u'File name used for installation, may be localized. This may contain a "short name|long name" pair.'),
    (u'File', u'FileSize', u'N', 0, 2147483647, None, None, None, None, u'Size of file in bytes (long integer).'),
    (u'File', u'Language', u'Y', None, None, None, None, u'Language', None, u'List of decimal language Ids, comma-separated if more than one.'),
    (u'File', u'Version', u'Y', None, None, u'File', 1, u'Version', None, u'Version string for versioned files; Blank for unversioned files.'),
    (u'Class', u'Attributes', u'Y', None, 32767, None, None, None, None, u'Class registration attributes.'),
    (u'Class', u'Feature_', u'N', None, None, u'Feature', 1, u'Identifier', None, u'Required foreign key into the Feature Table, specifying the feature to validate or install in order for the CLSID factory to be operational.'),
    (u'Class', u'Description', u'Y', None, None, None, None, u'Text', None, u'Localized description for the Class.'),
    (u'Class', u'Argument', u'Y', None, None, None, None, u'Formatted', None, u'Optional argument for LocalServers.'),
    (u'Class', u'AppId_', u'Y', None, None, u'AppId', 1, u'Guid', None, u'Optional AppID containing DCOM information for associated application (string GUID).'),
    (u'Class', u'CLSID', u'N', None, None, None, None, u'Guid', None, u'The CLSID of an OLE factory.'),
    (u'Class', u'Component_', u'N', None, None, u'Component', 1, u'Identifier', None, u'Required foreign key into the Component Table, specifying the component for which to return a path when called through LocateComponent.'),
    (u'Class', u'Context', u'N', None, None, None, None, u'Identifier', None, u'The numeric server context for this server. CLSCTX_xxxx'),
    (u'Class', u'DefInprocHandler', u'Y', None, None, None, None, u'Filename', u'1;2;3', u'Optional default inproc handler. Only optionally provided if Context=CLSCTX_LOCAL_SERVER. Typically "ole32.dll" or "mapi32.dll"'),
    (u'Class', u'FileTypeMask', u'Y', None, None, None, None, u'Text', None, u'Optional string containing information for the HKCR (this CLSID) key. If multiple patterns exist, they must be delimited by a semicolon, and numeric subkeys will be generated: 0,1,2...'),
    (u'Class', u'Icon_', u'Y', None, None, u'Icon', 1, u'Identifier', None, u'Optional foreign key into the Icon Table, specifying the icon file associated with this CLSID. Will be written under the DefaultIcon key.'),
    (u'Class', u'IconIndex', u'Y', -32767, 32767, None, None, None, None, u'Optional icon index.'),
    (u'Class', u'ProgId_Default', u'Y', None, None, u'ProgId', 1, u'Text', None, u'Optional ProgId associated with this CLSID.'),
    (u'Component', u'Condition', u'Y', None, None, None, None, u'Condition', None, u"A conditional statement that will disable this component if the specified condition evaluates to the 'True' state. If a component is disabled, it will not be installed, regardless of the 'Action' state associated with the component."),
    (u'Component', u'Attributes', u'N', None, None, None, None, None, None, u'Remote execution option, one of irsEnum'),
    (u'Component', u'Component', u'N', None, None, None, None, u'Identifier', None, u'Primary key used to identify a particular component record.'),
    (u'Component', u'ComponentId', u'Y', None, None, None, None, u'Guid', None, u'A string GUID unique to this component, version, and language.'),
    (u'Component', u'Directory_', u'N', None, None, u'Directory', 1, u'Identifier', None, u'Required key of a Directory table record. This is actually a property name whose value contains the actual path, set either by the AppSearch action or with the default setting obtained from the Directory table.'),
    (u'Component', u'KeyPath', u'Y', None, None, u'File;Registry;ODBCDataSource', 1, u'Identifier', None, u'Either the primary key into the File table, Registry table, or ODBCDataSource table. This extract path is stored when the component is installed, and is used to detect the presence of the component and to return the path to it.'),
    (u'ProgId', u'Description', u'Y', None, None, None, None, u'Text', None, u'Localized description for the Program identifier.'),
    (u'ProgId', u'Icon_', u'Y', None, None, u'Icon', 1, u'Identifier', None, u'Optional foreign key into the Icon Table, specifying the icon file associated with this ProgId. Will be written under the DefaultIcon key.'),
    (u'ProgId', u'IconIndex', u'Y', -32767, 32767, None, None, None, None, u'Optional icon index.'),
    (u'ProgId', u'ProgId', u'N', None, None, None, None, u'Text', None, u'The Program Identifier. Primary key.'),
    (u'ProgId', u'Class_', u'Y', None, None, u'Class', 1, u'Guid', None, u'The CLSID of an OLE factory corresponding to the ProgId.'),
    (u'ProgId', u'ProgId_Parent', u'Y', None, None, u'ProgId', 1, u'Text', None, u'The Parent Program Identifier. If specified, the ProgId column becomes a version independent prog id.'),
    (u'CompLocator', u'Type', u'Y', 0, 1, None, None, None, None, u'A boolean value that determines if the registry value is a filename or a directory location.'),
    (u'CompLocator', u'Signature_', u'N', None, None, None, None, u'Identifier', None, u'The table key. The Signature_ represents a unique file signature and is also the foreign key in the Signature table.'),
    (u'CompLocator', u'ComponentId', u'N', None, None, None, None, u'Guid', None, u'A string GUID unique to this component, version, and language.'),
    (u'Complus', u'Component_', u'N', None, None, u'Component', 1, u'Identifier', None, u'Foreign key referencing Component that controls the ComPlus component.'),
    (u'Complus', u'ExpType', u'Y', 0, 32767, None, None, None, None, u'ComPlus component attributes.'),
    (u'Directory', u'Directory', u'N', None, None, None, None, u'Identifier', None, u'Unique identifier for directory entry, primary key. If a property by this name is defined, it contains the full path to the directory.'),
    (u'Directory', u'DefaultDir', u'N', None, None, None, None, u'DefaultDir', None, u"The default sub-path under parent's path."),
    (u'Directory', u'Directory_Parent', u'Y', None, None, u'Directory', 1, u'Identifier', None, u'Reference to the entry in this table specifying the default parent directory. A record parented to itself or with a Null parent represents a root of the install tree.'),
    (u'CreateFolder', u'Component_', u'N', None, None, u'Component', 1, u'Identifier', None, u'Foreign key into the Component table.'),
    (u'CreateFolder', u'Directory_', u'N', None, None, u'Directory', 1, u'Identifier', None, u'Primary key, could be foreign key into the Directory table.'),
    (u'CustomAction', u'Type', u'N', 1, 16383, None, None, None, None, u'The numeric custom action type, consisting of source location, code type, entry, option flags.'),
    (u'CustomAction', u'Action', u'N', None, None, None, None, u'Identifier', None, u'Primary key, name of action, normally appears in sequence table unless private use.'),
    (u'CustomAction', u'Source', u'Y', None, None, None, None, u'CustomSource', None, u'The table reference of the source of the code.'),
    (u'CustomAction', u'Target', u'Y', None, None, None, None, u'Formatted', None, u'Execution parameter, depends on the type of custom action'),
    (u'DrLocator', u'Signature_', u'N', None, None, None, None, u'Identifier', None, u'The Signature_ represents a unique file signature and is also the foreign key in the Signature table.'),
    (u'DrLocator', u'Path', u'Y', None, None, None, None, u'AnyPath', None, u'The path on the user system. This is either a subpath below the value of the Parent or a full path. The path may contain properties enclosed within [ ] that will be expanded.'),
    (u'DrLocator', u'Depth', u'Y', 0, 32767, None, None, None, None, u'The depth below the path to which the Signature_ is recursively searched. If absent, the depth is assumed to be 0.'),
    (u'DrLocator', u'Parent', u'Y', None, None, None, None, u'Identifier', None, u'The parent file signature. It is also a foreign key in the Signature table. If null and the Path column does not expand to a full path, then all the fixed drives of the user system are searched using the Path.'),
    (u'DuplicateFile', u'File_', u'N', None, None, u'File', 1, u'Identifier', None, u'Foreign key referencing the source file to be duplicated.'),
    (u'DuplicateFile', u'Component_', u'N', None, None, u'Component', 1, u'Identifier', None, u'Foreign key referencing Component that controls the duplicate file.'),
    (u'DuplicateFile', u'DestFolder', u'Y', None, None, None, None, u'Identifier', None, u'Name of a property whose value is assumed to resolve to the full pathname to a destination folder.'),
    (u'DuplicateFile', u'DestName', u'Y', None, None, None, None, u'Filename', None, u'Filename to be given to the duplicate file.'),
    (u'DuplicateFile', u'FileKey', u'N', None, None, None, None, u'Identifier', None, u'Primary key used to identify a particular file entry'),
    (u'Environment', u'Name', u'N', None, None, None, None, u'Text', None, u'The name of the environmental value.'),
    (u'Environment', u'Value', u'Y', None, None, None, None, u'Formatted', None, u'The value to set in the environmental settings.'),
    (u'Environment', u'Component_', u'N', None, None, u'Component', 1, u'Identifier', None, u'Foreign key into the Component table referencing component that controls the installing of the environmental value.'),
    (u'Environment', u'Environment', u'N', None, None, None, None, u'Identifier', None, u'Unique identifier for the environmental variable setting'),
    (u'Error', u'Error', u'N', 0, 32767, None, None, None, None, u'Integer error number, obtained from header file IError(...) macros.'),
    (u'Error', u'Message', u'Y', None, None, None, None, u'Template', None, u'Error formatting template, obtained from user ed. or localizers.'),
    (u'Extension', u'Feature_', u'N', None, None, u'Feature', 1, u'Identifier', None, u'Required foreign key into the Feature Table, specifying the feature to validate or install in order for the CLSID factory to be operational.'),
    (u'Extension', u'Component_', u'N', None, None, u'Component', 1, u'Identifier', None, u'Required foreign key into the Component Table, specifying the component for which to return a path when called through LocateComponent.'),
    (u'Extension', u'Extension', u'N', None, None, None, None, u'Text', None, u'The extension associated with the table row.'),
    (u'Extension', u'MIME_', u'Y', None, None, u'MIME', 1, u'Text', None, u'Optional Context identifier, typically "type/format" associated with the extension'),
    (u'Extension', u'ProgId_', u'Y', None, None, u'ProgId', 1, u'Text', None, u'Optional ProgId associated with this extension.'),
    (u'MIME', u'CLSID', u'Y', None, None, None, None, u'Guid', None, u'Optional associated CLSID.'),
    (u'MIME', u'ContentType', u'N', None, None, None, None, u'Text', None, u'Primary key. Context identifier, typically "type/format".'),
    (u'MIME', u'Extension_', u'N', None, None, u'Extension', 1, u'Text', None, u'Optional associated extension (without dot)'),
    (u'FeatureComponents', u'Feature_', u'N', None, None, u'Feature', 1, u'Identifier', None, u'Foreign key into Feature table.'),
    (u'FeatureComponents', u'Component_', u'N', None, None, u'Component', 1, u'Identifier', None, u'Foreign key into Component table.'),
    (u'FileSFPCatalog', u'File_', u'N', None, None, u'File', 1, u'Identifier', None, u'File associated with the catalog'),
    (u'FileSFPCatalog', u'SFPCatalog_', u'N', None, None, u'SFPCatalog', 1, u'Filename', None, u'Catalog associated with the file'),
    (u'SFPCatalog', u'SFPCatalog', u'N', None, None, None, None, u'Filename', None, u'File name for the catalog.'),
    (u'SFPCatalog', u'Catalog', u'N', None, None, None, None, u'Binary', None, u'SFP Catalog'),
    (u'SFPCatalog', u'Dependency', u'Y', None, None, None, None, u'Formatted', None, u'Parent catalog - only used by SFP'),
    (u'Font', u'File_', u'N', None, None, u'File', 1, u'Identifier', None, u'Primary key, foreign key into File table referencing font file.'),
    (u'Font', u'FontTitle', u'Y', None, None, None, None, u'Text', None, u'Font name.'),
    (u'IniFile', u'Action', u'N', None, None, None, None, None, u'0;1;3', u'The type of modification to be made, one of iifEnum'),
    (u'IniFile', u'Value', u'N', None, None, None, None, u'Formatted', None, u'The value to be written.'),
    (u'IniFile', u'Key', u'N', None, None, None, None, u'Formatted', None, u'The .INI file key below Section.'),
    (u'IniFile', u'Component_', u'N', None, None, u'Component', 1, u'Identifier', None, u'Foreign key into the Component table referencing component that controls the installing of the .INI value.'),
    (u'IniFile', u'FileName', u'N', None, None, None, None, u'Filename', None, u'The .INI file name in which to write the information'),
    (u'IniFile', u'IniFile', u'N', None, None, None, None, u'Identifier', None, u'Primary key, non-localized token.'),
    (u'IniFile', u'DirProperty', u'Y', None, None, None, None, u'Identifier', None, u'Foreign key into the Directory table denoting the directory where the .INI file is.'),
    (u'IniFile', u'Section', u'N', None, None, None, None, u'Formatted', None, u'The .INI file Section.'),
    (u'IniLocator', u'Type', u'Y', 0, 2, None, None, None, None, u'An integer value that determines if the .INI value read is a filename or a directory location or to be used as is w/o interpretation.'),
    (u'IniLocator', u'Key', u'N', None, None, None, None, u'Text', None, u'Key value (followed by an equals sign in INI file).'),
    (u'IniLocator', u'Signature_', u'N', None, None, None, None, u'Identifier', None, u'The table key. The Signature_ represents a unique file signature and is also the foreign key in the Signature table.'),
    (u'IniLocator', u'FileName', u'N', None, None, None, None, u'Filename', None, u'The .INI file name.'),
    (u'IniLocator', u'Section', u'N', None, None, None, None, u'Text', None, u'Section name within the file (within square brackets in INI file).'),
    (u'IniLocator', u'Field', u'Y', 0, 32767, None, None, None, None, u'The field in the .INI line. If Field is null or 0, the entire line is read.'),
    (u'IsolatedComponent', u'Component_Application', u'N', None, None, u'Component', 1, u'Identifier', None, u'Key to Component table item for application'),
    (u'IsolatedComponent', u'Component_Shared', u'N', None, None, u'Component', 1, u'Identifier', None, u'Key to Component table item to be isolated'),
    (u'LaunchCondition', u'Condition', u'N', None, None, None, None, u'Condition', None, u'Expression which must evaluate to TRUE in order for install to commence.'),
    (u'LaunchCondition', u'Description', u'N', None, None, None, None, u'Formatted', None, u'Localizable text to display when condition fails and install must abort.'),
    (u'LockPermissions', u'Table', u'N', None, None, None, None, u'Identifier', u'Directory;File;Registry', u'Reference to another table name'),
    (u'LockPermissions', u'Domain', u'Y', None, None, None, None, u'Formatted', None, u'Domain name for user whose permissions are being set. (usually a property)'),
    (u'LockPermissions', u'LockObject', u'N', None, None, None, None, u'Identifier', None, u'Foreign key into Registry or File table'),
    (u'LockPermissions', u'Permission', u'Y', -2147483647, 2147483647, None, None, None, None, u'Permission Access mask. Full Control = 268435456 (GENERIC_ALL = 0x10000000)'),
    (u'LockPermissions', u'User', u'N', None, None, None, None, u'Formatted', None, u'User for permissions to be set. (usually a property)'),
    (u'Media', u'Source', u'Y', None, None, None, None, u'Property', None, u'The property defining the location of the cabinet file.'),
    (u'Media', u'Cabinet', u'Y', None, None, None, None, u'Cabinet', None, u'If some or all of the files stored on the media are compressed in a cabinet, the name of that cabinet.'),
    (u'Media', u'DiskId', u'N', 1, 32767, None, None, None, None, u'Primary key, integer to determine sort order for table.'),
    (u'Media', u'DiskPrompt', u'Y', None, None, None, None, u'Text', None, u'Disk name: the visible text actually printed on the disk. This will be used to prompt the user when this disk needs to be inserted.'),
    (u'Media', u'LastSequence', u'N', 0, 32767, None, None, None, None, u'File sequence number for the last file for this media.'),
    (u'Media', u'VolumeLabel', u'Y', None, None, None, None, u'Text', None, u'The label attributed to the volume.'),
    (u'ModuleComponents', u'Component', u'N', None, None, u'Component', 1, u'Identifier', None, u'Component contained in the module.'),
    (u'ModuleComponents', u'Language', u'N', None, None, u'ModuleSignature', 2, None, None, u'Default language ID for module (may be changed by transform).'),
    (u'ModuleComponents', u'ModuleID', u'N', None, None, u'ModuleSignature', 1, u'Identifier', None, u'Module containing the component.'),
    (u'ModuleSignature', u'Language', u'N', None, None, None, None, None, None, u'Default decimal language of module.'),
    (u'ModuleSignature', u'Version', u'N', None, None, None, None, u'Version', None, u'Version of the module.'),
    (u'ModuleSignature', u'ModuleID', u'N', None, None, None, None, u'Identifier', None, u'Module identifier (String.GUID).'),
    (u'ModuleDependency', u'ModuleID', u'N', None, None, u'ModuleSignature', 1, u'Identifier', None, u'Module requiring the dependency.'),
    (u'ModuleDependency', u'ModuleLanguage', u'N', None, None, u'ModuleSignature', 2, None, None, u'Language of module requiring the dependency.'),
    (u'ModuleDependency', u'RequiredID', u'N', None, None, None, None, None, None, u'String.GUID of required module.'),
    (u'ModuleDependency', u'RequiredLanguage', u'N', None, None, None, None, None, None, u'LanguageID of the required module.'),
    (u'ModuleDependency', u'RequiredVersion', u'Y', None, None, None, None, u'Version', None, u'Version of the required module.'),
    (u'ModuleExclusion', u'ModuleID', u'N', None, None, u'ModuleSignature', 1, u'Identifier', None, u'String.GUID of module with exclusion requirement.'),
    (u'ModuleExclusion', u'ModuleLanguage', u'N', None, None, u'ModuleSignature', 2, None, None, u'LanguageID of module with exclusion requirement.'),
    (u'ModuleExclusion', u'ExcludedID', u'N', None, None, None, None, None, None, u'String.GUID of excluded module.'),
    (u'ModuleExclusion', u'ExcludedLanguage', u'N', None, None, None, None, None, None, u'Language of excluded module.'),
    (u'ModuleExclusion', u'ExcludedMaxVersion', u'Y', None, None, None, None, u'Version', None, u'Maximum version of excluded module.'),
    (u'ModuleExclusion', u'ExcludedMinVersion', u'Y', None, None, None, None, u'Version', None, u'Minimum version of excluded module.'),
    (u'MoveFile', u'Component_', u'N', None, None, u'Component', 1, u'Identifier', None, u'If this component is not "selected" for installation or removal, no action will be taken on the associated MoveFile entry'),
    (u'MoveFile', u'DestFolder', u'N', None, None, None, None, u'Identifier', None, u'Name of a property whose value is assumed to resolve to the full path to the destination directory'),
    (u'MoveFile', u'DestName', u'Y', None, None, None, None, u'Filename', None, u'Name to be given to the original file after it is moved or copied. If blank, the destination file will be given the same name as the source file'),
    (u'MoveFile', u'FileKey', u'N', None, None, None, None, u'Identifier', None, u'Primary key that uniquely identifies a particular MoveFile record'),
    (u'MoveFile', u'Options', u'N', 0, 1, None, None, None, None, u'Integer value specifying the MoveFile operating mode, one of imfoEnum'),
    (u'MoveFile', u'SourceFolder', u'Y', None, None, None, None, u'Identifier', None, u'Name of a property whose value is assumed to resolve to the full path to the source directory'),
    (u'MoveFile', u'SourceName', u'Y', None, None, None, None, u'Text', None, u"Name of the source file(s) to be moved or copied. Can contain the '*' or '?' wildcards."),
    (u'MsiAssembly', u'Attributes', u'Y', None, None, None, None, None, None, u'Assembly attributes'),
    (u'MsiAssembly', u'Feature_', u'N', None, None, u'Feature', 1, u'Identifier', None, u'Foreign key into Feature table.'),
    (u'MsiAssembly', u'Component_', u'N', None, None, u'Component', 1, u'Identifier', None, u'Foreign key into Component table.'),
    (u'MsiAssembly', u'File_Application', u'Y', None, None, u'File', 1, u'Identifier', None, u'Foreign key into File table, denoting the application context for private assemblies. Null for global assemblies.'),
    (u'MsiAssembly', u'File_Manifest', u'Y', None, None, u'File', 1, u'Identifier', None, u'Foreign key into the File table denoting the manifest file for the assembly.'),
    (u'MsiAssemblyName', u'Name', u'N', None, None, None, None, u'Text', None, u'The name part of the name-value pairs for the assembly name.'),
    (u'MsiAssemblyName', u'Value', u'N', None, None, None, None, u'Text', None, u'The value part of the name-value pairs for the assembly name.'),
    (u'MsiAssemblyName', u'Component_', u'N', None, None, u'Component', 1, u'Identifier', None, u'Foreign key into Component table.'),
    (u'MsiDigitalCertificate', u'CertData', u'N', None, None, None, None, u'Binary', None, u'A certificate context blob for a signer certificate'),
    (u'MsiDigitalCertificate', u'DigitalCertificate', u'N', None, None, None, None, u'Identifier', None, u'A unique identifier for the row'),
    (u'MsiDigitalSignature', u'Table', u'N', None, None, None, None, None, u'Media', u'Reference to another table name (only Media table is supported)'),
    (u'MsiDigitalSignature', u'DigitalCertificate_', u'N', None, None, u'MsiDigitalCertificate', 1, u'Identifier', None, u'Foreign key to MsiDigitalCertificate table identifying the signer certificate'),
    (u'MsiDigitalSignature', u'Hash', u'Y', None, None, None, None, u'Binary', None, u'The encoded hash blob from the digital signature'),
    (u'MsiDigitalSignature', u'SignObject', u'N', None, None, None, None, u'Text', None, u'Foreign key to Media table'),
    (u'MsiFileHash', u'File_', u'N', None, None, u'File', 1, u'Identifier', None, u'Primary key, foreign key into File table referencing file with this hash'),
    (u'MsiFileHash', u'Options', u'N', 0, 32767, None, None, None, None, u'Various options and attributes for this hash.'),
    (u'MsiFileHash', u'HashPart1', u'N', None, None, None, None, None, None, u'Size of file in bytes (long integer).'),
    (u'MsiFileHash', u'HashPart2', u'N', None, None, None, None, None, None, u'Size of file in bytes (long integer).'),
    (u'MsiFileHash', u'HashPart3', u'N', None, None, None, None, None, None, u'Size of file in bytes (long integer).'),
    (u'MsiFileHash', u'HashPart4', u'N', None, None, None, None, None, None, u'Size of file in bytes (long integer).'),
    (u'MsiPatchHeaders', u'StreamRef', u'N', None, None, None, None, u'Identifier', None, u'Primary key. A unique identifier for the row.'),
    (u'MsiPatchHeaders', u'Header', u'N', None, None, None, None, u'Binary', None, u'Binary stream. The patch header, used for patch validation.'),
    (u'ODBCAttribute', u'Value', u'Y', None, None, None, None, u'Text', None, u'Value for ODBC driver attribute'),
    (u'ODBCAttribute', u'Attribute', u'N', None, None, None, None, u'Text', None, u'Name of ODBC driver attribute'),
    (u'ODBCAttribute', u'Driver_', u'N', None, None, u'ODBCDriver', 1, u'Identifier', None, u'Reference to ODBC driver in ODBCDriver table'),
    (u'ODBCDriver', u'Description', u'N', None, None, None, None, u'Text', None, u'Text used as registered name for driver, non-localized'),
    (u'ODBCDriver', u'File_', u'N', None, None, u'File', 1, u'Identifier', None, u'Reference to key driver file'),
    (u'ODBCDriver', u'Component_', u'N', None, None, u'Component', 1, u'Identifier', None, u'Reference to associated component'),
    (u'ODBCDriver', u'Driver', u'N', None, None, None, None, u'Identifier', None, u'Primary key, non-localized internal token for driver'),
    (u'ODBCDriver', u'File_Setup', u'Y', None, None, u'File', 1, u'Identifier', None, u'Optional reference to key driver setup DLL'),
    (u'ODBCDataSource', u'Description', u'N', None, None, None, None, u'Text', None, u'Text used as registered name for data source'),
    (u'ODBCDataSource', u'Component_', u'N', None, None, u'Component', 1, u'Identifier', None, u'Reference to associated component'),
    (u'ODBCDataSource', u'DataSource', u'N', None, None, None, None, u'Identifier', None, u'Primary key, non-localized internal token for data source'),
    (u'ODBCDataSource', u'DriverDescription', u'N', None, None, None, None, u'Text', None, u'Reference to driver description, may be existing driver'),
    (u'ODBCDataSource', u'Registration', u'N', 0, 1, None, None, None, None, u'Registration option: 0=machine, 1=user, others t.b.d.'),
    (u'ODBCSourceAttribute', u'Value', u'Y', None, None, None, None, u'Text', None, u'Value for ODBC data source attribute'),
    (u'ODBCSourceAttribute', u'Attribute', u'N', None, None, None, None, u'Text', None, u'Name of ODBC data source attribute'),
    (u'ODBCSourceAttribute', u'DataSource_', u'N', None, None, u'ODBCDataSource', 1, u'Identifier', None, u'Reference to ODBC data source in ODBCDataSource table'),
    (u'ODBCTranslator', u'Description', u'N', None, None, None, None, u'Text', None, u'Text used as registered name for translator'),
    (u'ODBCTranslator', u'File_', u'N', None, None, u'File', 1, u'Identifier', None, u'Reference to key translator file'),
    (u'ODBCTranslator', u'Component_', u'N', None, None, u'Component', 1, u'Identifier', None, u'Reference to associated component'),
    (u'ODBCTranslator', u'File_Setup', u'Y', None, None, u'File', 1, u'Identifier', None, u'Optional reference to key translator setup DLL'),
    (u'ODBCTranslator', u'Translator', u'N', None, None, None, None, u'Identifier', None, u'Primary key, non-localized internal token for translator'),
    (u'Patch', u'Sequence', u'N', 0, 32767, None, None, None, None, u'Primary key, sequence with respect to the media images; order must track cabinet order.'),
    (u'Patch', u'Attributes', u'N', 0, 32767, None, None, None, None, u'Integer containing bit flags representing patch attributes'),
    (u'Patch', u'File_', u'N', None, None, None, None, u'Identifier', None, u'Primary key, non-localized token, foreign key to File table, must match identifier in cabinet.'),
    (u'Patch', u'Header', u'Y', None, None, None, None, u'Binary', None, u'Binary stream. The patch header, used for patch validation.'),
    (u'Patch', u'PatchSize', u'N', 0, 2147483647, None, None, None, None, u'Size of patch in bytes (long integer).'),
    (u'Patch', u'StreamRef_', u'Y', None, None, None, None, u'Identifier', None, u'Identifier. Foreign key to the StreamRef column of the MsiPatchHeaders table.'),
    (u'PatchPackage', u'Media_', u'N', 0, 32767, None, None, None, None, u'Foreign key to DiskId column of Media table. Indicates the disk containing the patch package.'),
    (u'PatchPackage', u'PatchId', u'N', None, None, None, None, u'Guid', None, u'A unique string GUID representing this patch.'),
    (u'PublishComponent', u'Feature_', u'N', None, None, u'Feature', 1, u'Identifier', None, u'Foreign key into the Feature table.'),
    (u'PublishComponent', u'Component_', u'N', None, None, u'Component', 1, u'Identifier', None, u'Foreign key into the Component table.'),
    (u'PublishComponent', u'ComponentId', u'N', None, None, None, None, u'Guid', None, u'A string GUID that represents the component id that will be requested by the alien product.'),
    (u'PublishComponent', u'AppData', u'Y', None, None, None, None, u'Text', None, u'This is localisable Application specific data that can be associated with a Qualified Component.'),
    (u'PublishComponent', u'Qualifier', u'N', None, None, None, None, u'Text', None, u'This is defined only when the ComponentId column is a Qualified Component Id. This is the Qualifier for ProvideComponentIndirect.'),
    (u'Registry', u'Name', u'Y', None, None, None, None, u'Formatted', None, u'The registry value name.'),
    (u'Registry', u'Value', u'Y', None, None, None, None, u'Formatted', None, u'The registry value.'),
    (u'Registry', u'Key', u'N', None, None, None, None, u'RegPath', None, u'The key for the registry value.'),
    (u'Registry', u'Component_', u'N', None, None, u'Component', 1, u'Identifier', None, u'Foreign key into the Component table referencing component that controls the installing of the registry value.'),
    (u'Registry', u'Registry', u'N', None, None, None, None, u'Identifier', None, u'Primary key, non-localized token.'),
    (u'Registry', u'Root', u'N', -1, 3, None, None, None, None, u'The predefined root key for the registry value, one of rrkEnum.'),
    (u'RegLocator', u'Name', u'Y', None, None, None, None, u'Formatted', None, u'The registry value name.'),
    (u'RegLocator', u'Type', u'Y', 0, 18, None, None, None, None, u'An integer value that determines if the registry value is a filename or a directory location or to be used as is w/o interpretation.'),
    (u'RegLocator', u'Key', u'N', None, None, None, None, u'RegPath', None, u'The key for the registry value.'),
    (u'RegLocator', u'Signature_', u'N', None, None, None, None, u'Identifier', None, u'The table key. The Signature_ represents a unique file signature and is also the foreign key in the Signature table. If the type is 0, the registry value refers to a directory, and Signature_ is not a foreign key.'),
    (u'RegLocator', u'Root', u'N', 0, 3, None, None, None, None, u'The predefined root key for the registry value, one of rrkEnum.'),
    (u'RemoveFile', u'InstallMode', u'N', None, None, None, None, None, u'1;2;3', u'Installation option, one of iimEnum.'),
    (u'RemoveFile', u'Component_', u'N', None, None, u'Component', 1, u'Identifier', None, u'Foreign key referencing Component that controls the file to be removed.'),
    (u'RemoveFile', u'FileKey', u'N', None, None, None, None, u'Identifier', None, u'Primary key used to identify a particular file entry'),
    (u'RemoveFile', u'FileName', u'Y', None, None, None, None, u'WildCardFilename', None, u'Name of the file to be removed.'),
    (u'RemoveFile', u'DirProperty', u'N', None, None, None, None, u'Identifier', None, u'Name of a property whose value is assumed to resolve to the full pathname to the folder of the file to be removed.'),
    (u'RemoveIniFile', u'Action', u'N', None, None, None, None, None, u'2;4', u'The type of modification to be made, one of iifEnum.'),
    (u'RemoveIniFile', u'Value', u'Y', None, None, None, None, u'Formatted', None, u'The value to be deleted. The value is required when Action is iifIniRemoveTag'),
    (u'RemoveIniFile', u'Key', u'N', None, None, None, None, u'Formatted', None, u'The .INI file key below Section.'),
    (u'RemoveIniFile', u'Component_', u'N', None, None, u'Component', 1, u'Identifier', None, u'Foreign key into the Component table referencing component that controls the deletion of the .INI value.'),
    (u'RemoveIniFile', u'FileName', u'N', None, None, None, None, u'Filename', None, u'The .INI file name in which to delete the information'),
    (u'RemoveIniFile', u'DirProperty', u'Y', None, None, None, None, u'Identifier', None, u'Foreign key into the Directory table denoting the directory where the .INI file is.'),
    (u'RemoveIniFile', u'Section', u'N', None, None, None, None, u'Formatted', None, u'The .INI file Section.'),
    (u'RemoveIniFile', u'RemoveIniFile', u'N', None, None, None, None, u'Identifier', None, u'Primary key, non-localized token.'),
    (u'RemoveRegistry', u'Name', u'Y', None, None, None, None, u'Formatted', None, u'The registry value name.'),
    (u'RemoveRegistry', u'Key', u'N', None, None, None, None, u'RegPath', None, u'The key for the registry value.'),
    (u'RemoveRegistry', u'Component_', u'N', None, None, u'Component', 1, u'Identifier', None, u'Foreign key into the Component table referencing component that controls the deletion of the registry value.'),
    (u'RemoveRegistry', u'Root', u'N', -1, 3, None, None, None, None, u'The predefined root key for the registry value, one of rrkEnum'),
    (u'RemoveRegistry', u'RemoveRegistry', u'N', None, None, None, None, u'Identifier', None, u'Primary key, non-localized token.'),
    (u'ReserveCost', u'Component_', u'N', None, None, u'Component', 1, u'Identifier', None, u'Reserve a specified amount of space if this component is to be installed.'),
    (u'ReserveCost', u'ReserveFolder', u'Y', None, None, None, None, u'Identifier', None, u'Name of a property whose value is assumed to resolve to the full path to the destination directory'),
    (u'ReserveCost', u'ReserveKey', u'N', None, None, None, None, u'Identifier', None, u'Primary key that uniquely identifies a particular ReserveCost record'),
    (u'ReserveCost', u'ReserveLocal', u'N', 0, 2147483647, None, None, None, None, u'Disk space to reserve if linked component is installed locally.'),
    (u'ReserveCost', u'ReserveSource', u'N', 0, 2147483647, None, None, None, None, u'Disk space to reserve if linked component is installed to run from the source location.'),
    (u'SelfReg', u'File_', u'N', None, None, u'File', 1, u'Identifier', None, u'Foreign key into the File table denoting the module that needs to be registered.'),
    (u'SelfReg', u'Cost', u'Y', 0, 32767, None, None, None, None, u'The cost of registering the module.'),
    (u'ServiceControl', u'Name', u'N', None, None, None, None, u'Formatted', None, u'Name of a service. /, \\, comma and space are invalid'),
    (u'ServiceControl', u'Event', u'N', 0, 187, None, None, None, None, u'Bit field: Install: 0x1 = Start, 0x2 = Stop, 0x8 = Delete; Uninstall: 0x10 = Start, 0x20 = Stop, 0x80 = Delete'),
    (u'ServiceControl', u'Component_', u'N', None, None, u'Component', 1, u'Identifier', None, u'Required foreign key into the Component Table that controls the startup of the service'),
    (u'ServiceControl', u'ServiceControl', u'N', None, None, None, None, u'Identifier', None, u'Primary key, non-localized token.'),
    (u'ServiceControl', u'Arguments', u'Y', None, None, None, None, u'Formatted', None, u'Arguments for the service. Separate by [~].'),
    (u'ServiceControl', u'Wait', u'Y', 0, 1, None, None, None, None, u'Boolean for whether to wait for the service to fully start'),
    (u'ServiceInstall', u'Name', u'N', None, None, None, None, u'Formatted', None, u'Internal Name of the Service'),
    (u'ServiceInstall', u'Description', u'Y', None, None, None, None, u'Text', None, u'Description of service.'),
    (u'ServiceInstall', u'Component_', u'N', None, None, u'Component', 1, u'Identifier', None, u'Required foreign key into the Component Table that controls the startup of the service'),
    (u'ServiceInstall', u'Arguments', u'Y', None, None, None, None, u'Formatted', None, u'Arguments to include in every start of the service, passed to WinMain'),
    (u'ServiceInstall', u'ServiceInstall', u'N', None, None, None, None, u'Identifier', None, u'Primary key, non-localized token.'),
    (u'ServiceInstall', u'Dependencies', u'Y', None, None, None, None, u'Formatted', None, u'Other services this depends on to start. Separate by [~], and end with [~][~]'),
    (u'ServiceInstall', u'DisplayName', u'Y', None, None, None, None, u'Formatted', None, u'External Name of the Service'),
    (u'ServiceInstall', u'ErrorControl', u'N', -2147483647, 2147483647, None, None, None, None, u'Severity of error if service fails to start'),
    (u'ServiceInstall', u'LoadOrderGroup', u'Y', None, None, None, None, u'Formatted', None, u'LoadOrderGroup'),
    (u'ServiceInstall', u'Password', u'Y', None, None, None, None, u'Formatted', None, u'Password to run service with. (with StartName)'),
    (u'ServiceInstall', u'ServiceType', u'N', -2147483647, 2147483647, None, None, None, None, u'Type of the service'),
    (u'ServiceInstall', u'StartName', u'Y', None, None, None, None, u'Formatted', None, u'User or object name to run service as'),
    (u'ServiceInstall', u'StartType', u'N', 0, 4, None, None, None, None, u'Start type of the service'),
    (u'Shortcut', u'Name', u'N', None, None, None, None, u'Filename', None, u'The name of the shortcut to be created.'),
    (u'Shortcut', u'Description', u'Y', None, None, None, None, u'Text', None, u'The description for the shortcut.'),
    (u'Shortcut', u'Component_', u'N', None, None, u'Component', 1, u'Identifier', None, u'Foreign key into the Component table denoting the component whose selection gates the shortcut creation/deletion.'),
    (u'Shortcut', u'Icon_', u'Y', None, None, u'Icon', 1, u'Identifier', None, u'Foreign key into the File table denoting the external icon file for the shortcut.'),
    (u'Shortcut', u'IconIndex', u'Y', -32767, 32767, None, None, None, None, u'The icon index for the shortcut.'),
    (u'Shortcut', u'Directory_', u'N', None, None, u'Directory', 1, u'Identifier', None, u'Foreign key into the Directory table denoting the directory where the shortcut file is created.'),
    (u'Shortcut', u'Target', u'N', None, None, None, None, u'Shortcut', None, u'The shortcut target. This is usually a property that is expanded to a file or a folder that the shortcut points to.'),
    (u'Shortcut', u'Arguments', u'Y', None, None, None, None, u'Formatted', None, u'The command-line arguments for the shortcut.'),
    (u'Shortcut', u'Shortcut', u'N', None, None, None, None, u'Identifier', None, u'Primary key, non-localized token.'),
    (u'Shortcut', u'Hotkey', u'Y', 0, 32767, None, None, None, None, u'The hotkey for the shortcut. It has the virtual-key code for the key in the low-order byte, and the modifier flags in the high-order byte.'),
    (u'Shortcut', u'ShowCmd', u'Y', None, None, None, None, None, u'1;3;7', u'The show command for the application window. The following values may be used.'),
    (u'Shortcut', u'WkDir', u'Y', None, None, None, None, u'Identifier', None, u'Name of property defining location of working directory.'),
    (u'Signature', u'FileName', u'N', None, None, None, None, u'Filename', None, u'The name of the file. This may contain a "short name|long name" pair.'),
    (u'Signature', u'Signature', u'N', None, None, None, None, u'Identifier', None, u'The table key. The Signature represents a unique file signature.'),
    (u'Signature', u'Languages', u'Y', None, None, None, None, u'Language', None, u'The languages supported by the file.'),
    (u'Signature', u'MaxDate', u'Y', 0, 2147483647, None, None, None, None, u'The maximum creation date of the file.'),
    (u'Signature', u'MaxSize', u'Y', 0, 2147483647, None, None, None, None, u'The maximum size of the file.'),
    (u'Signature', u'MaxVersion', u'Y', None, None, None, None, u'Text', None, u'The maximum version of the file.'),
    (u'Signature', u'MinDate', u'Y', 0, 2147483647, None, None, None, None, u'The minimum creation date of the file.'),
    (u'Signature', u'MinSize', u'Y', 0, 2147483647, None, None, None, None, u'The minimum size of the file.'),
    (u'Signature', u'MinVersion', u'Y', None, None, None, None, u'Text', None, u'The minimum version of the file.'),
    (u'TypeLib', u'Feature_', u'N', None, None, u'Feature', 1, u'Identifier', None, u'Required foreign key into the Feature Table, specifying the feature to validate or install in order for the type library to be operational.'),
    (u'TypeLib', u'Description', u'Y', None, None, None, None, u'Text', None, None),
    (u'TypeLib', u'Component_', u'N', None, None, u'Component', 1, u'Identifier', None, u'Required foreign key into the Component Table, specifying the component for which to return a path when called through LocateComponent.'),
    (u'TypeLib', u'Directory_', u'Y', None, None, u'Directory', 1, u'Identifier', None, u'Optional. The foreign key into the Directory table denoting the path to the help file for the type library.'),
    (u'TypeLib', u'Language', u'N', 0, 32767, None, None, None, None, u'The language of the library.'),
    (u'TypeLib', u'Version', u'Y', 0, 16777215, None, None, None, None, u'The version of the library. The minor version is in the lower 8 bits of the integer. The major version is in the next 16 bits.'),
    (u'TypeLib', u'Cost', u'Y', 0, 2147483647, None, None, None, None, u'The cost associated with the registration of the typelib. This column is currently optional.'),
    (u'TypeLib', u'LibID', u'N', None, None, None, None, u'Guid', None, u'The GUID that represents the library.'),
    (u'Upgrade', u'Attributes', u'N', 0, 2147483647, None, None, None, None, u'The attributes of this product set.'),
    (u'Upgrade', u'Remove', u'Y', None, None, None, None, u'Formatted', None, u'The list of features to remove when uninstalling a product from this set. The default is "ALL".'),
    (u'Upgrade', u'Language', u'Y', None, None, None, None, u'Language', None, u'A comma-separated list of languages for either products in this set or products not in this set.'),
    (u'Upgrade', u'ActionProperty', u'N', None, None, None, None, u'UpperCase', None, u'The property to set when a product in this set is found.'),
    (u'Upgrade', u'UpgradeCode', u'N', None, None, None, None, u'Guid', None, u'The UpgradeCode GUID belonging to the products in this set.'),
    (u'Upgrade', u'VersionMax', u'Y', None, None, None, None, u'Text', None, u'The maximum ProductVersion of the products in this set. The set may or may not include products with this particular version.'),
    (u'Upgrade', u'VersionMin', u'Y', None, None, None, None, u'Text', None, u'The minimum ProductVersion of the products in this set. The set may or may not include products with this particular version.'),
    (u'Verb', u'Sequence', u'Y', 0, 32767, None, None, None, None, u'Order within the verbs for a particular extension. 
Also used simply to specify the default verb.'), (u'Verb', u'Argument', u'Y', None, None, None, None, u'Formatted', None, u'Optional value for the command arguments.'), (u'Verb', u'Extension_', u'N', None, None, u'Extension', 1, u'Text', None, u'The extension associated with the table row.'), (u'Verb', u'Verb', u'N', None, None, None, None, u'Text', None, u'The verb for the command.'), (u'Verb', u'Command', u'Y', None, None, None, None, u'Formatted', None, u'The command text.'), ] Error = [ (0, u'{{Fatal error: }}'), (1, u'{{Error [1]. }}'), (2, u'Warning [1]. '), (3, None), (4, u'Info [1]. '), (5, u'The installer has encountered an unexpected error installing this package. This may indicate a problem with this package. The error code is [1]. {{The arguments are: [2], [3], [4]}}'), (6, None), (7, u'{{Disk full: }}'), (8, u'Action [Time]: [1]. [2]'), (9, u'[ProductName]'), (10, u'{[2]}{, [3]}{, [4]}'), (11, u'Message type: [1], Argument: [2]'), (12, u'=== Logging started: [Date] [Time] ==='), (13, u'=== Logging stopped: [Date] [Time] ==='), (14, u'Action start [Time]: [1].'), (15, u'Action ended [Time]: [1]. Return value [2].'), (16, u'Time remaining: {[1] minutes }{[2] seconds}'), (17, u'Out of memory. Shut down other applications before retrying.'), (18, u'Installer is no longer responding.'), (19, u'Installer stopped prematurely.'), (20, u'Please wait while Windows configures [ProductName]'), (21, u'Gathering required information...'), (22, u'Removing older versions of this application...'), (23, u'Preparing to remove older versions of this application...'), (32, u'{[ProductName] }Setup completed successfully.'), (33, u'{[ProductName] }Setup failed.'), (1101, u'Error reading from file: [2]. {{ System error [3].}} Verify that the file exists and that you can access it.'), (1301, u"Cannot create the file '[2]'. A directory with this name already exists. Cancel the install and try installing to a different location."), (1302, u'Please insert the disk: [2]'), (1303, u'The installer has insufficient privileges to access this directory: [2]. The installation cannot continue. Log on as administrator or contact your system administrator.'), (1304, u'Error writing to file: [2]. Verify that you have access to that directory.'), (1305, u'Error reading from file [2]. {{ System error [3].}} Verify that the file exists and that you can access it.'), (1306, u"Another application has exclusive access to the file '[2]'. Please shut down all other applications, then click Retry."), (1307, u'There is not enough disk space to install this file: [2]. Free some disk space and click Retry, or click Cancel to exit.'), (1308, u'Source file not found: [2]. Verify that the file exists and that you can access it.'), (1309, u'Error reading from file: [3]. {{ System error [2].}} Verify that the file exists and that you can access it.'), (1310, u'Error writing to file: [3]. {{ System error [2].}} Verify that you have access to that directory.'), (1311, u'Source file not found{{(cabinet)}}: [2]. Verify that the file exists and that you can access it.'), (1312, u"Cannot create the directory '[2]'. A file with this name already exists. Please rename or remove the file and click retry, or click Cancel to exit."), (1313, u'The volume [2] is currently unavailable. 
Please select another.'), (1314, u"The specified path '[2]' is unavailable."), (1315, u'Unable to write to the specified folder: [2].'), (1316, u'A network error occurred while attempting to read from the file: [2]'), (1317, u'An error occurred while attempting to create the directory: [2]'), (1318, u'A network error occurred while attempting to create the directory: [2]'), (1319, u'A network error occurred while attempting to open the source file cabinet: [2]'), (1320, u'The specified path is too long: [2]'), (1321, u'The Installer has insufficient privileges to modify this file: [2].'), (1322, u"A portion of the folder path '[2]' is invalid. It is either empty or exceeds the length allowed by the system."), (1323, u"The folder path '[2]' contains words that are not valid in folder paths."), (1324, u"The folder path '[2]' contains an invalid character."), (1325, u"'[2]' is not a valid short file name."), (1326, u'Error getting file security: [3] GetLastError: [2]'), (1327, u'Invalid Drive: [2]'), (1328, u'Error applying patch to file [2]. It has probably been updated by other means, and can no longer be modified by this patch. For more information contact your patch vendor. {{System Error: [3]}}'), (1329, u'A file that is required cannot be installed because the cabinet file [2] is not digitally signed. This may indicate that the cabinet file is corrupt.'), (1330, u'A file that is required cannot be installed because the cabinet file [2] has an invalid digital signature. This may indicate that the cabinet file is corrupt.{{ Error [3] was returned by WinVerifyTrust.}}'), (1331, u'Failed to correctly copy [2] file: CRC error.'), (1332, u'Failed to correctly move [2] file: CRC error.'), (1333, u'Failed to correctly patch [2] file: CRC error.'), (1334, u"The file '[2]' cannot be installed because the file cannot be found in cabinet file '[3]'. This could indicate a network error, an error reading from the CD-ROM, or a problem with this package."), (1335, u"The cabinet file '[2]' required for this installation is corrupt and cannot be used. This could indicate a network error, an error reading from the CD-ROM, or a problem with this package."), (1336, u'There was an error creating a temporary file that is needed to complete this installation.{{ Folder: [3]. System error code: [2]}}'), (1401, u'Could not create key: [2]. {{ System error [3].}} Verify that you have sufficient access to that key, or contact your support personnel. '), (1402, u'Could not open key: [2]. {{ System error [3].}} Verify that you have sufficient access to that key, or contact your support personnel. '), (1403, u'Could not delete value [2] from key [3]. {{ System error [4].}} Verify that you have sufficient access to that key, or contact your support personnel. '), (1404, u'Could not delete key [2]. {{ System error [3].}} Verify that you have sufficient access to that key, or contact your support personnel. '), (1405, u'Could not read value [2] from key [3]. {{ System error [4].}} Verify that you have sufficient access to that key, or contact your support personnel. '), (1406, u'Could not write value [2] to key [3]. {{ System error [4].}} Verify that you have sufficient access to that key, or contact your support personnel.'), (1407, u'Could not get value names for key [2]. {{ System error [3].}} Verify that you have sufficient access to that key, or contact your support personnel.'), (1408, u'Could not get sub key names for key [2]. 
{{ System error [3].}} Verify that you have sufficient access to that key, or contact your support personnel.'), (1409, u'Could not read security information for key [2]. {{ System error [3].}} Verify that you have sufficient access to that key, or contact your support personnel.'), (1410, u'Could not increase the available registry space. [2] KB of free registry space is required for the installation of this application.'), (1500, u'Another installation is in progress. You must complete that installation before continuing this one.'), (1501, u'Error accessing secured data. Please make sure the Windows Installer is configured properly and try the install again.'), (1502, u"User '[2]' has previously initiated an install for product '[3]'. That user will need to run that install again before they can use that product. Your current install will now continue."), (1503, u"User '[2]' has previously initiated an install for product '[3]'. That user will need to run that install again before they can use that product."), (1601, u"Out of disk space -- Volume: '[2]'; required space: [3] KB; available space: [4] KB. Free some disk space and retry."), (1602, u'Are you sure you want to cancel?'), (1603, u"The file [2][3] is being held in use{ by the following process: Name: [4], Id: [5], Window Title: '[6]'}. Close that application and retry."), (1604, u"The product '[2]' is already installed, preventing the installation of this product. The two products are incompatible."), (1605, u"There is not enough disk space on the volume '[2]' to continue the install with recovery enabled. [3] KB are required, but only [4] KB are available. Click Ignore to continue the install without saving recovery information, click Retry to check for available space again, or click Cancel to quit the installation."), (1606, u'Could not access network location [2].'), (1607, u'The following applications should be closed before continuing the install:'), (1608, u'Could not find any previously installed compliant products on the machine for installing this product.'), (1609, u"An error occurred while applying security settings. [2] is not a valid user or group. This could be a problem with the package, or a problem connecting to a domain controller on the network. Check your network connection and click Retry, or Cancel to end the install. {{Unable to locate the user's SID, system error [3]}}"), (1701, u'The key [2] is not valid. Verify that you entered the correct key.'), (1702, u'The installer must restart your system before configuration of [2] can continue. Click Yes to restart now or No if you plan to manually restart later.'), (1703, u'You must restart your system for the configuration changes made to [2] to take effect. Click Yes to restart now or No if you plan to manually restart later.'), (1704, u'An installation for [2] is currently suspended. You must undo the changes made by that installation to continue. Do you want to undo those changes?'), (1705, u'A previous installation for this product is in progress. You must undo the changes made by that installation to continue. Do you want to undo those changes?'), (1706, u"An installation package for the product [2] cannot be found. Try the installation again using a valid copy of the installation package '[3]'."), (1707, u'Installation completed successfully.'), (1708, u'Installation failed.'), (1709, u'Product: [2] -- [3]'), (1710, u'You may either restore your computer to its previous state or continue the install later. 
Would you like to restore?'), (1711, u'An error occurred while writing installation information to disk. Check to make sure enough disk space is available, and click Retry, or Cancel to end the install.'), (1712, u'One or more of the files required to restore your computer to its previous state could not be found. Restoration will not be possible.'), (1713, u'[2] cannot install one of its required products. Contact your technical support group. {{System Error: [3].}}'), (1714, u'The older version of [2] cannot be removed. Contact your technical support group. {{System Error [3].}}'), (1715, u'Installed [2]'), (1716, u'Configured [2]'), (1717, u'Removed [2]'), (1718, u'File [2] was rejected by digital signature policy.'), (1719, u'The Windows Installer Service could not be accessed. This can occur if you are running Windows in safe mode, or if the Windows Installer is not correctly installed. Contact your support personnel for assistance.'), (1720, u'There is a problem with this Windows Installer package. A script required for this install to complete could not be run. Contact your support personnel or package vendor. {{Custom action [2] script error [3], [4]: [5] Line [6], Column [7], [8] }}'), (1721, u'There is a problem with this Windows Installer package. A program required for this install to complete could not be run. Contact your support personnel or package vendor. {{Action: [2], location: [3], command: [4] }}'), (1722, u'There is a problem with this Windows Installer package. A program run as part of the setup did not finish as expected. Contact your support personnel or package vendor. {{Action [2], location: [3], command: [4] }}'), (1723, u'There is a problem with this Windows Installer package. A DLL required for this install to complete could not be run. Contact your support personnel or package vendor. {{Action [2], entry: [3], library: [4] }}'), (1724, u'Removal completed successfully.'), (1725, u'Removal failed.'), (1726, u'Advertisement completed successfully.'), (1727, u'Advertisement failed.'), (1728, u'Configuration completed successfully.'), (1729, u'Configuration failed.'), (1730, u'You must be an Administrator to remove this application. To remove this application, you can log on as an Administrator, or contact your technical support group for assistance.'), (1801, u'The path [2] is not valid. Please specify a valid path.'), (1802, u'Out of memory. Shut down other applications before retrying.'), (1803, u'There is no disk in drive [2]. Please insert one and click Retry, or click Cancel to go back to the previously selected volume.'), (1804, u'There is no disk in drive [2]. Please insert one and click Retry, or click Cancel to return to the browse dialog and select a different volume.'), (1805, u'The folder [2] does not exist. Please enter a path to an existing folder.'), (1806, u'You have insufficient privileges to read this folder.'), (1807, u'A valid destination folder for the install could not be determined.'), (1901, u'Error attempting to read from the source install database: [2].'), (1902, u'Scheduling reboot operation: Renaming file [2] to [3]. Must reboot to complete operation.'), (1903, u'Scheduling reboot operation: Deleting file [2]. Must reboot to complete operation.'), (1904, u'Module [2] failed to register. HRESULT [3]. Contact your support personnel.'), (1905, u'Module [2] failed to unregister. HRESULT [3]. Contact your support personnel.'), (1906, u'Failed to cache package [2]. Error: [3]. 
Contact your support personnel.'), (1907, u'Could not register font [2]. Verify that you have sufficient permissions to install fonts, and that the system supports this font.'), (1908, u'Could not unregister font [2]. Verify that you that you have sufficient permissions to remove fonts.'), (1909, u'Could not create Shortcut [2]. Verify that the destination folder exists and that you can access it.'), (1910, u'Could not remove Shortcut [2]. Verify that the shortcut file exists and that you can access it.'), (1911, u'Could not register type library for file [2]. Contact your support personnel.'), (1912, u'Could not unregister type library for file [2]. Contact your support personnel.'), (1913, u'Could not update the ini file [2][3]. Verify that the file exists and that you can access it.'), (1914, u'Could not schedule file [2] to replace file [3] on reboot. Verify that you have write permissions to file [3].'), (1915, u'Error removing ODBC driver manager, ODBC error [2]: [3]. Contact your support personnel.'), (1916, u'Error installing ODBC driver manager, ODBC error [2]: [3]. Contact your support personnel.'), (1917, u'Error removing ODBC driver: [4], ODBC error [2]: [3]. Verify that you have sufficient privileges to remove ODBC drivers.'), (1918, u'Error installing ODBC driver: [4], ODBC error [2]: [3]. Verify that the file [4] exists and that you can access it.'), (1919, u'Error configuring ODBC data source: [4], ODBC error [2]: [3]. Verify that the file [4] exists and that you can access it.'), (1920, u"Service '[2]' ([3]) failed to start. Verify that you have sufficient privileges to start system services."), (1921, u"Service '[2]' ([3]) could not be stopped. Verify that you have sufficient privileges to stop system services."), (1922, u"Service '[2]' ([3]) could not be deleted. Verify that you have sufficient privileges to remove system services."), (1923, u"Service '[2]' ([3]) could not be installed. Verify that you have sufficient privileges to install system services."), (1924, u"Could not update environment variable '[2]'. Verify that you have sufficient privileges to modify environment variables."), (1925, u'You do not have sufficient privileges to complete this installation for all users of the machine. Log on as administrator and then retry this installation.'), (1926, u"Could not set file security for file '[3]'. Error: [2]. Verify that you have sufficient privileges to modify the security permissions for this file."), (1927, u'Component Services (COM+ 1.0) are not installed on this computer. This installation requires Component Services in order to complete successfully. Component Services are available on Windows 2000.'), (1928, u'Error registering COM+ Application. Contact your support personnel for more information.'), (1929, u'Error unregistering COM+ Application. Contact your support personnel for more information.'), (1930, u"The description for service '[2]' ([3]) could not be changed."), (1931, u'The Windows Installer service cannot update the system file [2] because the file is protected by Windows. You may need to update your operating system for this program to work correctly. {{Package version: [3], OS Protected version: [4]}}'), (1932, u'The Windows Installer service cannot update the protected Windows file [2]. {{Package version: [3], OS Protected version: [4], SFP Error: [5]}}'), (1933, u'The Windows Installer service cannot update one or more protected Windows files. {{SFP Error: [2]. 
List of protected files:\\r\\n[3]}}'), (1934, u'User installations are disabled via policy on the machine.'), (1935, u'An error occurred during the installation of assembly component [2]. HRESULT: [3]. {{assembly interface: [4], function: [5], assembly name: [6]}}'), ] tables=['AdminExecuteSequence', 'AdminUISequence', 'AdvtExecuteSequence', 'BBControl', 'Billboard', 'Binary', 'CheckBox', 'Property', 'ComboBox', 'Control', 'ListBox', 'ActionText', 'ControlCondition', 'ControlEvent', 'Dialog', 'EventMapping', 'InstallExecuteSequence', 'InstallUISequence', 'ListView', 'RadioButton', 'TextStyle', 'UIText', '_Validation', 'Error']
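# --- Illustrative sketch (not part of the original module) ---
# The Error list above is a sequence of (code, template) pairs, so a
# plain dict() gives direct lookup of installer message templates.
if __name__ == '__main__':
    error_templates = dict(Error)
    assert error_templates[1707] == u'Installation completed successfully.'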
lgpl-3.0
yochow/autotest
frontend/afe/simplejson/scanner.py
1
1997
""" Iterator based sre token scanner """ import sre_parse, sre_compile, sre_constants from sre_constants import BRANCH, SUBPATTERN from re import VERBOSE, MULTILINE, DOTALL import re __all__ = ['Scanner', 'pattern'] FLAGS = (VERBOSE | MULTILINE | DOTALL) class Scanner(object): def __init__(self, lexicon, flags=FLAGS): self.actions = [None] # combine phrases into a compound pattern s = sre_parse.Pattern() s.flags = flags p = [] for idx, token in enumerate(lexicon): phrase = token.pattern try: subpattern = sre_parse.SubPattern(s, [(SUBPATTERN, (idx + 1, sre_parse.parse(phrase, flags)))]) except sre_constants.error: raise p.append(subpattern) self.actions.append(token) p = sre_parse.SubPattern(s, [(BRANCH, (None, p))]) self.scanner = sre_compile.compile(p) def iterscan(self, string, idx=0, context=None): """ Yield match, end_idx for each match """ match = self.scanner.scanner(string, idx).match actions = self.actions lastend = idx end = len(string) while True: m = match() if m is None: break matchbegin, matchend = m.span() if lastend == matchend: break action = actions[m.lastindex] if action is not None: rval, next_pos = action(m, context) if next_pos is not None and next_pos != matchend: # "fast forward" the scanner matchend = next_pos match = self.scanner.scanner(string, matchend).match yield rval, matchend lastend = matchend def pattern(pattern, flags=FLAGS): def decorator(fn): fn.pattern = pattern fn.regex = re.compile(pattern, flags) return fn return decorator
gpl-2.0
javachengwc/hue
desktop/core/ext-py/Django-1.6.10/tests/inspectdb/tests.py
48
9021
# -*- encoding: utf-8 -*-
from __future__ import unicode_literals

import re

from django.core.management import call_command
from django.db import connection
from django.test import TestCase, skipUnlessDBFeature
from django.utils.unittest import expectedFailure
from django.utils.six import PY3, StringIO

if connection.vendor == 'oracle':
    expectedFailureOnOracle = expectedFailure
else:
    expectedFailureOnOracle = lambda f: f


class InspectDBTestCase(TestCase):

    def test_stealth_table_name_filter_option(self):
        out = StringIO()
        # Let's limit the introspection to tables created for models of this
        # application
        call_command('inspectdb',
                     table_name_filter=lambda tn:tn.startswith('inspectdb_'),
                     stdout=out)
        error_message = "inspectdb has examined a table that should have been filtered out."
        # contrib.contenttypes is one of the apps always installed when running
        # the Django test suite, check that one of its tables hasn't been
        # inspected
        self.assertNotIn("class DjangoContentType(models.Model):", out.getvalue(), msg=error_message)

    def make_field_type_asserter(self):
        """Call inspectdb and return a function to validate a field type in its output"""
        out = StringIO()
        call_command('inspectdb',
                     table_name_filter=lambda tn: tn.startswith('inspectdb_columntypes'),
                     stdout=out)
        output = out.getvalue()

        def assertFieldType(name, definition):
            out_def = re.search(r'^\s*%s = (models.*)$' % name, output, re.MULTILINE).groups()[0]
            self.assertEqual(definition, out_def)

        return assertFieldType

    # Inspecting oracle DB doesn't produce correct results, see #19884
    @expectedFailureOnOracle
    def test_field_types(self):
        """Test introspection of various Django field types"""
        assertFieldType = self.make_field_type_asserter()

        assertFieldType('char_field', "models.CharField(max_length=10)")
        assertFieldType('comma_separated_int_field', "models.CharField(max_length=99)")
        assertFieldType('date_field', "models.DateField()")
        assertFieldType('date_time_field', "models.DateTimeField()")
        assertFieldType('email_field', "models.CharField(max_length=75)")
        assertFieldType('file_field', "models.CharField(max_length=100)")
        assertFieldType('file_path_field', "models.CharField(max_length=100)")
        if connection.vendor == 'postgresql':
            # Only PostgreSQL has a specific type
            assertFieldType('ip_address_field', "models.GenericIPAddressField()")
            assertFieldType('gen_ip_adress_field', "models.GenericIPAddressField()")
        else:
            assertFieldType('ip_address_field', "models.CharField(max_length=15)")
            assertFieldType('gen_ip_adress_field', "models.CharField(max_length=39)")
        assertFieldType('slug_field', "models.CharField(max_length=50)")
        assertFieldType('text_field', "models.TextField()")
        assertFieldType('time_field', "models.TimeField()")
        assertFieldType('url_field', "models.CharField(max_length=200)")

    def test_number_field_types(self):
        """Test introspection of various Django field types"""
        assertFieldType = self.make_field_type_asserter()

        assertFieldType('id', "models.IntegerField(primary_key=True)")
        assertFieldType('big_int_field', "models.BigIntegerField()")
        if connection.vendor == 'mysql':
            # No native boolean type on MySQL
            assertFieldType('bool_field', "models.IntegerField()")
            assertFieldType('null_bool_field', "models.IntegerField(blank=True, null=True)")
        else:
            assertFieldType('bool_field', "models.BooleanField()")
            assertFieldType('null_bool_field', "models.NullBooleanField()")
        if connection.vendor == 'sqlite':
            # Guessed arguments, see #5014
            assertFieldType('decimal_field', "models.DecimalField(max_digits=10, decimal_places=5) "
                "# max_digits and decimal_places have been guessed, as this database handles decimal fields as float")
        else:
            assertFieldType('decimal_field', "models.DecimalField(max_digits=6, decimal_places=1)")
        assertFieldType('float_field', "models.FloatField()")
        assertFieldType('int_field', "models.IntegerField()")
        if connection.vendor == 'sqlite':
            assertFieldType('pos_int_field', "models.PositiveIntegerField()")
            assertFieldType('pos_small_int_field', "models.PositiveSmallIntegerField()")
        else:
            # 'unsigned' property undetected on other backends
            assertFieldType('pos_int_field', "models.IntegerField()")
            if connection.vendor == 'postgresql':
                assertFieldType('pos_small_int_field', "models.SmallIntegerField()")
            else:
                assertFieldType('pos_small_int_field', "models.IntegerField()")
        if connection.vendor in ('sqlite', 'postgresql'):
            assertFieldType('small_int_field', "models.SmallIntegerField()")
        else:
            assertFieldType('small_int_field', "models.IntegerField()")

    @skipUnlessDBFeature('can_introspect_foreign_keys')
    def test_attribute_name_not_python_keyword(self):
        out = StringIO()
        # Let's limit the introspection to tables created for models of this
        # application
        call_command('inspectdb',
                     table_name_filter=lambda tn:tn.startswith('inspectdb_'),
                     stdout=out)
        output = out.getvalue()
        error_message = "inspectdb generated an attribute name which is a python keyword"
        # Recursive foreign keys should be set to 'self'
        self.assertIn("parent = models.ForeignKey('self')", output)
        self.assertNotIn("from = models.ForeignKey(InspectdbPeople)", output, msg=error_message)
        # As InspectdbPeople model is defined after InspectdbMessage, it should be quoted
        self.assertIn("from_field = models.ForeignKey('InspectdbPeople', db_column='from_id')", output)
        self.assertIn("people_pk = models.ForeignKey(InspectdbPeople, primary_key=True)", output)
        self.assertIn("people_unique = models.ForeignKey(InspectdbPeople, unique=True)", output)

    def test_digits_column_name_introspection(self):
        """Introspection of column names that consist of/start with digits (#16536/#17676)"""
        out = StringIO()
        # Let's limit the introspection to tables created for models of this
        # application
        call_command('inspectdb',
                     table_name_filter=lambda tn:tn.startswith('inspectdb_'),
                     stdout=out)
        output = out.getvalue()
        error_message = "inspectdb generated a model field name which is a number"
        self.assertNotIn("    123 = models.CharField", output, msg=error_message)
        self.assertIn("number_123 = models.CharField", output)

        error_message = "inspectdb generated a model field name which starts with a digit"
        self.assertNotIn("    4extra = models.CharField", output, msg=error_message)
        self.assertIn("number_4extra = models.CharField", output)

        self.assertNotIn("    45extra = models.CharField", output, msg=error_message)
        self.assertIn("number_45extra = models.CharField", output)

    def test_special_column_name_introspection(self):
        """
        Introspection of column names containing special characters,
        unsuitable for Python identifiers
        """
        out = StringIO()
        call_command('inspectdb', stdout=out)
        output = out.getvalue()
        base_name = 'Field' if connection.vendor != 'oracle' else 'field'
        self.assertIn("field = models.IntegerField()", output)
        self.assertIn("field_field = models.IntegerField(db_column='%s_')" % base_name, output)
        self.assertIn("field_field_0 = models.IntegerField(db_column='%s__')" % base_name, output)
        self.assertIn("field_field_1 = models.IntegerField(db_column='__field')", output)
        self.assertIn("prc_x = models.IntegerField(db_column='prc(%) x')", output)
        if PY3:
            # Python 3 allows non-ascii identifiers
            self.assertIn("tamaño = models.IntegerField()", output)
        else:
            self.assertIn("tama_o = models.IntegerField(db_column='tama\\xf1o')", output)

    def test_managed_models(self):
        """Test that by default the command generates models with `Meta.managed = False` (#14305)"""
        out = StringIO()
        call_command('inspectdb',
                     table_name_filter=lambda tn:tn.startswith('inspectdb_columntypes'),
                     stdout=out)
        output = out.getvalue()
        self.longMessage = False
        self.assertIn("        managed = False", output, msg='inspectdb should generate unmanaged models.')
apache-2.0
beeftornado/sentry
tests/sentry/receivers/test_onboarding.py
1
14903
from __future__ import absolute_import

from django.utils import timezone

from sentry.models import (
    OnboardingTask,
    OnboardingTaskStatus,
    OrganizationOnboardingTask,
    OrganizationOption,
    Rule,
)
from sentry.signals import (
    event_processed,
    project_created,
    first_event_pending,
    first_event_received,
    member_invited,
    member_joined,
    plugin_enabled,
    issue_tracker_used,
    alert_rule_created,
)
from sentry.plugins.bases import IssueTrackingPlugin
from sentry.testutils import TestCase
from sentry.testutils.helpers.datetime import before_now, iso_format
from sentry.utils.samples import load_data


class OrganizationOnboardingTaskTest(TestCase):
    def test_no_existing_task(self):
        now = timezone.now()
        project = self.create_project(first_event=now)
        event = self.store_event(data={}, project_id=project.id)
        first_event_received.send(project=project, event=event, sender=type(project))

        task = OrganizationOnboardingTask.objects.get(
            organization=project.organization, task=OnboardingTask.FIRST_EVENT
        )
        assert task.status == OnboardingTaskStatus.COMPLETE
        assert task.project_id == project.id
        assert task.date_completed == project.first_event

    def test_existing_pending_task(self):
        now = timezone.now()
        project = self.create_project(first_event=now)
        first_event_pending.send(project=project, user=self.user, sender=type(project))

        task = OrganizationOnboardingTask.objects.get(
            organization=project.organization, task=OnboardingTask.FIRST_EVENT
        )

        assert task.status == OnboardingTaskStatus.PENDING
        assert task.project_id == project.id

        event = self.store_event(data={}, project_id=project.id)
        first_event_received.send(project=project, event=event, sender=type(project))

        task = OrganizationOnboardingTask.objects.get(
            organization=project.organization, task=OnboardingTask.FIRST_EVENT
        )
        assert task.status == OnboardingTaskStatus.COMPLETE
        assert task.project_id == project.id
        assert task.date_completed == project.first_event

    def test_existing_complete_task(self):
        now = timezone.now()
        project = self.create_project(first_event=now)
        task = OrganizationOnboardingTask.objects.create(
            organization=project.organization,
            task=OnboardingTask.FIRST_PROJECT,
            status=OnboardingTaskStatus.COMPLETE,
        )

        event = self.store_event(data={}, project_id=project.id)
        first_event_received.send(project=project, event=event, sender=type(project))
        task = OrganizationOnboardingTask.objects.get(id=task.id)
        assert task.status == OnboardingTaskStatus.COMPLETE
        assert not task.project_id

    # Tests on the receivers
    def test_event_processed(self):
        now = timezone.now()
        project = self.create_project(first_event=now)
        event = self.store_event(
            data={
                "event_id": "a" * 32,
                "platform": "javascript",
                "timestamp": iso_format(before_now(minutes=1)),
                "tags": {
                    "sentry:release": "e1b5d1900526feaf20fe2bc9cad83d392136030a",
                    "sentry:user": "id:41656",
                },
                "user": {"ip_address": "0.0.0.0", "id": "41656", "email": "test@example.com"},
                "exception": {
                    "values": [
                        {
                            "stacktrace": {
                                "frames": [
                                    {
                                        "data": {
                                            "sourcemap": "https://media.sentry.io/_static/29e365f8b0d923bc123e8afa38d890c3/sentry/dist/vendor.js.map"
                                        }
                                    }
                                ]
                            },
                            "type": "TypeError",
                        }
                    ]
                },
            },
            project_id=project.id,
        )

        event_processed.send(project=project, event=event, sender=type(project))

        task = OrganizationOnboardingTask.objects.get(
            organization=project.organization,
            task=OnboardingTask.RELEASE_TRACKING,
            status=OnboardingTaskStatus.COMPLETE,
        )
        assert task is not None

        task = OrganizationOnboardingTask.objects.get(
            organization=project.organization,
            task=OnboardingTask.USER_CONTEXT,
            status=OnboardingTaskStatus.COMPLETE,
        )
        assert task is not None

        task = OrganizationOnboardingTask.objects.get(
            organization=project.organization,
            task=OnboardingTask.SOURCEMAPS,
            status=OnboardingTaskStatus.COMPLETE,
        )
        assert task is not None

    def test_project_created(self):
        now = timezone.now()
        project = self.create_project(first_event=now)
        project_created.send(project=project, user=self.user, sender=type(project))
        task = OrganizationOnboardingTask.objects.get(
            organization=project.organization,
            task=OnboardingTask.FIRST_PROJECT,
            status=OnboardingTaskStatus.COMPLETE,
        )
        assert task is not None

    def test_first_event_pending(self):
        now = timezone.now()
        project = self.create_project(first_event=now)
        first_event_pending.send(project=project, user=self.user, sender=type(project))
        task = OrganizationOnboardingTask.objects.get(
            organization=project.organization,
            task=OnboardingTask.FIRST_EVENT,
            status=OnboardingTaskStatus.PENDING,
        )
        assert task is not None

    def test_first_event_received(self):
        now = timezone.now()
        project = self.create_project(first_event=now)
        project_created.send(project=project, user=self.user, sender=type(project))
        event = self.store_event(
            data={"platform": "javascript", "message": "javascript error message"},
            project_id=project.id,
        )
        first_event_received.send(project=project, event=event, sender=type(project))
        task = OrganizationOnboardingTask.objects.get(
            organization=project.organization,
            task=OnboardingTask.FIRST_EVENT,
            status=OnboardingTaskStatus.COMPLETE,
        )
        assert task is not None
        assert "platform" in task.data
        assert task.data["platform"] == "javascript"
        second_project = self.create_project(first_event=now)
        project_created.send(project=second_project, user=self.user, sender=type(second_project))
        second_task = OrganizationOnboardingTask.objects.get(
            organization=second_project.organization,
            task=OnboardingTask.SECOND_PLATFORM,
            status=OnboardingTaskStatus.PENDING,
        )
        assert second_task is not None
        second_event = self.store_event(
            data={"platform": "python", "message": "python error message"},
            project_id=second_project.id,
        )
        first_event_received.send(
            project=second_project, event=second_event, sender=type(second_project)
        )
        second_task = OrganizationOnboardingTask.objects.get(
            organization=second_project.organization,
            task=OnboardingTask.SECOND_PLATFORM,
            status=OnboardingTaskStatus.COMPLETE,
        )
        assert second_task is not None
        assert "platform" in second_task.data
        assert second_task.data["platform"] == "python"
        assert task.data["platform"] != second_task.data["platform"]

    def test_first_transaction_received(self):
        project = self.create_project()

        event_data = load_data("transaction")
        min_ago = iso_format(before_now(minutes=1))
        event_data.update({"start_timestamp": min_ago, "timestamp": min_ago})

        event = self.store_event(data=event_data, project_id=project.id)

        first_event_received.send(project=project, event=event, sender=type(project))

        task = OrganizationOnboardingTask.objects.get(
            organization=project.organization,
            task=OnboardingTask.FIRST_TRANSACTION,
            status=OnboardingTaskStatus.COMPLETE,
        )

        assert task is not None

    def test_member_invited(self):
        user = self.create_user(email="test@example.org")
        member = self.create_member(organization=self.organization, teams=[self.team], user=user)
        member_invited.send(member=member, user=user, sender=type(member))
        task = OrganizationOnboardingTask.objects.get(
            organization=self.organization,
            task=OnboardingTask.INVITE_MEMBER,
            status=OnboardingTaskStatus.PENDING,
        )
        assert task is not None

    def test_member_joined(self):
        user = self.create_user(email="test@example.org")
        member = self.create_member(organization=self.organization, teams=[self.team], user=user)
        member_joined.send(member=member, organization=self.organization, sender=type(member))
        task = OrganizationOnboardingTask.objects.get(
            organization=self.organization,
            task=OnboardingTask.INVITE_MEMBER,
            status=OnboardingTaskStatus.COMPLETE,
        )
        assert task is not None

        user2 = self.create_user(email="test@example.com")
        member2 = self.create_member(organization=self.organization, teams=[self.team], user=user2)
        member_joined.send(member=member2, organization=self.organization, sender=type(member2))
        task = OrganizationOnboardingTask.objects.get(
            organization=self.organization,
            task=OnboardingTask.INVITE_MEMBER,
            status=OnboardingTaskStatus.COMPLETE,
        )
        assert task.data["invited_member_id"] == member.id

    def test_issue_tracker_onboarding(self):
        plugin_enabled.send(
            plugin=IssueTrackingPlugin(),
            project=self.project,
            user=self.user,
            sender=type(IssueTrackingPlugin),
        )
        task = OrganizationOnboardingTask.objects.get(
            organization=self.organization,
            task=OnboardingTask.ISSUE_TRACKER,
            status=OnboardingTaskStatus.PENDING,
        )
        assert task is not None

        issue_tracker_used.send(
            plugin=IssueTrackingPlugin(),
            project=self.project,
            user=self.user,
            sender=type(IssueTrackingPlugin),
        )
        task = OrganizationOnboardingTask.objects.get(
            organization=self.organization,
            task=OnboardingTask.ISSUE_TRACKER,
            status=OnboardingTaskStatus.COMPLETE,
        )
        assert task is not None

    def test_alert_added(self):
        alert_rule_created.send(
            rule=Rule(id=1),
            project=self.project,
            user=self.user,
            rule_type="issue",
            sender=type(Rule),
            is_api_token=False,
        )
        task = OrganizationOnboardingTask.objects.get(
            organization=self.organization,
            task=OnboardingTask.ALERT_RULE,
            status=OnboardingTaskStatus.COMPLETE,
        )
        assert task is not None

    def test_onboarding_complete(self):
        now = timezone.now()
        user = self.create_user(email="test@example.org")
        project = self.create_project(first_event=now)
        second_project = self.create_project(first_event=now)
        second_event = self.store_event(
            data={"platform": "python", "message": "python error message"},
            project_id=second_project.id,
        )
        event = self.store_event(
            data={
                "event_id": "a" * 32,
                "platform": "javascript",
                "timestamp": iso_format(before_now(minutes=1)),
                "tags": {
                    "sentry:release": "e1b5d1900526feaf20fe2bc9cad83d392136030a",
                    "sentry:user": "id:41656",
                },
                "user": {"ip_address": "0.0.0.0", "id": "41656", "email": "test@example.com"},
                "exception": {
                    "values": [
                        {
                            "stacktrace": {
                                "frames": [
                                    {
                                        "data": {
                                            "sourcemap": "https://media.sentry.io/_static/29e365f8b0d923bc123e8afa38d890c3/sentry/dist/vendor.js.map"
                                        }
                                    }
                                ]
                            },
                            "type": "TypeError",
                        }
                    ]
                },
            },
            project_id=project.id,
        )
        event_data = load_data("transaction")
        min_ago = iso_format(before_now(minutes=1))
        event_data.update({"start_timestamp": min_ago, "timestamp": min_ago})
        transaction = self.store_event(data=event_data, project_id=project.id)

        first_event_received.send(project=project, event=transaction, sender=type(project))
        member = self.create_member(organization=self.organization, teams=[self.team], user=user)

        event_processed.send(project=project, event=event, sender=type(project))
        project_created.send(project=project, user=user, sender=type(project))
        project_created.send(project=second_project, user=user, sender=type(second_project))
        first_event_received.send(project=project, event=event, sender=type(project))
        first_event_received.send(
            project=second_project, event=second_event, sender=type(second_project)
        )
        member_joined.send(member=member, organization=self.organization, sender=type(member))
        plugin_enabled.send(
            plugin=IssueTrackingPlugin(),
            project=project,
            user=user,
            sender=type(IssueTrackingPlugin),
        )
        issue_tracker_used.send(
            plugin=IssueTrackingPlugin(),
            project=project,
            user=user,
            sender=type(IssueTrackingPlugin),
        )
        alert_rule_created.send(
            rule=Rule(id=1),
            project=self.project,
            user=self.user,
            rule_type="issue",
            sender=type(Rule),
            is_api_token=False,
        )

        assert (
            OrganizationOption.objects.filter(
                organization=self.organization, key="onboarding:complete"
            ).count()
            == 1
        )
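# --- Illustrative sketch (not part of the original test file) ---
# The query-and-assert step repeated throughout the tests above, factored
# out for clarity; `organization` and `task` are whatever the caller passes.
def _assert_task_status(organization, task, status=OnboardingTaskStatus.COMPLETE):
    row = OrganizationOnboardingTask.objects.get(
        organization=organization, task=task, status=status
    )
    assert row is not None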
bsd-3-clause
ujac81/PiBlaster
Pi/PyBlaster/src/lircthread.py
1
1516
"""lircthread.py -- Thread to read remote controller via lirc """ import Queue import lirc import threading import time class LircThread(threading.Thread): """ """ def __init__(self, main): """ """ threading.Thread.__init__(self) self.main = main self.queue = Queue.Queue() self.queue_lock = threading.Lock() self.lircsock = None # end __init__() # def run(self): """ """ #if not self.main.settings.use_lirc: # return self.lircsock = lirc.init("pyblaster", "/root/.lircrc", blocking=False) while self.main.keep_run: read = lirc.nextcode() if len(read): self.queue_lock.acquire() self.queue.put(read[0]) self.queue_lock.release() time.sleep(0.05) # read each 50 ms lirc.deinit() # end run() # def queue_not_empty(self): if not self.queue.empty(): return True return False def read_command(self): """dry run queue and return last command if such -- None else """ result = None while not self.queue.empty(): self.queue_lock.acquire() try: result = self.queue.get_nowait() except Queue.Empty: self.queue_lock.release() return None self.queue_lock.release() return result
gpl-3.0
javachengwc/hue
desktop/core/ext-py/Django-1.6.10/tests/logging_tests/tests.py
52
12810
from __future__ import unicode_literals

import copy
import logging
import sys
import warnings

from django.conf import LazySettings
from django.core import mail
from django.test import TestCase, RequestFactory
from django.test.utils import override_settings, patch_logger
from django.utils.encoding import force_text
from django.utils.log import (CallbackFilter, RequireDebugFalse,
    RequireDebugTrue)
from django.utils.six import StringIO
from django.utils.unittest import skipUnless

from admin_scripts.tests import AdminScriptTestCase

from .logconfig import MyEmailBackend

PYVERS = sys.version_info[:2]

# logging config prior to using filter with mail_admins
OLD_LOGGING = {
    'version': 1,
    'disable_existing_loggers': False,
    'handlers': {
        'mail_admins': {
            'level': 'ERROR',
            'class': 'django.utils.log.AdminEmailHandler'
        }
    },
    'loggers': {
        'django.request': {
            'handlers': ['mail_admins'],
            'level': 'ERROR',
            'propagate': True,
        },
    }
}


class LoggingFiltersTest(TestCase):
    def test_require_debug_false_filter(self):
        """
        Test the RequireDebugFalse filter class.
        """
        filter_ = RequireDebugFalse()

        with self.settings(DEBUG=True):
            self.assertEqual(filter_.filter("record is not used"), False)

        with self.settings(DEBUG=False):
            self.assertEqual(filter_.filter("record is not used"), True)

    def test_require_debug_true_filter(self):
        """
        Test the RequireDebugTrue filter class.
        """
        filter_ = RequireDebugTrue()

        with self.settings(DEBUG=True):
            self.assertEqual(filter_.filter("record is not used"), True)

        with self.settings(DEBUG=False):
            self.assertEqual(filter_.filter("record is not used"), False)


class DefaultLoggingTest(TestCase):
    def setUp(self):
        self.logger = logging.getLogger('django')
        self.old_stream = self.logger.handlers[0].stream

    def tearDown(self):
        self.logger.handlers[0].stream = self.old_stream

    def test_django_logger(self):
        """
        The 'django' base logger only outputs anything when DEBUG=True.
        """
        output = StringIO()
        self.logger.handlers[0].stream = output
        self.logger.error("Hey, this is an error.")
        self.assertEqual(output.getvalue(), '')

        with self.settings(DEBUG=True):
            self.logger.error("Hey, this is an error.")
            self.assertEqual(output.getvalue(), 'Hey, this is an error.\n')


@skipUnless(PYVERS > (2,6), "warnings captured only in Python >= 2.7")
class WarningLoggerTests(TestCase):
    """
    Tests that warnings output for DeprecationWarnings is enabled
    and captured to the logging system
    """
    def setUp(self):
        # If tests are invoked with "-Wall" (or any -W flag actually) then
        # warning logging gets disabled (see django/conf/__init__.py). However,
        # these tests expect warnings to be logged, so manually force warnings
        # to the logs. Use getattr() here because the logging capture state is
        # undocumented and (I assume) brittle.
        self._old_capture_state = bool(getattr(logging, '_warnings_showwarning', False))
        logging.captureWarnings(True)

        # this convoluted setup is to avoid printing this deprecation to
        # stderr during test running - as the test runner forces deprecations
        # to be displayed at the global py.warnings level
        self.logger = logging.getLogger('py.warnings')
        self.outputs = []
        self.old_streams = []
        for handler in self.logger.handlers:
            self.old_streams.append(handler.stream)
            self.outputs.append(StringIO())
            handler.stream = self.outputs[-1]

    def tearDown(self):
        for i, handler in enumerate(self.logger.handlers):
            self.logger.handlers[i].stream = self.old_streams[i]

        # Reset warnings state.
        logging.captureWarnings(self._old_capture_state)

    @override_settings(DEBUG=True)
    def test_warnings_capture(self):
        warnings.warn('Foo Deprecated', DeprecationWarning)
        output = force_text(self.outputs[0].getvalue())
        self.assertTrue('Foo Deprecated' in output)

    def test_warnings_capture_debug_false(self):
        warnings.warn('Foo Deprecated', DeprecationWarning)
        output = force_text(self.outputs[0].getvalue())
        self.assertFalse('Foo Deprecated' in output)


class CallbackFilterTest(TestCase):
    def test_sense(self):
        f_false = CallbackFilter(lambda r: False)
        f_true = CallbackFilter(lambda r: True)

        self.assertEqual(f_false.filter("record"), False)
        self.assertEqual(f_true.filter("record"), True)

    def test_passes_on_record(self):
        collector = []

        def _callback(record):
            collector.append(record)
            return True
        f = CallbackFilter(_callback)

        f.filter("a record")

        self.assertEqual(collector, ["a record"])


class AdminEmailHandlerTest(TestCase):
    logger = logging.getLogger('django.request')

    def get_admin_email_handler(self, logger):
        # Inspired from views/views.py: send_log()
        # ensuring the AdminEmailHandler does not get filtered out
        # even with DEBUG=True.
        admin_email_handler = [
            h for h in logger.handlers
            if h.__class__.__name__ == "AdminEmailHandler"
        ][0]
        return admin_email_handler

    def test_fail_silently(self):
        admin_email_handler = self.get_admin_email_handler(self.logger)
        self.assertTrue(admin_email_handler.connection().fail_silently)

    @override_settings(
        ADMINS=(('whatever admin', 'admin@example.com'),),
        EMAIL_SUBJECT_PREFIX='-SuperAwesomeSubject-'
    )
    def test_accepts_args(self):
        """
        Ensure that user-supplied arguments and the EMAIL_SUBJECT_PREFIX
        setting are used to compose the email subject.
        Refs #16736.
        """
        message = "Custom message that says '%s' and '%s'"
        token1 = 'ping'
        token2 = 'pong'

        admin_email_handler = self.get_admin_email_handler(self.logger)
        # Backup then override original filters
        orig_filters = admin_email_handler.filters
        try:
            admin_email_handler.filters = []

            self.logger.error(message, token1, token2)

            self.assertEqual(len(mail.outbox), 1)
            self.assertEqual(mail.outbox[0].to, ['admin@example.com'])
            self.assertEqual(mail.outbox[0].subject,
                             "-SuperAwesomeSubject-ERROR: Custom message that says 'ping' and 'pong'")
        finally:
            # Restore original filters
            admin_email_handler.filters = orig_filters

    @override_settings(
        ADMINS=(('whatever admin', 'admin@example.com'),),
        EMAIL_SUBJECT_PREFIX='-SuperAwesomeSubject-',
        INTERNAL_IPS=('127.0.0.1',),
    )
    def test_accepts_args_and_request(self):
        """
        Ensure that the subject is also handled if being
        passed a request object.
        """
        message = "Custom message that says '%s' and '%s'"
        token1 = 'ping'
        token2 = 'pong'

        admin_email_handler = self.get_admin_email_handler(self.logger)
        # Backup then override original filters
        orig_filters = admin_email_handler.filters
        try:
            admin_email_handler.filters = []
            rf = RequestFactory()
            request = rf.get('/')
            self.logger.error(message, token1, token2,
                extra={
                    'status_code': 403,
                    'request': request,
                }
            )
            self.assertEqual(len(mail.outbox), 1)
            self.assertEqual(mail.outbox[0].to, ['admin@example.com'])
            self.assertEqual(mail.outbox[0].subject,
                             "-SuperAwesomeSubject-ERROR (internal IP): Custom message that says 'ping' and 'pong'")
        finally:
            # Restore original filters
            admin_email_handler.filters = orig_filters

    @override_settings(
        ADMINS=(('admin', 'admin@example.com'),),
        EMAIL_SUBJECT_PREFIX='',
        DEBUG=False,
    )
    def test_subject_accepts_newlines(self):
        """
        Ensure that newlines in email reports' subjects are escaped to
        avoid AdminErrorHandler failing.
        Refs #17281.
        """
        message = 'Message \r\n with newlines'
        expected_subject = 'ERROR: Message \\r\\n with newlines'

        self.assertEqual(len(mail.outbox), 0)

        self.logger.error(message)

        self.assertEqual(len(mail.outbox), 1)
        self.assertFalse('\n' in mail.outbox[0].subject)
        self.assertFalse('\r' in mail.outbox[0].subject)
        self.assertEqual(mail.outbox[0].subject, expected_subject)

    @override_settings(
        ADMINS=(('admin', 'admin@example.com'),),
        EMAIL_SUBJECT_PREFIX='',
        DEBUG=False,
    )
    def test_truncate_subject(self):
        """
        RFC 2822's hard limit is 998 characters per line. So, minus "Subject: ",
        the actual subject must be no longer than 989 characters.
        Refs #17281.
        """
        message = 'a' * 1000
        expected_subject = 'ERROR: aa' + 'a' * 980

        self.assertEqual(len(mail.outbox), 0)

        self.logger.error(message)

        self.assertEqual(len(mail.outbox), 1)
        self.assertEqual(mail.outbox[0].subject, expected_subject)

    @override_settings(
        ADMINS=(('admin', 'admin@example.com'),),
        DEBUG=False,
    )
    def test_uses_custom_email_backend(self):
        """
        Refs #19325
        """
        message = 'All work and no play makes Jack a dull boy'
        admin_email_handler = self.get_admin_email_handler(self.logger)
        mail_admins_called = {'called': False}

        def my_mail_admins(*args, **kwargs):
            connection = kwargs['connection']
            self.assertIsInstance(connection, MyEmailBackend)
            mail_admins_called['called'] = True

        # Monkeypatches
        orig_mail_admins = mail.mail_admins
        orig_email_backend = admin_email_handler.email_backend
        mail.mail_admins = my_mail_admins
        admin_email_handler.email_backend = (
            'logging_tests.logconfig.MyEmailBackend')

        try:
            self.logger.error(message)
            self.assertTrue(mail_admins_called['called'])
        finally:
            # Revert Monkeypatches
            mail.mail_admins = orig_mail_admins
            admin_email_handler.email_backend = orig_email_backend


class SettingsConfigTest(AdminScriptTestCase):
    """
    Test that accessing settings in a custom logging handler does not trigger
    a circular import error.
    """
    def setUp(self):
        log_config = """{
    'version': 1,
    'handlers': {
        'custom_handler': {
            'level': 'INFO',
            'class': 'logging_tests.logconfig.MyHandler',
        }
    }
}"""
        self.write_settings('settings.py', sdict={'LOGGING': log_config})

    def tearDown(self):
        self.remove_settings('settings.py')

    def test_circular_dependency(self):
        # validate is just an example command to trigger settings configuration
        out, err = self.run_manage(['validate'])
        self.assertNoOutput(err)
        self.assertOutput(out, "0 errors found")


def dictConfig(config):
    dictConfig.called = True
dictConfig.called = False


class SettingsConfigureLogging(TestCase):
    """
    Test that calling settings.configure() initializes the logging
    configuration.
    """
    def test_configure_initializes_logging(self):
        settings = LazySettings()
        settings.configure(
            LOGGING_CONFIG='logging_tests.tests.dictConfig')
        self.assertTrue(dictConfig.called)


class SecurityLoggerTest(TestCase):

    urls = 'logging_tests.urls'

    def test_suspicious_operation_creates_log_message(self):
        with self.settings(DEBUG=True):
            with patch_logger('django.security.SuspiciousOperation', 'error') as calls:
                response = self.client.get('/suspicious/')
                self.assertEqual(len(calls), 1)
                self.assertEqual(calls[0], 'dubious')

    def test_suspicious_operation_uses_sublogger(self):
        with self.settings(DEBUG=True):
            with patch_logger('django.security.DisallowedHost', 'error') as calls:
                response = self.client.get('/suspicious_spec/')
                self.assertEqual(len(calls), 1)
                self.assertEqual(calls[0], 'dubious')
apache-2.0
yochow/autotest
database/database_connection.py
1
9104
import re, time, traceback

import common
from autotest_lib.client.common_lib import global_config


RECONNECT_FOREVER = object()

_DB_EXCEPTIONS = ('DatabaseError', 'OperationalError', 'ProgrammingError')

_GLOBAL_CONFIG_NAMES = {
    'username' : 'user',
    'db_name' : 'database',
}


def _copy_exceptions(source, destination):
    for exception_name in _DB_EXCEPTIONS:
        setattr(destination, exception_name, getattr(source, exception_name))


class _GenericBackend(object):
    def __init__(self, database_module):
        self._database_module = database_module
        self._connection = None
        self._cursor = None
        self.rowcount = None
        _copy_exceptions(database_module, self)


    def connect(self, host=None, username=None, password=None, db_name=None):
        """
        This is assumed to enable autocommit.
        """
        raise NotImplementedError


    def disconnect(self):
        if self._connection:
            self._connection.close()
            self._connection = None
            self._cursor = None


    def execute(self, query, parameters=None):
        self._cursor.execute(query, parameters)
        self.rowcount = self._cursor.rowcount
        return self._cursor.fetchall()


class _MySqlBackend(_GenericBackend):
    def __init__(self):
        import MySQLdb
        super(_MySqlBackend, self).__init__(MySQLdb)


    @staticmethod
    def convert_boolean(boolean, conversion_dict):
        'Convert booleans to integer strings'
        return str(int(boolean))


    def connect(self, host=None, username=None, password=None, db_name=None):
        import MySQLdb.converters
        convert_dict = MySQLdb.converters.conversions
        convert_dict.setdefault(bool, self.convert_boolean)

        self._connection = self._database_module.connect(
            host=host, user=username, passwd=password, db=db_name,
            conv=convert_dict)
        self._connection.autocommit(True)
        self._cursor = self._connection.cursor()


class _SqliteBackend(_GenericBackend):
    def __init__(self):
        from pysqlite2 import dbapi2
        super(_SqliteBackend, self).__init__(dbapi2)
        self._last_insert_id_re = re.compile(r'\sLAST_INSERT_ID\(\)',
                                             re.IGNORECASE)


    def connect(self, host=None, username=None, password=None, db_name=None):
        self._connection = self._database_module.connect(db_name)
        self._connection.isolation_level = None # enable autocommit
        self._cursor = self._connection.cursor()


    def execute(self, query, parameters=None):
        # pysqlite2 uses paramstyle=qmark
        # TODO: make this more sophisticated if necessary
        query = query.replace('%s', '?')
        # pysqlite2 can't handle parameters=None (it throws a nonsense
        # exception)
        if parameters is None:
            parameters = ()
        # sqlite3 doesn't support MySQL's LAST_INSERT_ID().  Instead it has
        # something similar called LAST_INSERT_ROWID() that will do enough of
        # what we want (for our non-concurrent unittest use case).
        query = self._last_insert_id_re.sub(' LAST_INSERT_ROWID()', query)
        return super(_SqliteBackend, self).execute(query, parameters)


_BACKEND_MAP = {
    'mysql' : _MySqlBackend,
    'sqlite' : _SqliteBackend,
}


class DatabaseConnection(object):
    """
    Generic wrapper for a database connection.  Supports both mysql and
    sqlite backends.

    Public attributes:
    * reconnect_enabled: if True, when an OperationalError occurs the class
      will try to reconnect to the database automatically.
    * reconnect_delay_sec: seconds to wait before reconnecting
    * max_reconnect_attempts: maximum number of times to try reconnecting
      before giving up.  Setting to RECONNECT_FOREVER removes the limit.
    * rowcount - will hold cursor.rowcount after each call to execute().
    * global_config_section - the section in which to find DB information.
      This should be passed to the constructor, not set later, and may be
      None, in which case information must be passed to connect().
    * debug - if set True, all queries will be printed before being executed
    """
    _DATABASE_ATTRIBUTES = ('db_type', 'host', 'username', 'password',
                            'db_name')


    def __init__(self, global_config_section=None):
        self.global_config_section = global_config_section
        self._backend = None
        self.rowcount = None
        self.debug = False

        # reconnect defaults
        self.reconnect_enabled = True
        self.reconnect_delay_sec = 20
        self.max_reconnect_attempts = 10

        self._read_options()


    def _get_option(self, name, provided_value):
        if provided_value is not None:
            return provided_value
        if self.global_config_section:
            global_config_name = _GLOBAL_CONFIG_NAMES.get(name, name)
            return global_config.global_config.get_config_value(
                self.global_config_section, global_config_name)
        return getattr(self, name, None)


    def _read_options(self, db_type=None, host=None, username=None,
                      password=None, db_name=None):
        self.db_type = self._get_option('db_type', db_type)
        self.host = self._get_option('host', host)
        self.username = self._get_option('username', username)
        self.password = self._get_option('password', password)
        self.db_name = self._get_option('db_name', db_name)


    def _get_backend(self, db_type):
        if db_type not in _BACKEND_MAP:
            raise ValueError('Invalid database type: %s, should be one of %s'
                             % (db_type, ', '.join(_BACKEND_MAP.keys())))
        backend_class = _BACKEND_MAP[db_type]
        return backend_class()


    def _reached_max_attempts(self, num_attempts):
        return (self.max_reconnect_attempts is not RECONNECT_FOREVER and
                num_attempts > self.max_reconnect_attempts)


    def _is_reconnect_enabled(self, supplied_param):
        if supplied_param is not None:
            return supplied_param
        return self.reconnect_enabled


    def _connect_backend(self, try_reconnecting=None):
        num_attempts = 0
        while True:
            try:
                self._backend.connect(host=self.host, username=self.username,
                                      password=self.password,
                                      db_name=self.db_name)
                return
            except self._backend.OperationalError:
                num_attempts += 1
                if not self._is_reconnect_enabled(try_reconnecting):
                    raise
                if self._reached_max_attempts(num_attempts):
                    raise
                traceback.print_exc()
                print ("Can't connect to database; reconnecting in %s sec" %
                       self.reconnect_delay_sec)
                time.sleep(self.reconnect_delay_sec)
                self.disconnect()


    def connect(self, db_type=None, host=None, username=None, password=None,
                db_name=None, try_reconnecting=None):
        """
        Parameters passed to this function will override defaults from global
        config.  try_reconnecting, if passed, will override
        self.reconnect_enabled.
        """
        self.disconnect()
        self._read_options(db_type, host, username, password, db_name)
        self._backend = self._get_backend(self.db_type)
        _copy_exceptions(self._backend, self)
        self._connect_backend(try_reconnecting)


    def disconnect(self):
        if self._backend:
            self._backend.disconnect()


    def execute(self, query, parameters=None, try_reconnecting=None):
        """
        Execute a query and return cursor.fetchall().  try_reconnecting, if
        passed, will override self.reconnect_enabled.
        """
        if self.debug:
            print 'Executing %s, %s' % (query, parameters)
        # _connect_backend() contains a retry loop, so don't loop here
        try:
            results = self._backend.execute(query, parameters)
        except self._backend.OperationalError:
            if not self._is_reconnect_enabled(try_reconnecting):
                raise
            traceback.print_exc()
            print ("MYSQL connection died; reconnecting")
            self.disconnect()
            self._connect_backend(try_reconnecting)
            results = self._backend.execute(query, parameters)
        self.rowcount = self._backend.rowcount
        return results


    def get_database_info(self):
        return dict((attribute, getattr(self, attribute))
                    for attribute in self._DATABASE_ATTRIBUTES)


    @classmethod
    def get_test_database(cls, file_path=':memory:'):
        """
        Factory method returning a DatabaseConnection for a temporary
        in-memory database.
        """
        database = cls()
        database.reconnect_enabled = False
        database.connect(db_type='sqlite', db_name=file_path)
        return database
gpl-2.0
yd0str/infernal-twin
build/reportlab/build/lib.linux-i686-2.7/reportlab/pdfbase/_fontdata_enc_macexpert.py
55
3058
MacExpertEncoding = (None, None, None, None, None, None, None, None,
    None, None, None, None, None, None, None, None,
    None, None, None, None, None, None, None, None,
    None, None, None, None, None, None, None, None,
    'space', 'exclamsmall', 'Hungarumlautsmall', 'centoldstyle',
    'dollaroldstyle', 'dollarsuperior', 'ampersandsmall', 'Acutesmall',
    'parenleftsuperior', 'parenrightsuperior', 'twodotenleader',
    'onedotenleader', 'comma', 'hyphen', 'period', 'fraction',
    'zerooldstyle', 'oneoldstyle', 'twooldstyle', 'threeoldstyle',
    'fouroldstyle', 'fiveoldstyle', 'sixoldstyle', 'sevenoldstyle',
    'eightoldstyle', 'nineoldstyle', 'colon', 'semicolon', None,
    'threequartersemdash', None, 'questionsmall', None, None, None, None,
    'Ethsmall', None, None, 'onequarter', 'onehalf', 'threequarters',
    'oneeighth', 'threeeighths', 'fiveeighths', 'seveneighths', 'onethird',
    'twothirds', None, None, None, None, None, None, 'ff', 'fi', 'fl',
    'ffi', 'ffl', 'parenleftinferior', None, 'parenrightinferior',
    'Circumflexsmall', 'hypheninferior', 'Gravesmall', 'Asmall', 'Bsmall',
    'Csmall', 'Dsmall', 'Esmall', 'Fsmall', 'Gsmall', 'Hsmall', 'Ismall',
    'Jsmall', 'Ksmall', 'Lsmall', 'Msmall', 'Nsmall', 'Osmall', 'Psmall',
    'Qsmall', 'Rsmall', 'Ssmall', 'Tsmall', 'Usmall', 'Vsmall', 'Wsmall',
    'Xsmall', 'Ysmall', 'Zsmall', 'colonmonetary', 'onefitted', 'rupiah',
    'Tildesmall', None, None, 'asuperior', 'centsuperior', None, None,
    None, None, 'Aacutesmall', 'Agravesmall', 'Acircumflexsmall',
    'Adieresissmall', 'Atildesmall', 'Aringsmall', 'Ccedillasmall',
    'Eacutesmall', 'Egravesmall', 'Ecircumflexsmall', 'Edieresissmall',
    'Iacutesmall', 'Igravesmall', 'Icircumflexsmall', 'Idieresissmall',
    'Ntildesmall', 'Oacutesmall', 'Ogravesmall', 'Ocircumflexsmall',
    'Odieresissmall', 'Otildesmall', 'Uacutesmall', 'Ugravesmall',
    'Ucircumflexsmall', 'Udieresissmall', None, 'eightsuperior',
    'fourinferior', 'threeinferior', 'sixinferior', 'eightinferior',
    'seveninferior', 'Scaronsmall', None, 'centinferior', 'twoinferior',
    None, 'Dieresissmall', None, 'Caronsmall', 'osuperior', 'fiveinferior',
    None, 'commainferior', 'periodinferior', 'Yacutesmall', None,
    'dollarinferior', None, None, 'Thornsmall', None, 'nineinferior',
    'zeroinferior', 'Zcaronsmall', 'AEsmall', 'Oslashsmall',
    'questiondownsmall', 'oneinferior', 'Lslashsmall', None, None, None,
    None, None, None, 'Cedillasmall', None, None, None, None, None,
    'OEsmall', 'figuredash', 'hyphensuperior', None, None, None, None,
    'exclamdownsmall', None, 'Ydieresissmall', None, 'onesuperior',
    'twosuperior', 'threesuperior', 'foursuperior', 'fivesuperior',
    'sixsuperior', 'sevensuperior', 'ninesuperior', 'zerosuperior', None,
    'esuperior', 'rsuperior', 'tsuperior', None, None, 'isuperior',
    'ssuperior', 'dsuperior', None, None, None, None, None, 'lsuperior',
    'Ogoneksmall', 'Brevesmall', 'Macronsmall', 'bsuperior', 'nsuperior',
    'msuperior', 'commasuperior', 'periodsuperior', 'Dotaccentsmall',
    'Ringsmall', None, None, None, None)
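A quick sanity check of how this vector is indexed (byte value in, glyph name or None out), runnable against the tuple above; the specific byte values are chosen for illustration:

# Byte 0x20 maps to 'space'; the control range 0x00-0x1f is unencoded.
assert MacExpertEncoding[0x20] == 'space'
assert MacExpertEncoding[0x00] is None
assert len(MacExpertEncoding) == 256  # one entry per possible byte value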
gpl-3.0
beeftornado/sentry
src/sentry/tasks/reprocessing.py
1
3307
from __future__ import absolute_import, print_function

import logging
from datetime import timedelta

from django.conf import settings
from django.utils import timezone

from sentry.tasks.base import instrumented_task
from sentry.utils.locking import UnableToAcquireLock

logger = logging.getLogger(__name__)


@instrumented_task(name="sentry.tasks.reprocess_events", queue="events.reprocess_events")
def reprocess_events(project_id, **kwargs):
    from sentry.models import ProcessingIssue
    from sentry.coreapi import insert_data_to_database_legacy
    from sentry import app

    lock_key = "events:reprocess_events:%s" % project_id
    have_more = False
    lock = app.locks.get(lock_key, duration=60)

    try:
        with lock.acquire():
            raw_events, have_more = ProcessingIssue.objects.find_resolved(project_id)
            if raw_events:
                for raw_event in raw_events:
                    insert_data_to_database_legacy(raw_event.data.data, from_reprocessing=True)
                    create_reprocessing_report(project_id=project_id, event_id=raw_event.event_id)
                    # Here we only delete the raw event but leave the
                    # reprocessing report alive.  When the queue
                    # eventually kicks in this should clean up.
                    raw_event.delete()
    except UnableToAcquireLock as error:
        logger.warning("reprocess_events.fail", extra={"error": error})

    # There are more, kick us off again
    if have_more:
        reprocess_events.delay(project_id=project_id)


def create_reprocessing_report(project_id, event_id):
    from sentry.models import ReprocessingReport

    return ReprocessingReport.objects.create(project_id=project_id, event_id=event_id)


@instrumented_task(name="sentry.tasks.clear_expired_raw_events", time_limit=15, soft_time_limit=10)
def clear_expired_raw_events():
    from sentry.models import RawEvent, ProcessingIssue, ReprocessingReport

    def batched_delete(model_cls, **filter):
        # Django 1.6's `Queryset.delete` runs in this order:
        #
        # 1. Fetch all models
        # 2. Call all `on_delete`s
        # 3. Delete from DB (batched `DELETE WHERE id in (...)`)
        #
        # Since we attempt to unpickle `NodeField`s in Step 2, we might time
        # out at that point and never do the delete.
        #
        # Better to delete a few rows than none.
        while True:
            # Django already loads this into memory, might as well do it
            # explicitly.  Makes the check for result emptiness cheaper.
            result = set(model_cls.objects.filter(**filter)[:200].values_list("pk", flat=True))
            if not result:
                break

            # Django ORM can't do delete with limit
            model_cls.objects.filter(pk__in=result).delete()

    cutoff = timezone.now() - timedelta(days=settings.SENTRY_RAW_EVENT_MAX_AGE_DAYS)

    # Clear old raw events and reprocessing reports
    batched_delete(RawEvent, datetime__lt=cutoff)
    batched_delete(ReprocessingReport, datetime__lt=cutoff)

    # Processing issues get a bit of extra time before we delete them
    cutoff = timezone.now() - timedelta(days=int(settings.SENTRY_RAW_EVENT_MAX_AGE_DAYS * 1.3))
    batched_delete(ProcessingIssue, datetime__lt=cutoff)
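The batched_delete helper is the part worth lifting out: it trades one potentially unbounded DELETE for many small ones so that a task time limit loses at most one batch of progress. A minimal standalone sketch of the same pattern, assuming a configured Django project; AuditLog and its created field are hypothetical, not Sentry models:

def delete_in_batches(model_cls, batch_size=200, **filters):
    """Delete matching rows in fixed-size batches.

    Fetch at most `batch_size` primary keys, delete exactly those rows,
    and repeat until nothing matches.  If one batch hits a time limit,
    every earlier batch has already been committed.
    """
    while True:
        pks = list(
            model_cls.objects.filter(**filters).values_list("pk", flat=True)[:batch_size]
        )
        if not pks:
            break
        # Delete by primary key, since the ORM offers no DELETE ... LIMIT.
        model_cls.objects.filter(pk__in=pks).delete()

# Hypothetical usage (AuditLog is illustrative only):
#   cutoff = timezone.now() - timedelta(days=90)
#   delete_in_batches(AuditLog, created__lt=cutoff)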
bsd-3-clause