repo_name: stringlengths 6-100
path: stringlengths 4-294
copies: stringlengths 1-5
size: stringlengths 4-6
content: stringlengths 606-896k
license: stringclasses (15 values)
DataCanvasIO/example-modules
modules/modeling/basic/linear_svc_estimator/main.py
2
1630
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import random
from specparser import get_settings_from_file
from pprint import pprint
import csv
from sklearn.svm import LinearSVC
import numpy as np
from sklearn.externals import joblib
import matplotlib
matplotlib.use('Agg')
import datetime
from matplotlib.backends.backend_pdf import PdfPages
import matplotlib.pyplot as plt


def drawPrecisionRecall(X,Y,output_file):
    pdf = PdfPages(output_file)
    plt.figure(figsize=(len(Y), len(X)))
    plt.plot(Y, X, 'r-o')
    plt.title('Precision/Recall')
    pdf.savefig()  # saves the current figure into a pdf page
    plt.close()
    pdf.close()


def readcolumn(filename):
    column = []
    with open(filename,"r") as fconcl:
        for line in fconcl:
            column.append(line.rstrip('\n'))
    return column


def main():
    settings = get_settings_from_file("spec.json")
    print(settings)
    X = np.genfromtxt(settings.Input.X, delimiter=',', skip_header=1)
    svc = joblib.load(settings.Input.MODEL)
    Y_out = svc.predict(X)
    Y_list = [Y_out]
    np.savetxt("./conclusion.csv", Y_out, fmt="%d", delimiter=",")
    conclusion = readcolumn("./conclusion.csv")
    label = readcolumn(settings.Input.Y)
    precision_list = []
    recall_list = []
    hits = 0
    for i in range(len(label)):
        if conclusion[i] == label[i]:
            hits+=1
        precision_list.append(1.0*hits/(i+1))
        recall_list.append(1.0*hits/(len(label)))
    drawPrecisionRecall(precision_list,recall_list,settings.Output.report)
    print("Done")


if __name__ == "__main__":
    main()
bsd-3-clause
pablo-co/insight-jobs
process_stops.py
1
3206
import sys
import traceback
from bhulan.processVehicles import importTrucks, initCompute
from bhulan.processStops import saveComputedStops
from bhulan.util import notify, getTime
from pymongo import MongoClient
from bhulan.constants import WATTS_DATA_DB_KEY
from bhulan.inputOutput import saveStopsToFile
import numpy as np
from bhulan.merger import merger
import getopt
import warnings
import string
import random

db = WATTS_DATA_DB_KEY


def trucks(filename):
    importTrucks(filename=filename)


def compute():
    initCompute()


def stops():
    saveComputedStops()
    return 0


def run(func, args):
    messages = {
        trucks: "import trucks ",
        compute: "compute truck dates and centers ",
        stops: "compute stops and properties"
    }
    message = messages[func]
    try:
        getTime(func, message, *args)
        # func(*args)
        # notify(message)
    except:
        print traceback.format_exc()
        notify(message + "failed")


def setupAll(input_file_name):
    try:
        run(trucks, [input_file_name])
        run(compute, [])
        run(stops, [])
        notify("complete setup succeeded!")
    except:
        print traceback.format_exc()
        notify("complete setup failed...")


##
# deletes the database and cleans up the collections
def dataPurge(db):
    client = MongoClient()
    client.drop_database(db)


def main(argv):
    input_file_name = "input.csv"
    output_file_name = "output.csv"
    existing_file_name = "existing.csv"
    non_existing_file_name = "non_existing.csv"
    hash_name = ''.join(random.choice(string.ascii_uppercase) for i in range(24))
    try:
        opts, args = getopt.getopt(argv, "i:o:e:n:", ["input=", "output=", "existing=", "non_existing="])
    except getopt.GetoptError:
        sys.exit(2)
    for opt, arg in opts:
        if opt in ("-i", "--input"):
            input_file_name = arg
        elif opt in ("-e", "--existing"):
            existing_file_name = arg
        elif opt in ("-n", "--non_existing"):
            non_existing_file_name = arg
        elif opt in ("-o", "--output"):
            output_file_name = arg

    dataPurge(db)
    setupAll(input_file_name)
    run(trucks, [input_file_name])
    run(stops, [])
    run(compute, [])

    with warnings.catch_warnings():
        warnings.simplefilter("ignore")
        exc = np.array([])
        try:
            exc = np.genfromtxt(non_existing_file_name, dtype=None, delimiter=',')
        except:
            print 'Non existing empty'
        exist = np.genfromtxt(existing_file_name, dtype=None, delimiter=',')
        i = exist.min()
        while i < (exist.max() + 1):
            if i not in exc:
                saveStopsToFile(hash_name, i)
            i += 1

    # saveStopsToFile(216)
    # if len(sys.argv) == 2:
    #     if sys.argv[1] == "all":
    #         getTime(setupAll, "Ran complete setup")
    #     if sys.argv[1] == "trucks":
    #         run(trucks, [])
    #     if sys.argv[1] == "stops":
    #         run(stops, [])
    #     if sys.argv[1] == "compute":
    #         run(compute, [])
    merger(existing_file_name, output_file_name, hash_name)


if __name__ == "__main__":
    main(sys.argv[1:])
mit
Matty-Downing2169/opencamlib
scripts/batchdropcutter_test_2.py
7
2142
import ocl
import pyocl
import camvtk
import time
import vtk
import datetime
import math

if __name__ == "__main__":
    print ocl.revision()
    myscreen = camvtk.VTKScreen()
    stl = camvtk.STLSurf("../stl/gnu_tux_mod.stl")
    #stl = camvtk.STLSurf("../stl/beet_mm.stl")
    #stl = camvtk.STLSurf("../stl/Blade.stl")
    myscreen.addActor(stl)
    stl.SetWireframe()
    stl.SetColor((0.5,0.5,0.5))
    polydata = stl.src.GetOutput()
    s = ocl.STLSurf()
    camvtk.vtkPolyData2OCLSTL(polydata, s)
    print "STL surface read ", s.size(), " triangles"
    length=5
    cutter = ocl.BallCutter(1.4321, length)
    #cutter = ocl.CylCutter(1.123, length)
    #cutter = ocl.BullCutter(1.4123, 0.5, length)
    #cutter = ocl.ConeCutter(0.43, math.pi/7, length)
    print cutter
    minx=0
    dx=0.06
    maxx=9
    miny=0
    dy=1
    maxy=12
    z=-5
    clpoints = pyocl.CLPointGrid(minx,dx,maxx,miny,dy,maxy,z)
    print "generated grid with", len(clpoints)," CL-points"
    # batchdropcutter
    bdc1 = ocl.BatchDropCutter()
    bdc1.setSTL(s)
    bdc1.setCutter(cutter)
    for p in clpoints:
        bdc1.appendPoint(p)
    t_before = time.time()
    bdc1.run()
    t_after = time.time()
    calctime = t_after-t_before
    print " done in ", calctime," s"
    clpts = bdc1.getCLPoints()
    print "rendering...",
    camvtk.drawCLPointCloud(myscreen, clpts)
    print "done"
    myscreen.camera.SetPosition(3, 23, 15)
    myscreen.camera.SetFocalPoint(4, 5, 0)
    t = camvtk.Text()
    t.SetText("OpenCAMLib")
    t.SetPos( (myscreen.width-200, myscreen.height-30) )
    myscreen.addActor( t)
    t2 = camvtk.Text()
    stltext = "%i triangles\n%i CL-points\n%0.1f seconds" % (s.size(), len(clpts), calctime)
    t2.SetText(stltext)
    t2.SetPos( (50, myscreen.height-200) )
    myscreen.addActor( t2)
    t3 = camvtk.Text()
    ctext = "Cutter: %s" % ( str(cutter) )
    t3.SetText(ctext)
    t3.SetPos( (50, myscreen.height-250) )
    myscreen.addActor( t3)
    myscreen.render()
    myscreen.iren.Start()
    raw_input("Press Enter to terminate")
gpl-3.0
spisneha25/django
django/utils/dateformat.py
365
10712
""" PHP date() style date formatting See http://www.php.net/date for format strings Usage: >>> import datetime >>> d = datetime.datetime.now() >>> df = DateFormat(d) >>> print(df.format('jS F Y H:i')) 7th October 2003 11:39 >>> """ from __future__ import unicode_literals import calendar import datetime import re import time from django.utils import six from django.utils.dates import ( MONTHS, MONTHS_3, MONTHS_ALT, MONTHS_AP, WEEKDAYS, WEEKDAYS_ABBR, ) from django.utils.encoding import force_text from django.utils.timezone import get_default_timezone, is_aware, is_naive from django.utils.translation import ugettext as _ re_formatchars = re.compile(r'(?<!\\)([aAbBcdDeEfFgGhHiIjlLmMnNoOPrsStTUuwWyYzZ])') re_escaped = re.compile(r'\\(.)') class Formatter(object): def format(self, formatstr): pieces = [] for i, piece in enumerate(re_formatchars.split(force_text(formatstr))): if i % 2: pieces.append(force_text(getattr(self, piece)())) elif piece: pieces.append(re_escaped.sub(r'\1', piece)) return ''.join(pieces) class TimeFormat(Formatter): def __init__(self, obj): self.data = obj self.timezone = None # We only support timezone when formatting datetime objects, # not date objects (timezone information not appropriate), # or time objects (against established django policy). if isinstance(obj, datetime.datetime): if is_naive(obj): self.timezone = get_default_timezone() else: self.timezone = obj.tzinfo def a(self): "'a.m.' or 'p.m.'" if self.data.hour > 11: return _('p.m.') return _('a.m.') def A(self): "'AM' or 'PM'" if self.data.hour > 11: return _('PM') return _('AM') def B(self): "Swatch Internet time" raise NotImplementedError('may be implemented in a future release') def e(self): """ Timezone name. If timezone information is not available, this method returns an empty string. """ if not self.timezone: return "" try: if hasattr(self.data, 'tzinfo') and self.data.tzinfo: # Have to use tzinfo.tzname and not datetime.tzname # because datatime.tzname does not expect Unicode return self.data.tzinfo.tzname(self.data) or "" except NotImplementedError: pass return "" def f(self): """ Time, in 12-hour hours and minutes, with minutes left off if they're zero. Examples: '1', '1:30', '2:05', '2' Proprietary extension. """ if self.data.minute == 0: return self.g() return '%s:%s' % (self.g(), self.i()) def g(self): "Hour, 12-hour format without leading zeros; i.e. '1' to '12'" if self.data.hour == 0: return 12 if self.data.hour > 12: return self.data.hour - 12 return self.data.hour def G(self): "Hour, 24-hour format without leading zeros; i.e. '0' to '23'" return self.data.hour def h(self): "Hour, 12-hour format; i.e. '01' to '12'" return '%02d' % self.g() def H(self): "Hour, 24-hour format; i.e. '00' to '23'" return '%02d' % self.G() def i(self): "Minutes; i.e. '00' to '59'" return '%02d' % self.data.minute def O(self): """ Difference to Greenwich time in hours; e.g. '+0200', '-0430'. If timezone information is not available, this method returns an empty string. """ if not self.timezone: return "" seconds = self.Z() sign = '-' if seconds < 0 else '+' seconds = abs(seconds) return "%s%02d%02d" % (sign, seconds // 3600, (seconds // 60) % 60) def P(self): """ Time, in 12-hour hours, minutes and 'a.m.'/'p.m.', with minutes left off if they're zero and the strings 'midnight' and 'noon' if appropriate. Examples: '1 a.m.', '1:30 p.m.', 'midnight', 'noon', '12:30 p.m.' Proprietary extension. 
""" if self.data.minute == 0 and self.data.hour == 0: return _('midnight') if self.data.minute == 0 and self.data.hour == 12: return _('noon') return '%s %s' % (self.f(), self.a()) def s(self): "Seconds; i.e. '00' to '59'" return '%02d' % self.data.second def T(self): """ Time zone of this machine; e.g. 'EST' or 'MDT'. If timezone information is not available, this method returns an empty string. """ if not self.timezone: return "" name = self.timezone.tzname(self.data) if self.timezone else None if name is None: name = self.format('O') return six.text_type(name) def u(self): "Microseconds; i.e. '000000' to '999999'" return '%06d' % self.data.microsecond def Z(self): """ Time zone offset in seconds (i.e. '-43200' to '43200'). The offset for timezones west of UTC is always negative, and for those east of UTC is always positive. If timezone information is not available, this method returns an empty string. """ if not self.timezone: return "" offset = self.timezone.utcoffset(self.data) # `offset` is a datetime.timedelta. For negative values (to the west of # UTC) only days can be negative (days=-1) and seconds are always # positive. e.g. UTC-1 -> timedelta(days=-1, seconds=82800, microseconds=0) # Positive offsets have days=0 return offset.days * 86400 + offset.seconds class DateFormat(TimeFormat): year_days = [None, 0, 31, 59, 90, 120, 151, 181, 212, 243, 273, 304, 334] def b(self): "Month, textual, 3 letters, lowercase; e.g. 'jan'" return MONTHS_3[self.data.month] def c(self): """ ISO 8601 Format Example : '2008-01-02T10:30:00.000123' """ return self.data.isoformat() def d(self): "Day of the month, 2 digits with leading zeros; i.e. '01' to '31'" return '%02d' % self.data.day def D(self): "Day of the week, textual, 3 letters; e.g. 'Fri'" return WEEKDAYS_ABBR[self.data.weekday()] def E(self): "Alternative month names as required by some locales. Proprietary extension." return MONTHS_ALT[self.data.month] def F(self): "Month, textual, long; e.g. 'January'" return MONTHS[self.data.month] def I(self): "'1' if Daylight Savings Time, '0' otherwise." if self.timezone and self.timezone.dst(self.data): return '1' else: return '0' def j(self): "Day of the month without leading zeros; i.e. '1' to '31'" return self.data.day def l(self): "Day of the week, textual, long; e.g. 'Friday'" return WEEKDAYS[self.data.weekday()] def L(self): "Boolean for whether it is a leap year; i.e. True or False" return calendar.isleap(self.data.year) def m(self): "Month; i.e. '01' to '12'" return '%02d' % self.data.month def M(self): "Month, textual, 3 letters; e.g. 'Jan'" return MONTHS_3[self.data.month].title() def n(self): "Month without leading zeros; i.e. '1' to '12'" return self.data.month def N(self): "Month abbreviation in Associated Press style. Proprietary extension." return MONTHS_AP[self.data.month] def o(self): "ISO 8601 year number matching the ISO week number (W)" return self.data.isocalendar()[0] def r(self): "RFC 2822 formatted date; e.g. 'Thu, 21 Dec 2000 16:01:07 +0200'" return self.format('D, j M Y H:i:s O') def S(self): "English ordinal suffix for the day of the month, 2 characters; i.e. 'st', 'nd', 'rd' or 'th'" if self.data.day in (11, 12, 13): # Special case return 'th' last = self.data.day % 10 if last == 1: return 'st' if last == 2: return 'nd' if last == 3: return 'rd' return 'th' def t(self): "Number of days in the given month; i.e. 
'28' to '31'" return '%02d' % calendar.monthrange(self.data.year, self.data.month)[1] def U(self): "Seconds since the Unix epoch (January 1 1970 00:00:00 GMT)" if isinstance(self.data, datetime.datetime) and is_aware(self.data): return int(calendar.timegm(self.data.utctimetuple())) else: return int(time.mktime(self.data.timetuple())) def w(self): "Day of the week, numeric, i.e. '0' (Sunday) to '6' (Saturday)" return (self.data.weekday() + 1) % 7 def W(self): "ISO-8601 week number of year, weeks starting on Monday" # Algorithm from http://www.personal.ecu.edu/mccartyr/ISOwdALG.txt week_number = None jan1_weekday = self.data.replace(month=1, day=1).weekday() + 1 weekday = self.data.weekday() + 1 day_of_year = self.z() if day_of_year <= (8 - jan1_weekday) and jan1_weekday > 4: if jan1_weekday == 5 or (jan1_weekday == 6 and calendar.isleap(self.data.year - 1)): week_number = 53 else: week_number = 52 else: if calendar.isleap(self.data.year): i = 366 else: i = 365 if (i - day_of_year) < (4 - weekday): week_number = 1 else: j = day_of_year + (7 - weekday) + (jan1_weekday - 1) week_number = j // 7 if jan1_weekday > 4: week_number -= 1 return week_number def y(self): "Year, 2 digits; e.g. '99'" return six.text_type(self.data.year)[2:] def Y(self): "Year, 4 digits; e.g. '1999'" return self.data.year def z(self): "Day of the year; i.e. '0' to '365'" doy = self.year_days[self.data.month] + self.data.day if self.L() and self.data.month > 2: doy += 1 return doy def format(value, format_string): "Convenience function" df = DateFormat(value) return df.format(format_string) def time_format(value, format_string): "Convenience function" tf = TimeFormat(value) return tf.format(format_string)
bsd-3-clause
mfherbst/spack
var/spack/repos/builtin/packages/bwa/package.py
2
2458
##############################################################################
# Copyright (c) 2013-2018, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *


class Bwa(Package):
    """Burrow-Wheeler Aligner for pairwise alignment between DNA sequences."""

    homepage = "http://github.com/lh3/bwa"
    url = "https://github.com/lh3/bwa/releases/download/v0.7.15/bwa-0.7.15.tar.bz2"

    version('0.7.17', '82cba7ef695538e6a38b9d4156837381')
    version('0.7.16a', 'c5115c9a5ea0406848500e4b23a7708c')
    version('0.7.15', 'fcf470a46a1dbe2f96a1c5b87c530554')
    version('0.7.13', 'f094f609438511766c434178a3635ab4')
    version('0.7.12', 'e24a587baaad411d5da89516ad7a261a',
            url='https://github.com/lh3/bwa/archive/0.7.12.tar.gz')

    depends_on('zlib')

    def install(self, spec, prefix):
        filter_file(r'^INCLUDES=', "INCLUDES=-I%s" % spec['zlib'].prefix.include, 'Makefile')
        filter_file(r'^LIBS=', "LIBS=-L%s " % spec['zlib'].prefix.lib, 'Makefile')
        make()
        mkdirp(prefix.bin)
        install('bwa', join_path(prefix.bin, 'bwa'))
        set_executable(join_path(prefix.bin, 'bwa'))
        mkdirp(prefix.doc)
        install('README.md', prefix.doc)
        install('NEWS.md', prefix.doc)
        mkdirp(prefix.man.man1)
        install('bwa.1', prefix.man.man1)
lgpl-2.1
grupoprog3/proyecto_final
Entrega Final/flask/Lib/site-packages/click/_termui_impl.py
64
16377
""" click._termui_impl ~~~~~~~~~~~~~~~~~~ This module contains implementations for the termui module. To keep the import time of Click down, some infrequently used functionality is placed in this module and only imported as needed. :copyright: (c) 2014 by Armin Ronacher. :license: BSD, see LICENSE for more details. """ import os import sys import time import math from ._compat import _default_text_stdout, range_type, PY2, isatty, \ open_stream, strip_ansi, term_len, get_best_encoding, WIN from .utils import echo from .exceptions import ClickException if os.name == 'nt': BEFORE_BAR = '\r' AFTER_BAR = '\n' else: BEFORE_BAR = '\r\033[?25l' AFTER_BAR = '\033[?25h\n' def _length_hint(obj): """Returns the length hint of an object.""" try: return len(obj) except TypeError: try: get_hint = type(obj).__length_hint__ except AttributeError: return None try: hint = get_hint(obj) except TypeError: return None if hint is NotImplemented or \ not isinstance(hint, (int, long)) or \ hint < 0: return None return hint class ProgressBar(object): def __init__(self, iterable, length=None, fill_char='#', empty_char=' ', bar_template='%(bar)s', info_sep=' ', show_eta=True, show_percent=None, show_pos=False, item_show_func=None, label=None, file=None, color=None, width=30): self.fill_char = fill_char self.empty_char = empty_char self.bar_template = bar_template self.info_sep = info_sep self.show_eta = show_eta self.show_percent = show_percent self.show_pos = show_pos self.item_show_func = item_show_func self.label = label or '' if file is None: file = _default_text_stdout() self.file = file self.color = color self.width = width self.autowidth = width == 0 if length is None: length = _length_hint(iterable) if iterable is None: if length is None: raise TypeError('iterable or length is required') iterable = range_type(length) self.iter = iter(iterable) self.length = length self.length_known = length is not None self.pos = 0 self.avg = [] self.start = self.last_eta = time.time() self.eta_known = False self.finished = False self.max_width = None self.entered = False self.current_item = None self.is_hidden = not isatty(self.file) self._last_line = None def __enter__(self): self.entered = True self.render_progress() return self def __exit__(self, exc_type, exc_value, tb): self.render_finish() def __iter__(self): if not self.entered: raise RuntimeError('You need to use progress bars in a with block.') self.render_progress() return self def render_finish(self): if self.is_hidden: return self.file.write(AFTER_BAR) self.file.flush() @property def pct(self): if self.finished: return 1.0 return min(self.pos / (float(self.length) or 1), 1.0) @property def time_per_iteration(self): if not self.avg: return 0.0 return sum(self.avg) / float(len(self.avg)) @property def eta(self): if self.length_known and not self.finished: return self.time_per_iteration * (self.length - self.pos) return 0.0 def format_eta(self): if self.eta_known: t = self.eta + 1 seconds = t % 60 t /= 60 minutes = t % 60 t /= 60 hours = t % 24 t /= 24 if t > 0: days = t return '%dd %02d:%02d:%02d' % (days, hours, minutes, seconds) else: return '%02d:%02d:%02d' % (hours, minutes, seconds) return '' def format_pos(self): pos = str(self.pos) if self.length_known: pos += '/%s' % self.length return pos def format_pct(self): return ('% 4d%%' % int(self.pct * 100))[1:] def format_progress_line(self): show_percent = self.show_percent info_bits = [] if self.length_known: bar_length = int(self.pct * self.width) bar = self.fill_char * bar_length bar += self.empty_char * 
(self.width - bar_length) if show_percent is None: show_percent = not self.show_pos else: if self.finished: bar = self.fill_char * self.width else: bar = list(self.empty_char * (self.width or 1)) if self.time_per_iteration != 0: bar[int((math.cos(self.pos * self.time_per_iteration) / 2.0 + 0.5) * self.width)] = self.fill_char bar = ''.join(bar) if self.show_pos: info_bits.append(self.format_pos()) if show_percent: info_bits.append(self.format_pct()) if self.show_eta and self.eta_known and not self.finished: info_bits.append(self.format_eta()) if self.item_show_func is not None: item_info = self.item_show_func(self.current_item) if item_info is not None: info_bits.append(item_info) return (self.bar_template % { 'label': self.label, 'bar': bar, 'info': self.info_sep.join(info_bits) }).rstrip() def render_progress(self): from .termui import get_terminal_size nl = False if self.is_hidden: buf = [self.label] nl = True else: buf = [] # Update width in case the terminal has been resized if self.autowidth: old_width = self.width self.width = 0 clutter_length = term_len(self.format_progress_line()) new_width = max(0, get_terminal_size()[0] - clutter_length) if new_width < old_width: buf.append(BEFORE_BAR) buf.append(' ' * self.max_width) self.max_width = new_width self.width = new_width clear_width = self.width if self.max_width is not None: clear_width = self.max_width buf.append(BEFORE_BAR) line = self.format_progress_line() line_len = term_len(line) if self.max_width is None or self.max_width < line_len: self.max_width = line_len buf.append(line) buf.append(' ' * (clear_width - line_len)) line = ''.join(buf) # Render the line only if it changed. if line != self._last_line: self._last_line = line echo(line, file=self.file, color=self.color, nl=nl) self.file.flush() def make_step(self, n_steps): self.pos += n_steps if self.length_known and self.pos >= self.length: self.finished = True if (time.time() - self.last_eta) < 1.0: return self.last_eta = time.time() self.avg = self.avg[-6:] + [-(self.start - time.time()) / (self.pos)] self.eta_known = self.length_known def update(self, n_steps): self.make_step(n_steps) self.render_progress() def finish(self): self.eta_known = 0 self.current_item = None self.finished = True def next(self): if self.is_hidden: return next(self.iter) try: rv = next(self.iter) self.current_item = rv except StopIteration: self.finish() self.render_progress() raise StopIteration() else: self.update(1) return rv if not PY2: __next__ = next del next def pager(text, color=None): """Decide what method to use for paging through text.""" stdout = _default_text_stdout() if not isatty(sys.stdin) or not isatty(stdout): return _nullpager(stdout, text, color) pager_cmd = (os.environ.get('PAGER', None) or '').strip() if pager_cmd: if WIN: return _tempfilepager(text, pager_cmd, color) return _pipepager(text, pager_cmd, color) if os.environ.get('TERM') in ('dumb', 'emacs'): return _nullpager(stdout, text, color) if WIN or sys.platform.startswith('os2'): return _tempfilepager(text, 'more <', color) if hasattr(os, 'system') and os.system('(less) 2>/dev/null') == 0: return _pipepager(text, 'less', color) import tempfile fd, filename = tempfile.mkstemp() os.close(fd) try: if hasattr(os, 'system') and os.system('more "%s"' % filename) == 0: return _pipepager(text, 'more', color) return _nullpager(stdout, text, color) finally: os.unlink(filename) def _pipepager(text, cmd, color): """Page through text by feeding it to another program. Invoking a pager through this might support colors. 
""" import subprocess env = dict(os.environ) # If we're piping to less we might support colors under the # condition that cmd_detail = cmd.rsplit('/', 1)[-1].split() if color is None and cmd_detail[0] == 'less': less_flags = os.environ.get('LESS', '') + ' '.join(cmd_detail[1:]) if not less_flags: env['LESS'] = '-R' color = True elif 'r' in less_flags or 'R' in less_flags: color = True if not color: text = strip_ansi(text) c = subprocess.Popen(cmd, shell=True, stdin=subprocess.PIPE, env=env) encoding = get_best_encoding(c.stdin) try: c.stdin.write(text.encode(encoding, 'replace')) c.stdin.close() except (IOError, KeyboardInterrupt): pass # Less doesn't respect ^C, but catches it for its own UI purposes (aborting # search or other commands inside less). # # That means when the user hits ^C, the parent process (click) terminates, # but less is still alive, paging the output and messing up the terminal. # # If the user wants to make the pager exit on ^C, they should set # `LESS='-K'`. It's not our decision to make. while True: try: c.wait() except KeyboardInterrupt: pass else: break def _tempfilepager(text, cmd, color): """Page through text by invoking a program on a temporary file.""" import tempfile filename = tempfile.mktemp() if not color: text = strip_ansi(text) encoding = get_best_encoding(sys.stdout) with open_stream(filename, 'wb')[0] as f: f.write(text.encode(encoding)) try: os.system(cmd + ' "' + filename + '"') finally: os.unlink(filename) def _nullpager(stream, text, color): """Simply print unformatted text. This is the ultimate fallback.""" if not color: text = strip_ansi(text) stream.write(text) class Editor(object): def __init__(self, editor=None, env=None, require_save=True, extension='.txt'): self.editor = editor self.env = env self.require_save = require_save self.extension = extension def get_editor(self): if self.editor is not None: return self.editor for key in 'VISUAL', 'EDITOR': rv = os.environ.get(key) if rv: return rv if WIN: return 'notepad' for editor in 'vim', 'nano': if os.system('which %s >/dev/null 2>&1' % editor) == 0: return editor return 'vi' def edit_file(self, filename): import subprocess editor = self.get_editor() if self.env: environ = os.environ.copy() environ.update(self.env) else: environ = None try: c = subprocess.Popen('%s "%s"' % (editor, filename), env=environ, shell=True) exit_code = c.wait() if exit_code != 0: raise ClickException('%s: Editing failed!' 
% editor) except OSError as e: raise ClickException('%s: Editing failed: %s' % (editor, e)) def edit(self, text): import tempfile text = text or '' if text and not text.endswith('\n'): text += '\n' fd, name = tempfile.mkstemp(prefix='editor-', suffix=self.extension) try: if WIN: encoding = 'utf-8-sig' text = text.replace('\n', '\r\n') else: encoding = 'utf-8' text = text.encode(encoding) f = os.fdopen(fd, 'wb') f.write(text) f.close() timestamp = os.path.getmtime(name) self.edit_file(name) if self.require_save \ and os.path.getmtime(name) == timestamp: return None f = open(name, 'rb') try: rv = f.read() finally: f.close() return rv.decode('utf-8-sig').replace('\r\n', '\n') finally: os.unlink(name) def open_url(url, wait=False, locate=False): import subprocess def _unquote_file(url): try: import urllib except ImportError: import urllib if url.startswith('file://'): url = urllib.unquote(url[7:]) return url if sys.platform == 'darwin': args = ['open'] if wait: args.append('-W') if locate: args.append('-R') args.append(_unquote_file(url)) null = open('/dev/null', 'w') try: return subprocess.Popen(args, stderr=null).wait() finally: null.close() elif WIN: if locate: url = _unquote_file(url) args = 'explorer /select,"%s"' % _unquote_file( url.replace('"', '')) else: args = 'start %s "" "%s"' % ( wait and '/WAIT' or '', url.replace('"', '')) return os.system(args) try: if locate: url = os.path.dirname(_unquote_file(url)) or '.' else: url = _unquote_file(url) c = subprocess.Popen(['xdg-open', url]) if wait: return c.wait() return 0 except OSError: if url.startswith(('http://', 'https://')) and not locate and not wait: import webbrowser webbrowser.open(url) return 0 return 1 def _translate_ch_to_exc(ch): if ch == '\x03': raise KeyboardInterrupt() if ch == '\x04': raise EOFError() if WIN: import msvcrt def getchar(echo): rv = msvcrt.getch() if echo: msvcrt.putchar(rv) _translate_ch_to_exc(rv) if PY2: enc = getattr(sys.stdin, 'encoding', None) if enc is not None: rv = rv.decode(enc, 'replace') else: rv = rv.decode('cp1252', 'replace') return rv else: import tty import termios def getchar(echo): if not isatty(sys.stdin): f = open('/dev/tty') fd = f.fileno() else: fd = sys.stdin.fileno() f = None try: old_settings = termios.tcgetattr(fd) try: tty.setraw(fd) ch = os.read(fd, 32) if echo and isatty(sys.stdout): sys.stdout.write(ch) finally: termios.tcsetattr(fd, termios.TCSADRAIN, old_settings) sys.stdout.flush() if f is not None: f.close() except termios.error: pass _translate_ch_to_exc(ch) return ch.decode(get_best_encoding(sys.stdin), 'replace')
apache-2.0
crawfordsm/pysalt
lib/saltstat.py
2
7174
################################# LICENSE ################################## # Copyright (c) 2009, South African Astronomical Observatory (SAAO) # # All rights reserved. # # # # Redistribution and use in source and binary forms, with or without # # modification, are permitted provided that the following conditions # # are met: # # # # * Redistributions of source code must retain the above copyright # # notice, this list of conditions and the following disclaimer. # # * Redistributions in binary form must reproduce the above copyright # # notice, this list of conditions and the following disclaimer # # in the documentation and/or other materials provided with the # # distribution. # # * Neither the name of the South African Astronomical Observatory # # (SAAO) nor the names of its contributors may be used to endorse # # or promote products derived from this software without specific # # prior written permission. # # # # THIS SOFTWARE IS PROVIDED BY THE SAAO ''AS IS'' AND ANY EXPRESS OR # # IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED # # WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE # # DISCLAIMED. IN NO EVENT SHALL THE SAAO BE LIABLE FOR ANY # # DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL # # DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS # # OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) # # HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, # # STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN # # ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # # POSSIBILITY OF SUCH DAMAGE. # ############################################################################ """saltstat contains statistical functions""" import numpy as np from salterror import SaltError def mean(list): """calculate mean of numeric list""" total = 0 for item in list: total += item mean = total / len(list) return mean def median(x,logfile=None): """calculate median of numeric list logfile--depreciated variable """ try: return np.median(x) except Exception, e: message = 'Cannont calculate median because %s' % e raise SaltError(message) def mad(x): """Calculated the Median Absolute Deviation defined as: MAD=median(|x - median(x)|) """ return np.median(abs(x-np.median(x))) def median2d(arrays,logfile=None): """calculate median of 2d array logfile--depreciated variable """ try: arrays = arrays.ravel() median = np.median(arrays) except Exception, e: median=None message = 'ERROR -- SALTSTAT.MEDIAN2D: Cannot median image arrays because %s' % e raise SaltError(message) return median def mean2d(arrays): """calculate mean of 2d array""" mean = arrays[0] for image in arrays[1:]: mean += image mean /= len(arrays) return mean def std2dclip(arrays, mean, std, sig): """calculate clipped std of 2d array""" if np.size(arrays)==0: return 0 mask=(abs(arrays-mean) < sig*std) nsize=np.sum(mask) if nsize > 0: stddev=arrays[mask].std() else: return 0 return stddev def mean2dclip(arrays, mean, std, sig): """calculate the sigma clipped mean of 2d array""" if np.size(arrays)==0: return 0 mask=(abs(arrays-mean) < sig*std) if np.sum(mask) > 0: mean=arrays[mask].mean() else: return 0 return mean def median2dclip(arr, mean, std, sig): """calculate the sigma clipped median of 2d array""" if np.size(arr)==0: return 0 try: arr = arr.ravel() mask=(abs(arr-mean) < sig*std) median = np.median(arr[mask]) except Exception, e: median=-1 return median def iterstat(arr, sig, niter, verbose=False): 
"""iterstas calculates an arrays statistics using a sigma clipped values """ mean=arr.mean() std=arr.std() median=np.median(arr) if verbose: print mean, median, std for i in range(niter): mask=(abs(arr-mean)<sig*std) mean=arr[mask].mean() std=arr[mask].std() median=np.median(arr[mask]) if verbose: print i,mask.sum(), mean, median, std return mean, median, std def median_combine(arrays, logfile=None, axis=0): """Median combine a set of arrays logfile--depreciated variable """ status = 0 try: median = np.median(arrays, axis=axis) except Exception, e: median=None message = 'ERROR -- SALTSTAT.MEDIAN_COMBINE: Cannot median combine arrays because %s' % e raise SaltError(message) return median, status def median_image(arr, nbin): """Median smooth an image with a filter size set by bin returns arr """ from scipy.ndimage.filters import median_filter try: arr=median_filter(arr,size=(nbin,nbin)) except Exception, e: raise SaltError('Could not median filter image because %s' % e) return arr def median_absolute_deviation(a, axis=None): """Compute the median absolute deviation Returns the median absolute deviation of the array elements. The MAD is defined as median(|a-median(a)|). Parameters ---------- a : array_like Input array or object that can be converted to an array. axis : int, optional Axis along which the medians are computed. The default (axis=None) is to compute the median along a flattened version of the array. Returns ------- median_absolute_deviation : ndarray A new array holding the result. If the input contains integers, or floats of smaller precision than 64, then the output Examples -------- This will generate random variates from a Gaussian distribution and return the median absolute deviation for that distribution:: >>> from astropy.stats import median_aboslute_deviation >>> from numpy.random import randn >>> randvar = randn(10000) >>> mad = median_absolute_deviation(randvar) See Also -------- median """ a = np.array(a, copy=False) a_median = np.median(a, axis=axis) #re-broadcast the output median array to subtract it if axis is not None: shape = list(a_median.shape) shape.append(1) a_median = a_median.reshape(shape) #calculated the median average deviation return np.median(np.abs(a - a_median), axis=axis)
bsd-3-clause
cybem/graphite-web-iow
webapp/graphite/logger.py
23
3767
"""Copyright 2008 Orbitz WorldWide Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.""" import os, logging from logging.handlers import TimedRotatingFileHandler as Rotater try: from logging import NullHandler except ImportError as ie: # py2.6 from logging import Handler class NullHandler(Handler): def emit(self, record): pass try: from logging import FileHandler except ImportError as ie: # py2.6 from logging.handlers import FileHandler from django.conf import settings logging.addLevelName(30,"rendering") logging.addLevelName(30,"cache") logging.addLevelName(30,"metric_access") class GraphiteLogger: def __init__(self): self.infoLogger = self._config_logger('info.log', 'info', True, level = logging.INFO, ) self.exceptionLogger = self._config_logger('exception.log', 'exception', True, ) self.cacheLogger = self._config_logger('cache.log', 'cache', settings.LOG_CACHE_PERFORMANCE, ) self.renderingLogger = self._config_logger('rendering.log', 'rendering', settings.LOG_RENDERING_PERFORMANCE, ) self.metricAccessLogger = self._config_logger('metricaccess.log', 'metric_access', settings.LOG_METRIC_ACCESS, ) @staticmethod def _config_logger(log_file_name, name, activate, level=None, when='midnight', backupCount=1): log_file = os.path.join(settings.LOG_DIR, log_file_name) logger = logging.getLogger(name) if level is not None: logger.setLevel(level) if activate: # if want to log this one formatter = logging.Formatter("%(asctime)s :: %(message)s","%a %b %d %H:%M:%S %Y") if settings.LOG_ROTATE: # if we want to rotate logs handler = Rotater(log_file, when=when, backupCount=backupCount) else: # let someone else, e.g. logrotate, rotate the logs handler = FileHandler(log_file) handler.setFormatter(formatter) logger.addHandler(handler) else: logger.addHandler(NullHandler()) return logger def info(self,msg,*args,**kwargs): return self.infoLogger.info(msg,*args,**kwargs) def exception(self,msg="Exception Caught",**kwargs): return self.exceptionLogger.exception(msg,**kwargs) def cache(self,msg,*args,**kwargs): return self.cacheLogger.log(30,msg,*args,**kwargs) def rendering(self,msg,*args,**kwargs): return self.renderingLogger.log(30,msg,*args,**kwargs) def metric_access(self,msg,*args,**kwargs): return self.metricAccessLogger.log(30,msg,*args,**kwargs) log = GraphiteLogger() # import-shared logger instance
apache-2.0
partizand/bankparser
setup.py
1
3023
#!/usr/bin/python3
"""Setup
"""
#import distutils.cmd
import re

from setuptools import setup, find_packages

# import build
# import src.bankparser

# class GenFiles(distutils.cmd.Command):
#     """Generate some of the project files and the help/readme
#     """
#     user_options = []
#     description = 'generate .py and readme command'
#
#     def initialize_options(self):
#         pass
#
#     def finalize_options(self):
#         pass
#
#     def run(self):
#         mybuild = build.MyBuild()
#         mybuild.buid()
#
#
# class CopyScript(distutils.cmd.Command):
#     """
#     For debugging purposes. Copies the package, without installing it, into the specified directory
#     """
#     user_options = [('pubdir=', None, 'Specify dir for public')]
#     description = 'copy script for testing'
#
#     def initialize_options(self):
#         self.pubdir = None
#
#     def finalize_options(self):
#         pass
#
#     def run(self):
#         mybuild = build.MyBuild(self.pubdir)
#         mybuild.copy_script()


# find version in init file
def find_version(filename):
    with open(filename, 'r') as f:
        version_file = f.read()
    version_match = re.search(r"^__version__ = ['\"]([^'\"]*)['\"]", version_file, re.M)
    if version_match:
        return version_match.group(1)
    raise RuntimeError("Unable to find version string.")


version = find_version("src/bankparser/__init__.py")

with open('README.rst', encoding='utf-8') as f:
    long_description = f.read()

setup(name='bankparser',
      version=version,
      author="partizand",
      author_email="",
      url="https://github.com/partizand/bankparser",
      description="Convert banks statements to qif format",
      long_description=long_description,
      license="GPLv3",
      keywords=["qif", "banking", "statement"],
      #cmdclass={'copyscript': CopyScript, 'genfiles': GenFiles},
      classifiers=[
          'Development Status :: 3 - Alpha',
          'Programming Language :: Python :: 3',
          'Natural Language :: Russian',
          'Topic :: Office/Business :: Financial :: Accounting',
          'Topic :: Utilities',
          'Environment :: Console',
          'Operating System :: OS Independent',
          'License :: OSI Approved :: GNU General Public License v3'],
      #packages=find_packages('src'),
      #packages=['bankparser', 'bankparser.banks', 'bankparser.test'],
      packages=['bankparser', 'bankparser.banks'],
      package_dir={'': 'src'},
      #package_data={'bankparser': ['*.ini']},
      test_suite='bankparser.test',
      install_requires=['setuptools'],
      # 'appdirs'
      # ],
      # namespace_packages=["bankparser"],
      entry_points={
          'console_scripts': ['bankparser = bankparser.bankparsercli:main'],
      },
      #include_package_data=True,
      #zip_safe=False
      )
gpl-3.0
tima/ansible
lib/ansible/modules/messaging/rabbitmq_plugin.py
85
4566
#!/usr/bin/python # -*- coding: utf-8 -*- # (c) 2013, Chatham Financial <oss@chathamfinancial.com> # GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) from __future__ import absolute_import, division, print_function __metaclass__ = type ANSIBLE_METADATA = {'metadata_version': '1.1', 'status': ['preview'], 'supported_by': 'community'} DOCUMENTATION = ''' --- module: rabbitmq_plugin short_description: Manage RabbitMQ plugins description: - Manage RabbitMQ plugins. version_added: "1.1" author: - Chris Hoffman (@chrishoffman) options: names: description: - Comma-separated list of plugin names. required: true aliases: [name] new_only: description: - Only enable missing plugins. - Does not disable plugins that are not in the names list. type: bool default: "no" state: description: - Specify if plugins are to be enabled or disabled. default: enabled choices: [enabled, disabled] prefix: description: - Specify a custom install prefix to a Rabbit. version_added: "1.3" ''' EXAMPLES = ''' - name: Enables the rabbitmq_management plugin rabbitmq_plugin: names: rabbitmq_management state: enabled ''' RETURN = ''' enabled: description: list of plugins enabled during task run returned: always type: list sample: ["rabbitmq_management"] disabled: description: list of plugins disabled during task run returned: always type: list sample: ["rabbitmq_management"] ''' import os from ansible.module_utils.basic import AnsibleModule from ansible.module_utils.basic import AnsibleModule class RabbitMqPlugins(object): def __init__(self, module): self.module = module if module.params['prefix']: if os.path.isdir(os.path.join(module.params['prefix'], 'bin')): bin_path = os.path.join(module.params['prefix'], 'bin') elif os.path.isdir(os.path.join(module.params['prefix'], 'sbin')): bin_path = os.path.join(module.params['prefix'], 'sbin') else: # No such path exists. 
raise Exception("No binary folder in prefix %s" % module.params['prefix']) self._rabbitmq_plugins = bin_path + "/rabbitmq-plugins" else: self._rabbitmq_plugins = module.get_bin_path('rabbitmq-plugins', True) def _exec(self, args, run_in_check_mode=False): if not self.module.check_mode or (self.module.check_mode and run_in_check_mode): cmd = [self._rabbitmq_plugins] rc, out, err = self.module.run_command(cmd + args, check_rc=True) return out.splitlines() return list() def get_all(self): list_output = self._exec(['list', '-E', '-m'], True) plugins = [] for plugin in list_output: if not plugin: break plugins.append(plugin) return plugins def enable(self, name): self._exec(['enable', name]) def disable(self, name): self._exec(['disable', name]) def main(): arg_spec = dict( names=dict(required=True, aliases=['name']), new_only=dict(default='no', type='bool'), state=dict(default='enabled', choices=['enabled', 'disabled']), prefix=dict(required=False, default=None) ) module = AnsibleModule( argument_spec=arg_spec, supports_check_mode=True ) result = dict() names = module.params['names'].split(',') new_only = module.params['new_only'] state = module.params['state'] rabbitmq_plugins = RabbitMqPlugins(module) enabled_plugins = rabbitmq_plugins.get_all() enabled = [] disabled = [] if state == 'enabled': if not new_only: for plugin in enabled_plugins: if plugin not in names: rabbitmq_plugins.disable(plugin) disabled.append(plugin) for name in names: if name not in enabled_plugins: rabbitmq_plugins.enable(name) enabled.append(name) else: for plugin in enabled_plugins: if plugin in names: rabbitmq_plugins.disable(plugin) disabled.append(plugin) result['changed'] = len(enabled) > 0 or len(disabled) > 0 result['enabled'] = enabled result['disabled'] = disabled module.exit_json(**result) if __name__ == '__main__': main()
gpl-3.0
GeotrekCE/Geotrek-admin
geotrek/api/v2/functions.py
2
1450
from django.db.models import Func
from django.db.models.fields import FloatField, CharField
from django.contrib.gis.db.models import GeometryField, PointField


def Transform(geom, srid):
    """
    ST_Transform postgis function
    """
    return Func(geom, srid, function='ST_Transform')


def Buffer(geom, radius, num_seg):
    """
    ST_Buffer postgis function
    """
    return Func(geom, radius, num_seg, function='ST_Buffer', output_field=GeometryField())


def GeometryType(geom):
    """
    GeometryType postgis function
    """
    return Func(geom, function='GeometryType', output_field=CharField())


def LineLocatePoint(line, geom):
    """
    ST_LineLocatePoint postgis function
    """
    return Func(line, geom, function='ST_LINELOCATEPOINT', output_field=FloatField())


class Length(Func):
    """
    ST_Length postgis function
    """
    function = 'ST_Length'
    output_field = FloatField()


class Length3D(Func):
    """
    ST_3DLENGTH postgis function
    """
    function = 'ST_3DLENGTH'
    output_field = FloatField()


class Area(Func):
    """
    ST_Area postgis function
    """
    function = 'ST_Area'
    output_field = FloatField()


class StartPoint(Func):
    """
    ST_StartPoint postgis function
    """
    function = 'ST_StartPoint'
    output_field = PointField()


class EndPoint(Func):
    """
    ST_EndPoint postgis function
    """
    function = 'ST_EndPoint'
    output_field = PointField()
bsd-2-clause
ahmadiga/min_edx
openedx/core/djangoapps/content/course_overviews/management/commands/generate_course_overview.py
29
2115
""" Command to load course overviews. """ import logging from optparse import make_option from django.core.management.base import BaseCommand, CommandError from opaque_keys import InvalidKeyError from opaque_keys.edx.keys import CourseKey from xmodule.modulestore.django import modulestore from openedx.core.djangoapps.content.course_overviews.models import CourseOverview log = logging.getLogger(__name__) class Command(BaseCommand): """ Example usage: $ ./manage.py lms generate_course_overview --all --settings=devstack $ ./manage.py lms generate_course_overview 'edX/DemoX/Demo_Course' --settings=devstack """ args = '<course_id course_id ...>' help = 'Generates and stores course overview for one or more courses.' option_list = BaseCommand.option_list + ( make_option('--all', action='store_true', default=False, help='Generate course overview for all courses.'), ) def handle(self, *args, **options): course_keys = [] if options['all']: course_keys = [course.id for course in modulestore().get_courses()] else: if len(args) < 1: raise CommandError('At least one course or --all must be specified.') try: course_keys = [CourseKey.from_string(arg) for arg in args] except InvalidKeyError: log.fatal('Invalid key specified.') if not course_keys: log.fatal('No courses specified.') log.info('Generating course overview for %d courses.', len(course_keys)) log.debug('Generating course overview(s) for the following courses: %s', course_keys) for course_key in course_keys: try: CourseOverview.get_from_id(course_key) except Exception as ex: # pylint: disable=broad-except log.exception('An error occurred while generating course overview for %s: %s', unicode( course_key), ex.message) log.info('Finished generating course overviews.')
agpl-3.0
CloverHealth/airflow
airflow/contrib/kubernetes/kubernetes_request_factory/pod_request_factory.py
1
3983
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied.  See the License for the
# specific language governing permissions and limitations
# under the License.

import yaml
from airflow.contrib.kubernetes.kubernetes_request_factory.kubernetes_request_factory \
    import KubernetesRequestFactory


class SimplePodRequestFactory(KubernetesRequestFactory):
    """
    Request generator for a simple pod.
    """
    _yaml = """apiVersion: v1
kind: Pod
metadata:
  name: name
spec:
  containers:
    - name: base
      image: airflow-worker:latest
      command: ["/usr/local/airflow/entrypoint.sh", "/bin/bash sleep 25"]
  restartPolicy: Never
    """

    def __init__(self):
        pass

    def create(self, pod):
        # type: (Pod) -> dict
        req = yaml.load(self._yaml)
        self.extract_name(pod, req)
        self.extract_labels(pod, req)
        self.extract_image(pod, req)
        self.extract_image_pull_policy(pod, req)
        self.extract_cmds(pod, req)
        self.extract_args(pod, req)
        self.extract_node_selector(pod, req)
        self.extract_env_and_secrets(pod, req)
        self.extract_volume_secrets(pod, req)
        self.attach_volumes(pod, req)
        self.attach_volume_mounts(pod, req)
        self.extract_resources(pod, req)
        self.extract_service_account_name(pod, req)
        self.extract_init_containers(pod, req)
        self.extract_image_pull_secrets(pod, req)
        self.extract_annotations(pod, req)
        self.extract_affinity(pod, req)
        self.extract_tolerations(pod, req)
        return req


class ExtractXcomPodRequestFactory(KubernetesRequestFactory):
    XCOM_MOUNT_PATH = '/airflow/xcom'
    SIDECAR_CONTAINER_NAME = 'airflow-xcom-sidecar'
    """
    Request generator for a pod with sidecar container.
    """
    _yaml = """apiVersion: v1
kind: Pod
metadata:
  name: name
spec:
  volumes:
    - name: xcom
      emptyDir: {{}}
  containers:
    - name: base
      image: airflow-worker:latest
      command: ["/usr/local/airflow/entrypoint.sh", "/bin/bash sleep 25"]
      volumeMounts:
        - name: xcom
          mountPath: {xcomMountPath}
    - name: {sidecarContainerName}
      image: python:3.5-alpine
      command: ["python", "-m", "http.server"]
      volumeMounts:
        - name: xcom
          mountPath: {xcomMountPath}
  restartPolicy: Never
    """.format(xcomMountPath=XCOM_MOUNT_PATH, sidecarContainerName=SIDECAR_CONTAINER_NAME)

    def __init__(self):
        pass

    def create(self, pod):
        # type: (Pod) -> dict
        req = yaml.load(self._yaml)
        self.extract_name(pod, req)
        self.extract_labels(pod, req)
        self.extract_image(pod, req)
        self.extract_image_pull_policy(pod, req)
        self.extract_cmds(pod, req)
        self.extract_args(pod, req)
        self.extract_node_selector(pod, req)
        self.extract_env_and_secrets(pod, req)
        self.extract_volume_secrets(pod, req)
        self.attach_volumes(pod, req)
        self.attach_volume_mounts(pod, req)
        self.extract_resources(pod, req)
        self.extract_service_account_name(pod, req)
        self.extract_init_containers(pod, req)
        self.extract_image_pull_secrets(pod, req)
        self.extract_annotations(pod, req)
        self.extract_affinity(pod, req)
        self.extract_tolerations(pod, req)
        return req
apache-2.0
1st/django-social-auth
social_auth/backends/contrib/douban.py
3
2462
""" Douban OAuth support. This adds support for Douban OAuth service. An application must be registered first on douban.com and the settings DOUBAN_CONSUMER_KEY and DOUBAN_CONSUMER_SECRET must be defined with they corresponding values. By default account id is stored in extra_data field, check OAuthBackend class for details on how to extend it. """ from django.utils import simplejson from social_auth.backends import ConsumerBasedOAuth, OAuthBackend, USERNAME from social_auth.backends.exceptions import AuthCanceled DOUBAN_SERVER = 'www.douban.com' DOUBAN_REQUEST_TOKEN_URL = 'http://%s/service/auth/request_token' % \ DOUBAN_SERVER DOUBAN_ACCESS_TOKEN_URL = 'http://%s/service/auth/access_token' % \ DOUBAN_SERVER DOUBAN_AUTHORIZATION_URL = 'http://%s/service/auth/authorize' % \ DOUBAN_SERVER class DoubanBackend(OAuthBackend): """Douban OAuth authentication backend""" name = 'douban' EXTRA_DATA = [('id', 'id')] def get_user_id(self, details, response): return response['db:uid']['$t'] def get_user_details(self, response): """Return user details from Douban""" return {USERNAME: response["db:uid"]["$t"], 'email': ''} class DoubanAuth(ConsumerBasedOAuth): """Douban OAuth authentication mechanism""" AUTHORIZATION_URL = DOUBAN_AUTHORIZATION_URL REQUEST_TOKEN_URL = DOUBAN_REQUEST_TOKEN_URL ACCESS_TOKEN_URL = DOUBAN_ACCESS_TOKEN_URL SERVER_URL = DOUBAN_SERVER AUTH_BACKEND = DoubanBackend SETTINGS_KEY_NAME = 'DOUBAN_CONSUMER_KEY' SETTINGS_SECRET_NAME = 'DOUBAN_CONSUMER_SECRET' def user_data(self, access_token, *args, **kwargs): """Return user data provided""" url = 'http://api.douban.com/people/%40me?&alt=json' request = self.oauth_request(access_token, url) json = self.fetch_response(request) try: return simplejson.loads(json) except ValueError: return None def auth_complete(self, *args, **kwargs): """Completes login process, must return user instance""" if 'denied' in self.data: raise AuthCanceled(self) else: return super(DoubanAuth, self).auth_complete(*args, **kwargs) # Backend definition BACKENDS = { 'douban': DoubanAuth, }
bsd-3-clause
sestrella/ansible
test/units/modules/network/ios/ios_module.py
50
2516
# (c) 2016 Red Hat Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible.  If not, see <http://www.gnu.org/licenses/>.

# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type

import os
import json

from units.modules.utils import AnsibleExitJson, AnsibleFailJson, ModuleTestCase

fixture_path = os.path.join(os.path.dirname(__file__), 'fixtures')
fixture_data = {}


def load_fixture(name):
    path = os.path.join(fixture_path, name)

    if path in fixture_data:
        return fixture_data[path]

    with open(path) as f:
        data = f.read()

    try:
        data = json.loads(data)
    except Exception:
        pass

    fixture_data[path] = data
    return data


class TestIosModule(ModuleTestCase):

    def execute_module(self, failed=False, changed=False, commands=None, sort=True, defaults=False):
        self.load_fixtures(commands)

        if failed:
            result = self.failed()
            self.assertTrue(result['failed'], result)
        else:
            result = self.changed(changed)
            self.assertEqual(result['changed'], changed, result)

        if commands is not None:
            if sort:
                self.assertEqual(sorted(commands), sorted(result['commands']), result['commands'])
            else:
                self.assertEqual(commands, result['commands'], result['commands'])

        return result

    def failed(self):
        with self.assertRaises(AnsibleFailJson) as exc:
            self.module.main()

        result = exc.exception.args[0]
        self.assertTrue(result['failed'], result)
        return result

    def changed(self, changed=False):
        with self.assertRaises(AnsibleExitJson) as exc:
            self.module.main()

        result = exc.exception.args[0]
        self.assertEqual(result['changed'], changed, result)
        return result

    def load_fixtures(self, commands=None):
        pass
gpl-3.0
jordanemedlock/psychtruths
temboo/core/Library/eBay/Trading/GetMemberMessages.py
4
6986
# -*- coding: utf-8 -*- ############################################################################### # # GetMemberMessages # Retrieves a list of the messages that buyers have posted about your active item listings. # # Python versions 2.6, 2.7, 3.x # # Copyright 2014, Temboo Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, # either express or implied. See the License for the specific # language governing permissions and limitations under the License. # # ############################################################################### from temboo.core.choreography import Choreography from temboo.core.choreography import InputSet from temboo.core.choreography import ResultSet from temboo.core.choreography import ChoreographyExecution import json class GetMemberMessages(Choreography): def __init__(self, temboo_session): """ Create a new instance of the GetMemberMessages Choreo. A TembooSession object, containing a valid set of Temboo credentials, must be supplied. """ super(GetMemberMessages, self).__init__(temboo_session, '/Library/eBay/Trading/GetMemberMessages') def new_input_set(self): return GetMemberMessagesInputSet() def _make_result_set(self, result, path): return GetMemberMessagesResultSet(result, path) def _make_execution(self, session, exec_id, path): return GetMemberMessagesChoreographyExecution(session, exec_id, path) class GetMemberMessagesInputSet(InputSet): """ An InputSet with methods appropriate for specifying the inputs to the GetMemberMessages Choreo. The InputSet object is used to specify input parameters when executing this Choreo. """ def set_DisplayToPublic(self, value): """ Set the value of the DisplayToPublic input for this Choreo. ((optional, boolean) When set to true, only public messages (viewable in the Item listing) are returned.) """ super(GetMemberMessagesInputSet, self)._set_input('DisplayToPublic', value) def set_EndCreationTime(self, value): """ Set the value of the EndCreationTime input for this Choreo. ((optional, date) Used to filter by date range (e.g., 2013-02-08T00:00:00.000Z).) """ super(GetMemberMessagesInputSet, self)._set_input('EndCreationTime', value) def set_EntriesPerPage(self, value): """ Set the value of the EntriesPerPage input for this Choreo. ((optional, integer) The maximum number of records to return in the result.) """ super(GetMemberMessagesInputSet, self)._set_input('EntriesPerPage', value) def set_ItemID(self, value): """ Set the value of the ItemID input for this Choreo. ((optional, string) The ID of the item the message is about.) """ super(GetMemberMessagesInputSet, self)._set_input('ItemID', value) def set_MailMessageType(self, value): """ Set the value of the MailMessageType input for this Choreo. ((required, string) The type of message to retrieve. Valid values are: All and AskSellerQuestion. When set to AskSellerQuestion, ItemID or a date range filter must be specified.) """ super(GetMemberMessagesInputSet, self)._set_input('MailMessageType', value) def set_MemberMessageID(self, value): """ Set the value of the MemberMessageID input for this Choreo. ((optional, string) An ID that uniquely identifies the message for a given user to be retrieved.) 
""" super(GetMemberMessagesInputSet, self)._set_input('MemberMessageID', value) def set_MessageStatus(self, value): """ Set the value of the MessageStatus input for this Choreo. ((optional, string) The status of the message. Valid values are: Answered and Unanswered.) """ super(GetMemberMessagesInputSet, self)._set_input('MessageStatus', value) def set_PageNumber(self, value): """ Set the value of the PageNumber input for this Choreo. ((optional, integer) Specifies the page number of the results to return.) """ super(GetMemberMessagesInputSet, self)._set_input('PageNumber', value) def set_ResponseFormat(self, value): """ Set the value of the ResponseFormat input for this Choreo. ((optional, string) The format that the response should be in. Valid values are: json (the default) and xml.) """ super(GetMemberMessagesInputSet, self)._set_input('ResponseFormat', value) def set_SandboxMode(self, value): """ Set the value of the SandboxMode input for this Choreo. ((conditional, boolean) Indicates that the request should be made to the sandbox endpoint instead of the production endpoint. Set to 1 to enable sandbox mode.) """ super(GetMemberMessagesInputSet, self)._set_input('SandboxMode', value) def set_SenderID(self, value): """ Set the value of the SenderID input for this Choreo. ((optional, string) The seller's UserID.) """ super(GetMemberMessagesInputSet, self)._set_input('SenderID', value) def set_SiteID(self, value): """ Set the value of the SiteID input for this Choreo. ((optional, string) The eBay site ID that you want to access. Defaults to 0 indicating the US site.) """ super(GetMemberMessagesInputSet, self)._set_input('SiteID', value) def set_StartCreationTime(self, value): """ Set the value of the StartCreationTime input for this Choreo. ((optional, date) Used to filter by date range (e.g., 2013-02-08T00:00:00.000Z).) """ super(GetMemberMessagesInputSet, self)._set_input('StartCreationTime', value) def set_UserToken(self, value): """ Set the value of the UserToken input for this Choreo. ((required, string) A valid eBay Auth Token.) """ super(GetMemberMessagesInputSet, self)._set_input('UserToken', value) class GetMemberMessagesResultSet(ResultSet): """ A ResultSet with methods tailored to the values returned by the GetMemberMessages Choreo. The ResultSet object is used to retrieve the results of a Choreo execution. """ def getJSONFromString(self, str): return json.loads(str) def get_Response(self): """ Retrieve the value for the "Response" output from this Choreo execution. (The response from eBay.) """ return self._output.get('Response', None) class GetMemberMessagesChoreographyExecution(ChoreographyExecution): def _make_result_set(self, response, path): return GetMemberMessagesResultSet(response, path)
apache-2.0
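The GetMemberMessages record above only defines the generated Choreo, input-set, and result-set classes. Below is a minimal usage sketch; the TembooSession constructor and execute_with_results() call are assumed from the wider Temboo Python SDK (they do not appear in this record), and all account names, keys, and tokens are placeholders.

# Hypothetical usage sketch for the GetMemberMessages Choreo defined above.
# TembooSession and execute_with_results() are assumptions from the wider
# Temboo SDK; credentials and the auth token are placeholders.
from temboo.core.session import TembooSession
from temboo.core.Library.eBay.Trading.GetMemberMessages import GetMemberMessages

session = TembooSession("ACCOUNT_NAME", "APP_KEY_NAME", "APP_KEY_VALUE")
choreo = GetMemberMessages(session)

inputs = choreo.new_input_set()
inputs.set_UserToken("EBAY_AUTH_TOKEN")     # required: a valid eBay Auth Token
inputs.set_MailMessageType("All")           # required: All or AskSellerQuestion
inputs.set_SandboxMode("1")                 # use the sandbox endpoint while testing
inputs.set_EntriesPerPage("25")             # optional paging control

results = choreo.execute_with_results(inputs)
print(results.getJSONFromString(results.get_Response()))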
openhatch/oh-mainline
vendor/packages/scrapy/scrapy/http/response/dammit.py
16
11593
""" This module contains a fork of the UnicodeDammit class from BeautifulSoup, that expliclty disabled any usage of chardet library. The UnicodeDammit class is used as a last resource for detecting the encoding of a response. """ import re import codecs chardet = None # we don't want to use chardet since it's very slow, class UnicodeDammit: """A class for detecting the encoding of a *ML document and converting it to a Unicode string. If the source encoding is windows-1252, can replace MS smart quotes with their HTML or XML equivalents.""" # This dictionary maps commonly seen values for "charset" in HTML # meta tags to the corresponding Python codec names. It only covers # values that aren't in Python's aliases and can't be determined # by the heuristics in find_codec. CHARSET_ALIASES = { "macintosh" : "mac-roman", "x-sjis" : "shift-jis" } def __init__(self, markup, overrideEncodings=[], smartQuotesTo='xml', isHTML=False): self.declaredHTMLEncoding = None self.markup, documentEncoding, sniffedEncoding = \ self._detectEncoding(markup, isHTML) self.smartQuotesTo = smartQuotesTo self.triedEncodings = [] if markup == '' or isinstance(markup, unicode): self.originalEncoding = None self.unicode = unicode(markup) return u = None for proposedEncoding in overrideEncodings: u = self._convertFrom(proposedEncoding) if u: break if not u: for proposedEncoding in (documentEncoding, sniffedEncoding): u = self._convertFrom(proposedEncoding) if u: break # If no luck and we have auto-detection library, try that: if not u and chardet and not isinstance(self.markup, unicode): u = self._convertFrom(chardet.detect(self.markup)['encoding']) # As a last resort, try utf-8 and windows-1252: if not u: for proposed_encoding in ("utf-8", "windows-1252"): u = self._convertFrom(proposed_encoding) if u: break self.unicode = u if not u: self.originalEncoding = None def _subMSChar(self, orig): """Changes a MS smart quote character to an XML or HTML entity.""" sub = self.MS_CHARS.get(orig) if isinstance(sub, tuple): if self.smartQuotesTo == 'xml': sub = '&#x%s;' % sub[1] else: sub = '&%s;' % sub[0] return sub def _convertFrom(self, proposed): proposed = self.find_codec(proposed) if not proposed or proposed in self.triedEncodings: return None self.triedEncodings.append(proposed) markup = self.markup # Convert smart quotes to HTML if coming from an encoding # that might have them. if self.smartQuotesTo and proposed.lower() in("windows-1252", "iso-8859-1", "iso-8859-2"): markup = re.compile("([\x80-\x9f])").sub \ (lambda(x): self._subMSChar(x.group(1)), markup) try: # print "Trying to convert document to %s" % proposed u = self._toUnicode(markup, proposed) self.markup = u self.originalEncoding = proposed except Exception, e: # print "That didn't work!" # print e return None #print "Correct encoding: %s" % proposed return self.markup def _toUnicode(self, data, encoding): '''Given a string and its encoding, decodes the string into Unicode. 
%encoding is a string recognized by encodings.aliases''' # strip Byte Order Mark (if present) if (len(data) >= 4) and (data[:2] == '\xfe\xff') \ and (data[2:4] != '\x00\x00'): encoding = 'utf-16be' data = data[2:] elif (len(data) >= 4) and (data[:2] == '\xff\xfe') \ and (data[2:4] != '\x00\x00'): encoding = 'utf-16le' data = data[2:] elif data[:3] == '\xef\xbb\xbf': encoding = 'utf-8' data = data[3:] elif data[:4] == '\x00\x00\xfe\xff': encoding = 'utf-32be' data = data[4:] elif data[:4] == '\xff\xfe\x00\x00': encoding = 'utf-32le' data = data[4:] newdata = unicode(data, encoding) return newdata def _detectEncoding(self, xml_data, isHTML=False): """Given a document, tries to detect its XML encoding.""" xml_encoding = sniffed_xml_encoding = None try: if xml_data[:4] == '\x4c\x6f\xa7\x94': # EBCDIC xml_data = self._ebcdic_to_ascii(xml_data) elif xml_data[:4] == '\x00\x3c\x00\x3f': # UTF-16BE sniffed_xml_encoding = 'utf-16be' xml_data = unicode(xml_data, 'utf-16be').encode('utf-8') elif (len(xml_data) >= 4) and (xml_data[:2] == '\xfe\xff') \ and (xml_data[2:4] != '\x00\x00'): # UTF-16BE with BOM sniffed_xml_encoding = 'utf-16be' xml_data = unicode(xml_data[2:], 'utf-16be').encode('utf-8') elif xml_data[:4] == '\x3c\x00\x3f\x00': # UTF-16LE sniffed_xml_encoding = 'utf-16le' xml_data = unicode(xml_data, 'utf-16le').encode('utf-8') elif (len(xml_data) >= 4) and (xml_data[:2] == '\xff\xfe') and \ (xml_data[2:4] != '\x00\x00'): # UTF-16LE with BOM sniffed_xml_encoding = 'utf-16le' xml_data = unicode(xml_data[2:], 'utf-16le').encode('utf-8') elif xml_data[:4] == '\x00\x00\x00\x3c': # UTF-32BE sniffed_xml_encoding = 'utf-32be' xml_data = unicode(xml_data, 'utf-32be').encode('utf-8') elif xml_data[:4] == '\x3c\x00\x00\x00': # UTF-32LE sniffed_xml_encoding = 'utf-32le' xml_data = unicode(xml_data, 'utf-32le').encode('utf-8') elif xml_data[:4] == '\x00\x00\xfe\xff': # UTF-32BE with BOM sniffed_xml_encoding = 'utf-32be' xml_data = unicode(xml_data[4:], 'utf-32be').encode('utf-8') elif xml_data[:4] == '\xff\xfe\x00\x00': # UTF-32LE with BOM sniffed_xml_encoding = 'utf-32le' xml_data = unicode(xml_data[4:], 'utf-32le').encode('utf-8') elif xml_data[:3] == '\xef\xbb\xbf': # UTF-8 with BOM sniffed_xml_encoding = 'utf-8' xml_data = unicode(xml_data[3:], 'utf-8').encode('utf-8') else: sniffed_xml_encoding = 'ascii' pass except: xml_encoding_match = None xml_encoding_match = re.compile( '^<\?.*encoding=[\'"](.*?)[\'"].*\?>').match(xml_data) if not xml_encoding_match and isHTML: regexp = re.compile('<\s*meta[^>]+charset=([^>]*?)[;\'">]', re.I) xml_encoding_match = regexp.search(xml_data) if xml_encoding_match is not None: xml_encoding = xml_encoding_match.groups()[0].lower() if isHTML: self.declaredHTMLEncoding = xml_encoding if sniffed_xml_encoding and \ (xml_encoding in ('iso-10646-ucs-2', 'ucs-2', 'csunicode', 'iso-10646-ucs-4', 'ucs-4', 'csucs4', 'utf-16', 'utf-32', 'utf_16', 'utf_32', 'utf16', 'u16')): xml_encoding = sniffed_xml_encoding return xml_data, xml_encoding, sniffed_xml_encoding def find_codec(self, charset): return self._codec(self.CHARSET_ALIASES.get(charset, charset)) \ or (charset and self._codec(charset.replace("-", ""))) \ or (charset and self._codec(charset.replace("-", "_"))) \ or charset def _codec(self, charset): if not charset: return charset codec = None try: codecs.lookup(charset) codec = charset except (LookupError, ValueError): pass return codec EBCDIC_TO_ASCII_MAP = None def _ebcdic_to_ascii(self, s): c = self.__class__ if not c.EBCDIC_TO_ASCII_MAP: emap = 
(0,1,2,3,156,9,134,127,151,141,142,11,12,13,14,15, 16,17,18,19,157,133,8,135,24,25,146,143,28,29,30,31, 128,129,130,131,132,10,23,27,136,137,138,139,140,5,6,7, 144,145,22,147,148,149,150,4,152,153,154,155,20,21,158,26, 32,160,161,162,163,164,165,166,167,168,91,46,60,40,43,33, 38,169,170,171,172,173,174,175,176,177,93,36,42,41,59,94, 45,47,178,179,180,181,182,183,184,185,124,44,37,95,62,63, 186,187,188,189,190,191,192,193,194,96,58,35,64,39,61,34, 195,97,98,99,100,101,102,103,104,105,196,197,198,199,200, 201,202,106,107,108,109,110,111,112,113,114,203,204,205, 206,207,208,209,126,115,116,117,118,119,120,121,122,210, 211,212,213,214,215,216,217,218,219,220,221,222,223,224, 225,226,227,228,229,230,231,123,65,66,67,68,69,70,71,72, 73,232,233,234,235,236,237,125,74,75,76,77,78,79,80,81, 82,238,239,240,241,242,243,92,159,83,84,85,86,87,88,89, 90,244,245,246,247,248,249,48,49,50,51,52,53,54,55,56,57, 250,251,252,253,254,255) import string c.EBCDIC_TO_ASCII_MAP = string.maketrans( \ ''.join(map(chr, range(256))), ''.join(map(chr, emap))) return s.translate(c.EBCDIC_TO_ASCII_MAP) MS_CHARS = { '\x80' : ('euro', '20AC'), '\x81' : ' ', '\x82' : ('sbquo', '201A'), '\x83' : ('fnof', '192'), '\x84' : ('bdquo', '201E'), '\x85' : ('hellip', '2026'), '\x86' : ('dagger', '2020'), '\x87' : ('Dagger', '2021'), '\x88' : ('circ', '2C6'), '\x89' : ('permil', '2030'), '\x8A' : ('Scaron', '160'), '\x8B' : ('lsaquo', '2039'), '\x8C' : ('OElig', '152'), '\x8D' : '?', '\x8E' : ('#x17D', '17D'), '\x8F' : '?', '\x90' : '?', '\x91' : ('lsquo', '2018'), '\x92' : ('rsquo', '2019'), '\x93' : ('ldquo', '201C'), '\x94' : ('rdquo', '201D'), '\x95' : ('bull', '2022'), '\x96' : ('ndash', '2013'), '\x97' : ('mdash', '2014'), '\x98' : ('tilde', '2DC'), '\x99' : ('trade', '2122'), '\x9a' : ('scaron', '161'), '\x9b' : ('rsaquo', '203A'), '\x9c' : ('oelig', '153'), '\x9d' : '?', '\x9e' : ('#x17E', '17E'), '\x9f' : ('Yuml', ''),} #######################################################################
agpl-3.0
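The dammit.py module above is Python 2 code (it relies on the unicode built-in and old-style except syntax), so the sketch below uses Python 2 syntax as well. It is a hedged illustration of driving Scrapy's chardet-free UnicodeDammit fork on markup with an inline encoding declaration; the import path is taken from this record's vendored location.

# Python 2 sketch: decode raw markup whose encoding is declared inline, using
# the chardet-free UnicodeDammit fork shown above (import path from this record).
from scrapy.http.response.dammit import UnicodeDammit

raw = '<?xml version="1.0" encoding="iso-8859-1"?><doc>caf\xe9</doc>'
converted = UnicodeDammit(raw, overrideEncodings=['utf-8'])

# utf-8 fails on the \xe9 byte, so the declared iso-8859-1 encoding wins.
print converted.originalEncoding     # 'iso-8859-1'
print repr(converted.unicode)        # the markup decoded to a unicode object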
fly19890211/edx-platform
docs/en_us/platform_api/source/conf.py
6
7106
# -*- coding: utf-8 -*- # pylint: disable=invalid-name # pylint: disable=redefined-builtin # pylint: disable=protected-access # pylint: disable=unused-argument import os from path import Path as path import sys import mock MOCK_MODULES = [ 'lxml', 'requests', 'xblock', 'fields', 'xblock.fields', 'frament', 'xblock.fragment', 'webob', 'multidict', 'webob.multidict', 'core', 'xblock.core', 'runtime', 'xblock.runtime', 'sortedcontainers', 'contracts', 'plugin', 'xblock.plugin', 'opaque_keys.edx.asides', 'asides', 'dogstats_wrapper', 'fs', 'fs.errors', 'edxmako', 'edxmako.shortcuts', 'shortcuts', 'crum', 'opaque_keys.edx.locator', 'LibraryLocator', 'Location', 'ipware', 'ip', 'ipware.ip', 'get_ip', 'pygeoip', 'ipaddr', 'django_countries', 'fields', 'django_countries.fields', 'opaque_keys', 'opaque_keys.edx', 'opaque_keys.edx.keys', 'CourseKey', 'UsageKey', 'BlockTypeKey', 'opaque_keys.edx.locations', 'SlashSeparatedCourseKey', 'Locator', 'south', 'modelsinspector', 'south.modelsinspector', 'add_introspection_rules', 'courseware', 'access', 'courseware.access', 'is_mobile_available_for_user', 'courseware.model_data', 'courseware.module_render', 'courseware.views', 'util.request', 'eventtracking', 'xmodule', 'xmodule.exceptions', 'xmodule.modulestore', 'xmodule.modulestore.exceptions', 'xmodule.modulestore.django', 'courseware.models', 'milestones', 'milestones.api', 'milestones.models', 'milestones.exceptions', 'ratelimitbackend', 'analytics', 'courseware.courses', 'staticfiles', 'storage', 'staticfiles.storage', 'content', 'xmodule.contentstore', 'xmodule.contentstore.content', 'xblock.exceptions', 'xmodule.seq_module', 'xmodule.vertical_module', 'xmodule.x_module', 'nltk', 'ratelimitbackend', 'ratelimitbackend.exceptions', 'social', 'social.apps', 'social.apps.django_app', 'social.backends', 'mako', 'exceptions', 'mako.exceptions', 'boto', 'exception', 'boto.exception', 'PIL', 'reportlab', 'lib', 'reportlab.lib', 'pdfgen', 'canvas', 'pdfgen', 'pdfgen.canvas', 'reportlab.pdfgen', 'reportlab.pdfgen.canvas', 'reportlab.lib.pagesizes', 'reportlab.lib.units', 'reportlab.lib.styles', 'reportlab.platypus', 'reportlab.platypus.tables', 'boto', 's3', 'connection', 'boto.s3', 'boto.s3.connection', 'boto.s3.key', 'Crypto', 'Crypto.Cipher', 'Crypto.PublicKey', 'openid', 'store', 'interface', 'openid.store', 'store.interface', 'openid.store.interface', 'external_auth.views', 'html_to_text', 'mail_utils', 'ratelimitbackend.backends', 'social.apps.django_app.default', 'social.exceptions', 'social.pipeline', 'xmodule.error_module', 'accounts.api', 'modulestore.mongo.base', 'xmodule.modulestore.mongo', 'xmodule.modulestore.mongo.base', 'edxval', 'edxval.api', 'model_utils', 'model_utils.models', 'model_utils.managers', 'certificates', 'certificates.models', 'certificates.models.GeneratedCertificate', 'shoppingcart', 'shopppingcart.models', 'shopppingcart.api', 'api', 'student', 'student.views', 'student.forms', 'student.models', 'celery', 'celery.task', 'student.roles', 'embargo.models', 'xmodule.vertical_block', 'vertical_block', 'errors', 'UserNotFound', 'UserNotAuthorized', 'AccountUpdateError', 'AccountValidationError', 'transaction', 'parsers', 'MergePatchParser', 'get_account_settings', 'update_account_settings', 'serializers', 'profile_images.images', 'xmodule.course_module', 'user_api.accounts.api', 'user_api.accounts.serializers', 'ecommerce_api_client', 'client', 'ecommerce_api_client.client', 'ecommerce_api_client.exceptions', 'student.auth', 'ccx_keys', 'ccx_keys.locator', 
'user_api.preferences.api' ] for mod_name in MOCK_MODULES: sys.modules[mod_name] = mock.Mock(class_that_is_extended=object) if "DJANGO_SETTINGS_MODULE" not in os.environ: docs_path = os.getcwd() mezzanine_path_parts = (docs_path, "..") sys.path.insert(0, docs_path) sys.path.insert(0, os.path.realpath(os.path.join(*mezzanine_path_parts))) os.environ["DJANGO_SETTINGS_MODULE"] = "docs_settings" # Django 1.7's setup is required before touching translated strings. import django try: django.setup() except AttributeError: # < 1.7 pass on_rtd = os.environ.get('READTHEDOCS', None) == 'True' sys.path.append('../../../../') os.environ['DJANGO_SETTINGS_MODULE'] = 'lms.envs.dev' #os.environ.setdefault("DJANGO_SETTINGS_MODULE", "lms.envs.dev") from docs.shared.conf import * # Add any paths that contain templates here, relative to this directory. #templates_path.append('source/_templates') # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". #html_static_path.append('source/_static') if not on_rtd: # only import and set the theme if we're building docs locally import sphinx_rtd_theme html_theme = 'sphinx_rtd_theme' html_theme_path = [sphinx_rtd_theme.get_html_theme_path()] # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. root = path('../../../..').abspath() sys.path.insert(0, root) sys.path.append(root / "common/lib/xmodule") sys.path.append(root / "common/djangoapps") sys.path.append(root / "lms/djangoapps") sys.path.append(root / "lms/envs") sys.path.append(root / "openedx/core/djangoapps") sys.path.insert( 0, os.path.abspath( os.path.normpath( os.path.dirname(__file__) + '/../../../' ) ) ) sys.path.append('.') # django configuration - careful here if on_rtd: os.environ['DJANGO_SETTINGS_MODULE'] = 'lms' else: os.environ['DJANGO_SETTINGS_MODULE'] = 'lms' # -- General configuration ----------------------------------------------------- # Add any Sphinx extension module names here, as strings. They can be extensions # coming with Sphinx (named 'sphinx.ext.*') or your custom ones. extensions = [ 'sphinx.ext.autodoc', 'sphinx.ext.doctest', 'sphinx.ext.intersphinx', 'sphinx.ext.todo', 'sphinx.ext.coverage', 'sphinx.ext.pngmath', 'sphinx.ext.mathjax', 'sphinx.ext.viewcode', 'sphinxcontrib.napoleon'] project = u'Open edX Platform APIs' copyright = u'2015, edX' exclude_patterns = ['build', 'links.rst']
agpl-3.0
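The conf.py above leans heavily on one pattern worth isolating: placing Mock objects in sys.modules so Sphinx autodoc can import the documented code without a full Django/edX environment. The sketch below is a self-contained illustration with placeholder module names, not the real edx-platform mock list; it uses unittest.mock where the original imports the standalone mock package.

# Self-contained sketch of the sys.modules mocking pattern used above.
# 'heavy_dependency' is a placeholder name, not a real edx-platform module.
import sys
from unittest import mock  # the conf.py above uses the standalone `mock` package

MOCK_MODULES = ['heavy_dependency', 'heavy_dependency.submodule']

for mod_name in MOCK_MODULES:
    # The keyword argument simply becomes an attribute on the Mock, mirroring
    # the original conf.py call.
    sys.modules[mod_name] = mock.Mock(class_that_is_extended=object)

import heavy_dependency               # resolves to the Mock already in sys.modules
import heavy_dependency.submodule     # likewise

print(type(heavy_dependency))         # <class 'unittest.mock.Mock'>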
bjlittle/iris
docs/gallery_code/oceanography/plot_atlantic_profiles.py
2
3317
""" Oceanographic Profiles and T-S Diagrams ======================================= This example demonstrates how to plot vertical profiles of different variables in the same axes, and how to make a scatter plot of two variables. There is an oceanographic theme but the same techniques are equally applicable to atmospheric or other kinds of data. The data used are profiles of potential temperature and salinity in the Equatorial and South Atlantic, output from an ocean model. The y-axis of the first plot produced will be automatically inverted due to the presence of the attribute positive=down on the depth coordinate. This means depth values intuitively increase downward on the y-axis. """ import matplotlib.pyplot as plt import iris import iris.iterate import iris.plot as iplt def main(): # Load the gridded temperature and salinity data. fname = iris.sample_data_path("atlantic_profiles.nc") cubes = iris.load(fname) (theta,) = cubes.extract("sea_water_potential_temperature") (salinity,) = cubes.extract("sea_water_practical_salinity") # Extract profiles of temperature and salinity from a particular point in # the southern portion of the domain, and limit the depth of the profile # to 1000m. lon_cons = iris.Constraint(longitude=330.5) lat_cons = iris.Constraint(latitude=lambda l: -10 < l < -9) depth_cons = iris.Constraint(depth=lambda d: d <= 1000) theta_1000m = theta.extract(depth_cons & lon_cons & lat_cons) salinity_1000m = salinity.extract(depth_cons & lon_cons & lat_cons) # Plot these profiles on the same set of axes. Depth is automatically # recognised as a vertical coordinate and placed on the y-axis. # The first plot is in the default axes. We'll use the same color for the # curve and its axes/tick labels. plt.figure(figsize=(5, 6)) temperature_color = (0.3, 0.4, 0.5) ax1 = plt.gca() iplt.plot( theta_1000m, linewidth=2, color=temperature_color, alpha=0.75, ) ax1.set_xlabel("Potential Temperature / K", color=temperature_color) ax1.set_ylabel("Depth / m") for ticklabel in ax1.get_xticklabels(): ticklabel.set_color(temperature_color) # To plot salinity in the same axes we use twiny(). We'll use a different # color to identify salinity. salinity_color = (0.6, 0.1, 0.15) ax2 = plt.gca().twiny() iplt.plot( salinity_1000m, linewidth=2, color=salinity_color, alpha=0.75, ) ax2.set_xlabel("Salinity / PSU", color=salinity_color) for ticklabel in ax2.get_xticklabels(): ticklabel.set_color(salinity_color) plt.tight_layout() iplt.show() # Now plot a T-S diagram using scatter. We'll use all the profiles here, # and each point will be coloured according to its depth. plt.figure(figsize=(6, 6)) depth_values = theta.coord("depth").points for s, t in iris.iterate.izip(salinity, theta, coords="depth"): iplt.scatter(s, t, c=depth_values, marker="+", cmap="RdYlBu_r") ax = plt.gca() ax.set_xlabel("Salinity / PSU") ax.set_ylabel("Potential Temperature / K") cb = plt.colorbar(orientation="horizontal") cb.set_label("Depth / m") plt.tight_layout() iplt.show() if __name__ == "__main__": main()
lgpl-3.0
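The gallery script above needs iris and its sample data. The sketch below reproduces just the twin-x-axis profile idea with plain matplotlib and invented, synthetic temperature and salinity profiles, so the plotting pattern can be tried without the atlantic_profiles.nc file.

# Synthetic-data sketch of the two-variable profile plot used above: one
# inverted depth axis shared by two x-axes via twiny(). Profile values are
# made up for illustration only.
import numpy as np
import matplotlib.pyplot as plt

depth = np.linspace(0, 1000, 50)
theta = 293 - 0.01 * depth           # made-up potential temperature profile / K
salinity = 34.5 + 0.0005 * depth     # made-up salinity profile / PSU

fig, ax1 = plt.subplots(figsize=(5, 6))
ax1.plot(theta, depth, color="tab:blue")
ax1.set_xlabel("Potential Temperature / K", color="tab:blue")
ax1.set_ylabel("Depth / m")
ax1.invert_yaxis()                   # depth increases downwards, as iris does automatically

ax2 = ax1.twiny()                    # second x-axis sharing the same depth axis
ax2.plot(salinity, depth, color="tab:red")
ax2.set_xlabel("Salinity / PSU", color="tab:red")

plt.tight_layout()
plt.show()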
davidjb/sqlalchemy
test/engine/test_reflection.py
21
59908
import operator import unicodedata import sqlalchemy as sa from sqlalchemy import schema, events, event, inspect from sqlalchemy import MetaData, Integer, String from sqlalchemy.testing import (ComparesTables, engines, AssertsCompiledSQL, fixtures, skip) from sqlalchemy.testing.schema import Table, Column from sqlalchemy.testing import eq_, assert_raises, assert_raises_message from sqlalchemy import testing from sqlalchemy.util import ue metadata, users = None, None class ReflectionTest(fixtures.TestBase, ComparesTables): __backend__ = True @testing.exclude('mssql', '<', (10, 0, 0), 'Date is only supported on MSSQL 2008+') @testing.exclude('mysql', '<', (4, 1, 1), 'early types are squirrely') @testing.provide_metadata def test_basic_reflection(self): meta = self.metadata users = Table('engine_users', meta, Column('user_id', sa.INT, primary_key=True), Column('user_name', sa.VARCHAR(20), nullable=False), Column('test1', sa.CHAR(5), nullable=False), Column('test2', sa.Float(5), nullable=False), Column('test3', sa.Text), Column('test4', sa.Numeric(10, 2), nullable=False), Column('test5', sa.Date), Column('parent_user_id', sa.Integer, sa.ForeignKey('engine_users.user_id')), Column('test6', sa.Date, nullable=False), Column('test7', sa.Text), Column('test8', sa.LargeBinary), Column('test_passivedefault2', sa.Integer, server_default='5'), Column('test9', sa.LargeBinary(100)), Column('test10', sa.Numeric(10, 2)), test_needs_fk=True, ) addresses = Table( 'engine_email_addresses', meta, Column('address_id', sa.Integer, primary_key=True), Column('remote_user_id', sa.Integer, sa.ForeignKey(users.c.user_id)), Column('email_address', sa.String(20)), test_needs_fk=True, ) meta.create_all() meta2 = MetaData() reflected_users = Table('engine_users', meta2, autoload=True, autoload_with=testing.db) reflected_addresses = Table('engine_email_addresses', meta2, autoload=True, autoload_with=testing.db) self.assert_tables_equal(users, reflected_users) self.assert_tables_equal(addresses, reflected_addresses) @testing.provide_metadata def test_autoload_with_imply_autoload(self,): meta = self.metadata t = Table( 't', meta, Column('id', sa.Integer, primary_key=True), Column('x', sa.String(20)), Column('y', sa.Integer)) meta.create_all() meta2 = MetaData() reflected_t = Table('t', meta2, autoload_with=testing.db) self.assert_tables_equal(t, reflected_t) @testing.provide_metadata def test_two_foreign_keys(self): meta = self.metadata Table( 't1', meta, Column('id', sa.Integer, primary_key=True), Column('t2id', sa.Integer, sa.ForeignKey('t2.id')), Column('t3id', sa.Integer, sa.ForeignKey('t3.id')), test_needs_fk=True, ) Table('t2', meta, Column('id', sa.Integer, primary_key=True), test_needs_fk=True) Table('t3', meta, Column('id', sa.Integer, primary_key=True), test_needs_fk=True) meta.create_all() meta2 = MetaData() t1r, t2r, t3r = [Table(x, meta2, autoload=True, autoload_with=testing.db) for x in ('t1', 't2', 't3')] assert t1r.c.t2id.references(t2r.c.id) assert t1r.c.t3id.references(t3r.c.id) def test_nonexistent(self): meta = MetaData(testing.db) assert_raises(sa.exc.NoSuchTableError, Table, 'nonexistent', meta, autoload=True) assert 'nonexistent' not in meta.tables @testing.provide_metadata def test_include_columns(self): meta = self.metadata foo = Table('foo', meta, *[Column(n, sa.String(30)) for n in ['a', 'b', 'c', 'd', 'e', 'f']]) meta.create_all() meta2 = MetaData(testing.db) foo = Table('foo', meta2, autoload=True, include_columns=['b', 'f', 'e']) # test that cols come back in original order eq_([c.name for c in 
foo.c], ['b', 'e', 'f']) for c in ('b', 'f', 'e'): assert c in foo.c for c in ('a', 'c', 'd'): assert c not in foo.c # test against a table which is already reflected meta3 = MetaData(testing.db) foo = Table('foo', meta3, autoload=True) foo = Table('foo', meta3, include_columns=['b', 'f', 'e'], extend_existing=True) eq_([c.name for c in foo.c], ['b', 'e', 'f']) for c in ('b', 'f', 'e'): assert c in foo.c for c in ('a', 'c', 'd'): assert c not in foo.c @testing.provide_metadata def test_extend_existing(self): meta = self.metadata Table('t', meta, Column('id', Integer, primary_key=True), Column('x', Integer), Column('y', Integer), Column('z', Integer, server_default="5"), ) meta.create_all() m2 = MetaData() old_z = Column('z', String, primary_key=True) old_y = Column('y', String) old_q = Column('q', Integer) t2 = Table('t', m2, old_z, old_q) eq_(t2.primary_key.columns, (t2.c.z, )) t2 = Table('t', m2, old_y, extend_existing=True, autoload=True, autoload_with=testing.db) eq_( set(t2.columns.keys()), set(['x', 'y', 'z', 'q', 'id']) ) eq_(t2.primary_key.columns, (t2.c.id, )) assert t2.c.z is not old_z assert t2.c.y is old_y assert t2.c.z.type._type_affinity is Integer assert t2.c.q is old_q m3 = MetaData() t3 = Table('t', m3, Column('z', Integer)) t3 = Table('t', m3, extend_existing=False, autoload=True, autoload_with=testing.db) eq_( set(t3.columns.keys()), set(['z']) ) m4 = MetaData() old_z = Column('z', String, primary_key=True) old_y = Column('y', String) old_q = Column('q', Integer) t4 = Table('t', m4, old_z, old_q) eq_(t4.primary_key.columns, (t4.c.z, )) t4 = Table('t', m4, old_y, extend_existing=True, autoload=True, autoload_replace=False, autoload_with=testing.db) eq_( set(t4.columns.keys()), set(['x', 'y', 'z', 'q', 'id']) ) eq_(t4.primary_key.columns, (t4.c.id, )) assert t4.c.z is old_z assert t4.c.y is old_y assert t4.c.z.type._type_affinity is String assert t4.c.q is old_q @testing.emits_warning(r".*omitted columns") @testing.provide_metadata def test_include_columns_indexes(self): m = self.metadata t1 = Table('t1', m, Column('a', sa.Integer), Column('b', sa.Integer)) sa.Index('foobar', t1.c.a, t1.c.b) sa.Index('bat', t1.c.a) m.create_all() m2 = MetaData(testing.db) t2 = Table('t1', m2, autoload=True) assert len(t2.indexes) == 2 m2 = MetaData(testing.db) t2 = Table('t1', m2, autoload=True, include_columns=['a']) assert len(t2.indexes) == 1 m2 = MetaData(testing.db) t2 = Table('t1', m2, autoload=True, include_columns=['a', 'b']) assert len(t2.indexes) == 2 @testing.provide_metadata def test_autoload_replace_foreign_key_nonpresent(self): """test autoload_replace=False with col plus FK establishes the FK not present in the DB. """ Table('a', self.metadata, Column('id', Integer, primary_key=True)) Table('b', self.metadata, Column('id', Integer, primary_key=True), Column('a_id', Integer)) self.metadata.create_all() m2 = MetaData() b2 = Table('b', m2, Column('a_id', Integer, sa.ForeignKey('a.id'))) a2 = Table('a', m2, autoload=True, autoload_with=testing.db) b2 = Table('b', m2, extend_existing=True, autoload=True, autoload_with=testing.db, autoload_replace=False) assert b2.c.id is not None assert b2.c.a_id.references(a2.c.id) eq_(len(b2.constraints), 2) @testing.provide_metadata def test_autoload_replace_foreign_key_ispresent(self): """test autoload_replace=False with col plus FK mirroring DB-reflected FK skips the reflected FK and installs the in-python one only. 
""" Table('a', self.metadata, Column('id', Integer, primary_key=True)) Table('b', self.metadata, Column('id', Integer, primary_key=True), Column('a_id', Integer, sa.ForeignKey('a.id'))) self.metadata.create_all() m2 = MetaData() b2 = Table('b', m2, Column('a_id', Integer, sa.ForeignKey('a.id'))) a2 = Table('a', m2, autoload=True, autoload_with=testing.db) b2 = Table('b', m2, extend_existing=True, autoload=True, autoload_with=testing.db, autoload_replace=False) assert b2.c.id is not None assert b2.c.a_id.references(a2.c.id) eq_(len(b2.constraints), 2) @testing.provide_metadata def test_autoload_replace_foreign_key_removed(self): """test autoload_replace=False with col minus FK that's in the DB means the FK is skipped and doesn't get installed at all. """ Table('a', self.metadata, Column('id', Integer, primary_key=True)) Table('b', self.metadata, Column('id', Integer, primary_key=True), Column('a_id', Integer, sa.ForeignKey('a.id'))) self.metadata.create_all() m2 = MetaData() b2 = Table('b', m2, Column('a_id', Integer)) a2 = Table('a', m2, autoload=True, autoload_with=testing.db) b2 = Table('b', m2, extend_existing=True, autoload=True, autoload_with=testing.db, autoload_replace=False) assert b2.c.id is not None assert not b2.c.a_id.references(a2.c.id) eq_(len(b2.constraints), 1) @testing.provide_metadata def test_autoload_replace_primary_key(self): Table('a', self.metadata, Column('id', Integer)) self.metadata.create_all() m2 = MetaData() a2 = Table('a', m2, Column('id', Integer, primary_key=True)) Table('a', m2, autoload=True, autoload_with=testing.db, autoload_replace=False, extend_existing=True) eq_(list(a2.primary_key), [a2.c.id]) def test_autoload_replace_arg(self): Table('t', MetaData(), autoload_replace=False) @testing.provide_metadata def test_autoincrement_col(self): """test that 'autoincrement' is reflected according to sqla's policy. Don't mark this test as unsupported for any backend ! (technically it fails with MySQL InnoDB since "id" comes before "id2") """ meta = self.metadata Table('test', meta, Column('id', sa.Integer, primary_key=True), Column('data', sa.String(50)), mysql_engine='MyISAM' ) Table('test2', meta, Column('id', sa.Integer, sa.ForeignKey('test.id'), primary_key=True), Column('id2', sa.Integer, primary_key=True), Column('data', sa.String(50)), mysql_engine='MyISAM' ) meta.create_all() m2 = MetaData(testing.db) t1a = Table('test', m2, autoload=True) assert t1a._autoincrement_column is t1a.c.id t2a = Table('test2', m2, autoload=True) assert t2a._autoincrement_column is t2a.c.id2 @skip('sqlite') @testing.provide_metadata def test_unknown_types(self): """Test the handling of unknown types for the given dialect. sqlite is skipped because it has special rules for unknown types using 'affinity types' - this feature is tested in that dialect's test spec. 
""" meta = self.metadata t = Table("test", meta, Column('foo', sa.DateTime)) ischema_names = testing.db.dialect.ischema_names t.create() testing.db.dialect.ischema_names = {} try: m2 = MetaData(testing.db) assert_raises(sa.exc.SAWarning, Table, "test", m2, autoload=True) @testing.emits_warning('Did not recognize type') def warns(): m3 = MetaData(testing.db) t3 = Table("test", m3, autoload=True) assert t3.c.foo.type.__class__ == sa.types.NullType finally: testing.db.dialect.ischema_names = ischema_names @testing.provide_metadata def test_basic_override(self): meta = self.metadata table = Table( 'override_test', meta, Column('col1', sa.Integer, primary_key=True), Column('col2', sa.String(20)), Column('col3', sa.Numeric) ) table.create() meta2 = MetaData(testing.db) table = Table( 'override_test', meta2, Column('col2', sa.Unicode()), Column('col4', sa.String(30)), autoload=True) self.assert_(isinstance(table.c.col1.type, sa.Integer)) self.assert_(isinstance(table.c.col2.type, sa.Unicode)) self.assert_(isinstance(table.c.col4.type, sa.String)) @testing.provide_metadata def test_override_upgrade_pk_flag(self): meta = self.metadata table = Table( 'override_test', meta, Column('col1', sa.Integer), Column('col2', sa.String(20)), Column('col3', sa.Numeric) ) table.create() meta2 = MetaData(testing.db) table = Table( 'override_test', meta2, Column('col1', sa.Integer, primary_key=True), autoload=True) eq_(list(table.primary_key), [table.c.col1]) eq_(table.c.col1.primary_key, True) @testing.provide_metadata def test_override_pkfk(self): """test that you can override columns which contain foreign keys to other reflected tables, where the foreign key column is also a primary key column""" meta = self.metadata Table('users', meta, Column('id', sa.Integer, primary_key=True), Column('name', sa.String(30))) Table('addresses', meta, Column('id', sa.Integer, primary_key=True), Column('street', sa.String(30))) meta.create_all() meta2 = MetaData(testing.db) a2 = Table('addresses', meta2, Column('id', sa.Integer, sa.ForeignKey('users.id'), primary_key=True), autoload=True) u2 = Table('users', meta2, autoload=True) assert list(a2.primary_key) == [a2.c.id] assert list(u2.primary_key) == [u2.c.id] assert u2.join(a2).onclause.compare(u2.c.id == a2.c.id) meta3 = MetaData(testing.db) u3 = Table('users', meta3, autoload=True) a3 = Table('addresses', meta3, Column('id', sa.Integer, sa.ForeignKey('users.id'), primary_key=True), autoload=True) assert list(a3.primary_key) == [a3.c.id] assert list(u3.primary_key) == [u3.c.id] assert u3.join(a3).onclause.compare(u3.c.id == a3.c.id) @testing.provide_metadata def test_override_nonexistent_fk(self): """test that you can override columns and create new foreign keys to other reflected tables which have no foreign keys. 
this is common with MySQL MyISAM tables.""" meta = self.metadata Table('users', meta, Column('id', sa.Integer, primary_key=True), Column('name', sa.String(30))) Table('addresses', meta, Column('id', sa.Integer, primary_key=True), Column('street', sa.String(30)), Column('user_id', sa.Integer)) meta.create_all() meta2 = MetaData(testing.db) a2 = Table('addresses', meta2, Column('user_id', sa.Integer, sa.ForeignKey('users.id')), autoload=True) u2 = Table('users', meta2, autoload=True) assert len(a2.c.user_id.foreign_keys) == 1 assert len(a2.foreign_keys) == 1 assert [c.parent for c in a2.foreign_keys] == [a2.c.user_id] assert [c.parent for c in a2.c.user_id.foreign_keys] \ == [a2.c.user_id] assert list(a2.c.user_id.foreign_keys)[0].parent \ is a2.c.user_id assert u2.join(a2).onclause.compare(u2.c.id == a2.c.user_id) meta3 = MetaData(testing.db) u3 = Table('users', meta3, autoload=True) a3 = Table('addresses', meta3, Column('user_id', sa.Integer, sa.ForeignKey('users.id')), autoload=True) assert u3.join(a3).onclause.compare(u3.c.id == a3.c.user_id) meta4 = MetaData(testing.db) u4 = Table('users', meta4, Column('id', sa.Integer, key='u_id', primary_key=True), autoload=True) a4 = Table( 'addresses', meta4, Column('id', sa.Integer, key='street', primary_key=True), Column('street', sa.String(30), key='user_id'), Column('user_id', sa.Integer, sa.ForeignKey('users.u_id' ), key='id'), autoload=True, ) assert u4.join(a4).onclause.compare(u4.c.u_id == a4.c.id) assert list(u4.primary_key) == [u4.c.u_id] assert len(u4.columns) == 2 assert len(u4.constraints) == 1 assert len(a4.columns) == 3 assert len(a4.constraints) == 2 @testing.provide_metadata def test_override_composite_fk(self): """Test double-remove of composite foreign key, when replaced.""" metadata = self.metadata Table('a', metadata, Column('x', sa.Integer, primary_key=True), Column('y', sa.Integer, primary_key=True), ) Table('b', metadata, Column('x', sa.Integer, primary_key=True), Column('y', sa.Integer, primary_key=True), sa.ForeignKeyConstraint(['x', 'y'], ['a.x', 'a.y']) ) metadata.create_all() meta2 = MetaData() c1 = Column('x', sa.Integer, primary_key=True) c2 = Column('y', sa.Integer, primary_key=True) f1 = sa.ForeignKeyConstraint(['x', 'y'], ['a.x', 'a.y']) b1 = Table('b', meta2, c1, c2, f1, autoload=True, autoload_with=testing.db ) assert b1.c.x is c1 assert b1.c.y is c2 assert f1 in b1.constraints assert len(b1.constraints) == 2 @testing.provide_metadata def test_override_keys(self): """test that columns can be overridden with a 'key', and that ForeignKey targeting during reflection still works.""" meta = self.metadata Table('a', meta, Column('x', sa.Integer, primary_key=True), Column('z', sa.Integer), test_needs_fk=True ) Table('b', meta, Column('y', sa.Integer, sa.ForeignKey('a.x')), test_needs_fk=True ) meta.create_all() m2 = MetaData(testing.db) a2 = Table('a', m2, Column('x', sa.Integer, primary_key=True, key='x1'), autoload=True) b2 = Table('b', m2, autoload=True) assert a2.join(b2).onclause.compare(a2.c.x1 == b2.c.y) assert b2.c.y.references(a2.c.x1) @testing.provide_metadata def test_nonreflected_fk_raises(self): """test that a NoReferencedColumnError is raised when reflecting a table with an FK to another table which has not included the target column in its reflection. 
""" meta = self.metadata Table('a', meta, Column('x', sa.Integer, primary_key=True), Column('z', sa.Integer), test_needs_fk=True ) Table('b', meta, Column('y', sa.Integer, sa.ForeignKey('a.x')), test_needs_fk=True ) meta.create_all() m2 = MetaData(testing.db) a2 = Table('a', m2, include_columns=['z'], autoload=True) b2 = Table('b', m2, autoload=True) assert_raises(sa.exc.NoReferencedColumnError, a2.join, b2) @testing.exclude('mysql', '<', (4, 1, 1), 'innodb funkiness') @testing.provide_metadata def test_override_existing_fk(self): """test that you can override columns and specify new foreign keys to other reflected tables, on columns which *do* already have that foreign key, and that the FK is not duped. """ meta = self.metadata Table('users', meta, Column('id', sa.Integer, primary_key=True), Column('name', sa.String(30)), test_needs_fk=True) Table('addresses', meta, Column('id', sa.Integer, primary_key=True), Column('user_id', sa.Integer, sa.ForeignKey('users.id')), test_needs_fk=True) meta.create_all() meta2 = MetaData(testing.db) a2 = Table('addresses', meta2, Column('user_id', sa.Integer, sa.ForeignKey('users.id')), autoload=True) u2 = Table('users', meta2, autoload=True) s = sa.select([a2]) assert s.c.user_id is not None assert len(a2.foreign_keys) == 1 assert len(a2.c.user_id.foreign_keys) == 1 assert len(a2.constraints) == 2 assert [c.parent for c in a2.foreign_keys] == [a2.c.user_id] assert [c.parent for c in a2.c.user_id.foreign_keys] \ == [a2.c.user_id] assert list(a2.c.user_id.foreign_keys)[0].parent \ is a2.c.user_id assert u2.join(a2).onclause.compare(u2.c.id == a2.c.user_id) meta2 = MetaData(testing.db) u2 = Table('users', meta2, Column('id', sa.Integer, primary_key=True), autoload=True) a2 = Table('addresses', meta2, Column('id', sa.Integer, primary_key=True), Column('user_id', sa.Integer, sa.ForeignKey('users.id')), autoload=True) s = sa.select([a2]) assert s.c.user_id is not None assert len(a2.foreign_keys) == 1 assert len(a2.c.user_id.foreign_keys) == 1 assert len(a2.constraints) == 2 assert [c.parent for c in a2.foreign_keys] == [a2.c.user_id] assert [c.parent for c in a2.c.user_id.foreign_keys] \ == [a2.c.user_id] assert list(a2.c.user_id.foreign_keys)[0].parent \ is a2.c.user_id assert u2.join(a2).onclause.compare(u2.c.id == a2.c.user_id) @testing.only_on(['postgresql', 'mysql']) @testing.provide_metadata def test_fk_options(self): """test that foreign key reflection includes options (on backends with {dialect}.get_foreign_keys() support)""" if testing.against('postgresql'): test_attrs = ('match', 'onupdate', 'ondelete', 'deferrable', 'initially') addresses_user_id_fkey = sa.ForeignKey( # Each option is specifically not a Postgres default, or # it won't be returned by PG's inspection 'users.id', name='addresses_user_id_fkey', match='FULL', onupdate='RESTRICT', ondelete='RESTRICT', deferrable=True, initially='DEFERRED' ) elif testing.against('mysql'): # MATCH, DEFERRABLE, and INITIALLY cannot be defined for MySQL # ON UPDATE and ON DELETE have defaults of RESTRICT, which are # elided by MySQL's inspection addresses_user_id_fkey = sa.ForeignKey( 'users.id', name='addresses_user_id_fkey', onupdate='CASCADE', ondelete='CASCADE' ) test_attrs = ('onupdate', 'ondelete') meta = self.metadata Table('users', meta, Column('id', sa.Integer, primary_key=True), Column('name', sa.String(30)), test_needs_fk=True) Table('addresses', meta, Column('id', sa.Integer, primary_key=True), Column('user_id', sa.Integer, addresses_user_id_fkey), test_needs_fk=True) meta.create_all() meta2 = 
MetaData() meta2.reflect(testing.db) for fk in meta2.tables['addresses'].foreign_keys: ref = addresses_user_id_fkey for attr in test_attrs: eq_(getattr(fk, attr), getattr(ref, attr)) def test_pks_not_uniques(self): """test that primary key reflection not tripped up by unique indexes""" testing.db.execute(""" CREATE TABLE book ( id INTEGER NOT NULL, title VARCHAR(100) NOT NULL, series INTEGER, series_id INTEGER, UNIQUE(series, series_id), PRIMARY KEY(id) )""") try: metadata = MetaData(bind=testing.db) book = Table('book', metadata, autoload=True) assert book.primary_key.contains_column(book.c.id) assert not book.primary_key.contains_column(book.c.series) assert len(book.primary_key) == 1 finally: testing.db.execute("drop table book") def test_fk_error(self): metadata = MetaData(testing.db) Table('slots', metadata, Column('slot_id', sa.Integer, primary_key=True), Column('pkg_id', sa.Integer, sa.ForeignKey('pkgs.pkg_id')), Column('slot', sa.String(128)), ) assert_raises_message( sa.exc.InvalidRequestError, "Foreign key associated with column 'slots.pkg_id' " "could not find table 'pkgs' with which to generate " "a foreign key to target column 'pkg_id'", metadata.create_all) def test_composite_pks(self): """test reflection of a composite primary key""" testing.db.execute(""" CREATE TABLE book ( id INTEGER NOT NULL, isbn VARCHAR(50) NOT NULL, title VARCHAR(100) NOT NULL, series INTEGER NOT NULL, series_id INTEGER NOT NULL, UNIQUE(series, series_id), PRIMARY KEY(id, isbn) )""") try: metadata = MetaData(bind=testing.db) book = Table('book', metadata, autoload=True) assert book.primary_key.contains_column(book.c.id) assert book.primary_key.contains_column(book.c.isbn) assert not book.primary_key.contains_column(book.c.series) assert len(book.primary_key) == 2 finally: testing.db.execute("drop table book") @testing.exclude('mysql', '<', (4, 1, 1), 'innodb funkiness') @testing.provide_metadata def test_composite_fk(self): """test reflection of composite foreign keys""" meta = self.metadata multi = Table( 'multi', meta, Column('multi_id', sa.Integer, primary_key=True), Column('multi_rev', sa.Integer, primary_key=True), Column('multi_hoho', sa.Integer, primary_key=True), Column('name', sa.String(50), nullable=False), Column('val', sa.String(100)), test_needs_fk=True, ) multi2 = Table('multi2', meta, Column('id', sa.Integer, primary_key=True), Column('foo', sa.Integer), Column('bar', sa.Integer), Column('lala', sa.Integer), Column('data', sa.String(50)), sa.ForeignKeyConstraint(['foo', 'bar', 'lala'], ['multi.multi_id', 'multi.multi_rev', 'multi.multi_hoho' ]), test_needs_fk=True, ) meta.create_all() meta2 = MetaData() table = Table('multi', meta2, autoload=True, autoload_with=testing.db) table2 = Table('multi2', meta2, autoload=True, autoload_with=testing.db) self.assert_tables_equal(multi, table) self.assert_tables_equal(multi2, table2) j = sa.join(table, table2) self.assert_(sa.and_(table.c.multi_id == table2.c.foo, table.c.multi_rev == table2.c.bar, table.c.multi_hoho == table2.c.lala).compare(j.onclause)) @testing.crashes('oracle', 'FIXME: unknown, confirm not fails_on') @testing.requires.check_constraints @testing.provide_metadata def test_reserved(self): # check a table that uses an SQL reserved name doesn't cause an # error meta = self.metadata table_a = Table('select', meta, Column('not', sa.Integer, primary_key=True), Column('from', sa.String(12), nullable=False), sa.UniqueConstraint('from', name='when')) sa.Index('where', table_a.c['from']) # There's currently no way to calculate identifier 
case # normalization in isolation, so... if testing.against('firebird', 'oracle'): check_col = 'TRUE' else: check_col = 'true' quoter = meta.bind.dialect.identifier_preparer.quote_identifier Table('false', meta, Column('create', sa.Integer, primary_key=True), Column('true', sa.Integer, sa.ForeignKey('select.not')), sa.CheckConstraint('%s <> 1' % quoter(check_col), name='limit') ) table_c = Table('is', meta, Column('or', sa.Integer, nullable=False, primary_key=True), Column('join', sa.Integer, nullable=False, primary_key=True), sa.PrimaryKeyConstraint('or', 'join', name='to') ) index_c = sa.Index('else', table_c.c.join) meta.create_all() index_c.drop() meta2 = MetaData(testing.db) Table('select', meta2, autoload=True) Table('false', meta2, autoload=True) Table('is', meta2, autoload=True) @testing.provide_metadata def _test_reflect_uses_bind(self, fn): from sqlalchemy.pool import AssertionPool e = engines.testing_engine(options={"poolclass": AssertionPool}) fn(e) @testing.uses_deprecated() def test_reflect_uses_bind_constructor_conn(self): self._test_reflect_uses_bind(lambda e: MetaData(e.connect(), reflect=True)) @testing.uses_deprecated() def test_reflect_uses_bind_constructor_engine(self): self._test_reflect_uses_bind(lambda e: MetaData(e, reflect=True)) def test_reflect_uses_bind_constructor_conn_reflect(self): self._test_reflect_uses_bind(lambda e: MetaData(e.connect()).reflect()) def test_reflect_uses_bind_constructor_engine_reflect(self): self._test_reflect_uses_bind(lambda e: MetaData(e).reflect()) def test_reflect_uses_bind_conn_reflect(self): self._test_reflect_uses_bind(lambda e: MetaData().reflect(e.connect())) def test_reflect_uses_bind_engine_reflect(self): self._test_reflect_uses_bind(lambda e: MetaData().reflect(e)) @testing.provide_metadata def test_reflect_all(self): existing = testing.db.table_names() names = ['rt_%s' % name for name in ('a', 'b', 'c', 'd', 'e')] nameset = set(names) for name in names: # be sure our starting environment is sane self.assert_(name not in existing) self.assert_('rt_f' not in existing) baseline = self.metadata for name in names: Table(name, baseline, Column('id', sa.Integer, primary_key=True)) baseline.create_all() m1 = MetaData(testing.db) self.assert_(not m1.tables) m1.reflect() self.assert_(nameset.issubset(set(m1.tables.keys()))) m2 = MetaData() m2.reflect(testing.db, only=['rt_a', 'rt_b']) self.assert_(set(m2.tables.keys()) == set(['rt_a', 'rt_b'])) m3 = MetaData() c = testing.db.connect() m3.reflect(bind=c, only=lambda name, meta: name == 'rt_c') self.assert_(set(m3.tables.keys()) == set(['rt_c'])) m4 = MetaData(testing.db) try: m4.reflect(only=['rt_a', 'rt_f']) self.assert_(False) except sa.exc.InvalidRequestError as e: self.assert_(e.args[0].endswith('(rt_f)')) m5 = MetaData(testing.db) m5.reflect(only=[]) self.assert_(not m5.tables) m6 = MetaData(testing.db) m6.reflect(only=lambda n, m: False) self.assert_(not m6.tables) m7 = MetaData(testing.db) m7.reflect() self.assert_(nameset.issubset(set(m7.tables.keys()))) m8 = MetaData() assert_raises( sa.exc.UnboundExecutionError, m8.reflect ) m8_e1 = MetaData(testing.db) rt_c = Table('rt_c', m8_e1) m8_e1.reflect(extend_existing=True) eq_(set(m8_e1.tables.keys()), set(names)) eq_(rt_c.c.keys(), ['id']) m8_e2 = MetaData(testing.db) rt_c = Table('rt_c', m8_e2) m8_e2.reflect(extend_existing=True, only=['rt_a', 'rt_c']) eq_(set(m8_e2.tables.keys()), set(['rt_a', 'rt_c'])) eq_(rt_c.c.keys(), ['id']) if existing: print("Other tables present in database, skipping some checks.") else: 
baseline.drop_all() m9 = MetaData(testing.db) m9.reflect() self.assert_(not m9.tables) def test_reflect_all_conn_closing(self): m1 = MetaData() c = testing.db.connect() m1.reflect(bind=c) assert not c.closed def test_inspector_conn_closing(self): c = testing.db.connect() inspect(c) assert not c.closed @testing.provide_metadata def test_index_reflection(self): m1 = self.metadata t1 = Table('party', m1, Column('id', sa.Integer, nullable=False), Column('name', sa.String(20), index=True) ) sa.Index('idx1', t1.c.id, unique=True) sa.Index('idx2', t1.c.name, t1.c.id, unique=False) m1.create_all() m2 = MetaData(testing.db) t2 = Table('party', m2, autoload=True) assert len(t2.indexes) == 3 # Make sure indexes are in the order we expect them in tmp = [(idx.name, idx) for idx in t2.indexes] tmp.sort() r1, r2, r3 = [idx[1] for idx in tmp] assert r1.name == 'idx1' assert r2.name == 'idx2' assert r1.unique == True assert r2.unique == False assert r3.unique == False assert set([t2.c.id]) == set(r1.columns) assert set([t2.c.name, t2.c.id]) == set(r2.columns) assert set([t2.c.name]) == set(r3.columns) @testing.provide_metadata def test_index_reflection_cols_busted(self): t = Table('x', self.metadata, Column('a', Integer), Column('b', Integer)) sa.Index('x_ix', t.c.a, t.c.b) self.metadata.create_all() def mock_get_columns(self, connection, table_name, **kw): return [ {"name": "b", "type": Integer, "primary_key": False} ] with testing.mock.patch.object( testing.db.dialect, "get_columns", mock_get_columns): m = MetaData() with testing.expect_warnings( "index key 'a' was not located in columns"): t = Table('x', m, autoload=True, autoload_with=testing.db) eq_(list(t.indexes)[0].columns, [t.c.b]) @testing.requires.views @testing.provide_metadata def test_views(self): metadata = self.metadata users, addresses, dingalings = createTables(metadata) try: metadata.create_all() _create_views(metadata.bind, None) m2 = MetaData(testing.db) users_v = Table("users_v", m2, autoload=True) addresses_v = Table("email_addresses_v", m2, autoload=True) for c1, c2 in zip(users_v.c, users.c): eq_(c1.name, c2.name) self.assert_types_base(c1, c2) for c1, c2 in zip(addresses_v.c, addresses.c): eq_(c1.name, c2.name) self.assert_types_base(c1, c2) finally: _drop_views(metadata.bind) @testing.requires.views @testing.provide_metadata def test_reflect_all_with_views(self): metadata = self.metadata users, addresses, dingalings = createTables(metadata, None) try: metadata.create_all() _create_views(metadata.bind, None) m2 = MetaData(testing.db) m2.reflect(views=False) eq_( set(m2.tables), set(['users', 'email_addresses', 'dingalings']) ) m2 = MetaData(testing.db) m2.reflect(views=True) eq_( set(m2.tables), set(['email_addresses_v', 'users_v', 'users', 'dingalings', 'email_addresses']) ) finally: _drop_views(metadata.bind) class CreateDropTest(fixtures.TestBase): __backend__ = True @classmethod def setup_class(cls): global metadata, users metadata = MetaData() users = Table('users', metadata, Column('user_id', sa.Integer, sa.Sequence('user_id_seq', optional=True), primary_key=True), Column('user_name', sa.String(40))) Table('email_addresses', metadata, Column('address_id', sa.Integer, sa.Sequence('address_id_seq', optional=True), primary_key=True), Column('user_id', sa.Integer, sa.ForeignKey(users.c.user_id)), Column('email_address', sa.String(40))) Table( 'orders', metadata, Column('order_id', sa.Integer, sa.Sequence('order_id_seq', optional=True), primary_key=True), Column('user_id', sa.Integer, sa.ForeignKey(users.c.user_id)), 
Column('description', sa.String(50)), Column('isopen', sa.Integer), ) Table('items', metadata, Column('item_id', sa.INT, sa.Sequence('items_id_seq', optional=True), primary_key=True), Column('order_id', sa.INT, sa.ForeignKey('orders')), Column('item_name', sa.VARCHAR(50))) def test_sorter(self): tables = metadata.sorted_tables table_names = [t.name for t in tables] ua = [n for n in table_names if n in ('users', 'email_addresses')] oi = [n for n in table_names if n in ('orders', 'items')] eq_(ua, ['users', 'email_addresses']) eq_(oi, ['orders', 'items']) def test_checkfirst(self): try: assert not users.exists(testing.db) users.create(bind=testing.db) assert users.exists(testing.db) users.create(bind=testing.db, checkfirst=True) users.drop(bind=testing.db) users.drop(bind=testing.db, checkfirst=True) assert not users.exists(bind=testing.db) users.create(bind=testing.db, checkfirst=True) users.drop(bind=testing.db) finally: metadata.drop_all(bind=testing.db) def test_createdrop(self): metadata.create_all(bind=testing.db) eq_(testing.db.has_table('items'), True) eq_(testing.db.has_table('email_addresses'), True) metadata.create_all(bind=testing.db) eq_(testing.db.has_table('items'), True) metadata.drop_all(bind=testing.db) eq_(testing.db.has_table('items'), False) eq_(testing.db.has_table('email_addresses'), False) metadata.drop_all(bind=testing.db) eq_(testing.db.has_table('items'), False) def test_tablenames(self): metadata.create_all(bind=testing.db) # we only check to see if all the explicitly created tables are # there, rather than assertEqual -- the test db could have # "extra" tables if there is a misconfigured template. (*cough* # tsearch2 w/ the pg windows installer.) self.assert_(not set(metadata.tables) - set(testing.db.table_names())) metadata.drop_all(bind=testing.db) class SchemaManipulationTest(fixtures.TestBase): __backend__ = True def test_append_constraint_unique(self): meta = MetaData() users = Table('users', meta, Column('id', sa.Integer)) addresses = Table('addresses', meta, Column('id', sa.Integer), Column('user_id', sa.Integer)) fk = sa.ForeignKeyConstraint(['user_id'], [users.c.id]) addresses.append_constraint(fk) addresses.append_constraint(fk) assert len(addresses.c.user_id.foreign_keys) == 1 assert addresses.constraints == set([addresses.primary_key, fk]) class UnicodeReflectionTest(fixtures.TestBase): __backend__ = True @classmethod def setup_class(cls): cls.metadata = metadata = MetaData() no_multibyte_period = set([ ('plain', 'col_plain', 'ix_plain') ]) no_has_table = [ ( 'no_has_table_1', ue('col_Unit\u00e9ble'), ue('ix_Unit\u00e9ble') ), ( 'no_has_table_2', ue('col_\u6e2c\u8a66'), ue('ix_\u6e2c\u8a66') ), ] no_case_sensitivity = [ ( ue('\u6e2c\u8a66'), ue('col_\u6e2c\u8a66'), ue('ix_\u6e2c\u8a66') ), ( ue('unit\u00e9ble'), ue('col_unit\u00e9ble'), ue('ix_unit\u00e9ble') ), ] full = [ ( ue('Unit\u00e9ble'), ue('col_Unit\u00e9ble'), ue('ix_Unit\u00e9ble') ), ( ue('\u6e2c\u8a66'), ue('col_\u6e2c\u8a66'), ue('ix_\u6e2c\u8a66') ), ] # as you can see, our options for this kind of thing # are really limited unless you're on PG or SQLite # forget about it on these backends if not testing.requires.unicode_ddl.enabled: names = no_multibyte_period # mysql can't handle casing usually elif testing.against("mysql") and \ not testing.requires.mysql_fully_case_sensitive.enabled: names = no_multibyte_period.union(no_case_sensitivity) # mssql + pyodbc + freetds can't compare multibyte names to # information_schema.tables.table_name elif testing.against("mssql"): names = 
no_multibyte_period.union(no_has_table) else: names = no_multibyte_period.union(full) for tname, cname, ixname in names: t = Table(tname, metadata, Column('id', sa.Integer, sa.Sequence(cname + '_id_seq'), primary_key=True), Column(cname, Integer) ) schema.Index(ixname, t.c[cname]) metadata.create_all(testing.db) cls.names = names @classmethod def teardown_class(cls): cls.metadata.drop_all(testing.db, checkfirst=False) @testing.requires.unicode_connections def test_has_table(self): for tname, cname, ixname in self.names: assert testing.db.has_table(tname), "Can't detect name %s" % tname @testing.requires.unicode_connections def test_basic(self): # the 'convert_unicode' should not get in the way of the # reflection process. reflecttable for oracle, postgresql # (others?) expect non-unicode strings in result sets/bind # params bind = testing.db names = set([rec[0] for rec in self.names]) reflected = set(bind.table_names()) # Jython 2.5 on Java 5 lacks unicodedata.normalize if not names.issubset(reflected) and hasattr(unicodedata, 'normalize'): # Python source files in the utf-8 coding seem to # normalize literals as NFC (and the above are # explicitly NFC). Maybe this database normalizes NFD # on reflection. nfc = set([unicodedata.normalize('NFC', n) for n in names]) self.assert_(nfc == names) # Yep. But still ensure that bulk reflection and # create/drop work with either normalization. r = MetaData(bind) r.reflect() r.drop_all(checkfirst=False) r.create_all(checkfirst=False) @testing.requires.unicode_connections def test_get_names(self): inspector = inspect(testing.db) names = dict( (tname, (cname, ixname)) for tname, cname, ixname in self.names ) for tname in inspector.get_table_names(): assert tname in names eq_( [ (rec['name'], rec['column_names'][0]) for rec in inspector.get_indexes(tname) ], [(names[tname][1], names[tname][0])] ) class SchemaTest(fixtures.TestBase): __backend__ = True @testing.requires.schemas @testing.requires.cross_schema_fk_reflection def test_has_schema(self): eq_(testing.db.dialect.has_schema(testing.db, testing.config.test_schema), True) eq_(testing.db.dialect.has_schema(testing.db, 'sa_fake_schema_123'), False) @testing.requires.schemas @testing.fails_on('sqlite', 'FIXME: unknown') @testing.fails_on('sybase', 'FIXME: unknown') def test_explicit_default_schema(self): engine = testing.db engine.connect().close() if testing.against('sqlite'): # Works for CREATE TABLE main.foo, SELECT FROM main.foo, etc., # but fails on: # FOREIGN KEY(col2) REFERENCES main.table1 (col1) schema = 'main' else: schema = engine.dialect.default_schema_name assert bool(schema) metadata = MetaData(engine) Table('table1', metadata, Column('col1', sa.Integer, primary_key=True), test_needs_fk=True, schema=schema) Table('table2', metadata, Column('col1', sa.Integer, primary_key=True), Column('col2', sa.Integer, sa.ForeignKey('%s.table1.col1' % schema)), test_needs_fk=True, schema=schema) try: metadata.create_all() metadata.create_all(checkfirst=True) assert len(metadata.tables) == 2 metadata.clear() Table('table1', metadata, autoload=True, schema=schema) Table('table2', metadata, autoload=True, schema=schema) assert len(metadata.tables) == 2 finally: metadata.drop_all() @testing.requires.schemas @testing.fails_on('sybase', 'FIXME: unknown') def test_explicit_default_schema_metadata(self): engine = testing.db if testing.against('sqlite'): # Works for CREATE TABLE main.foo, SELECT FROM main.foo, etc., # but fails on: # FOREIGN KEY(col2) REFERENCES main.table1 (col1) schema = 'main' else: schema = 
engine.dialect.default_schema_name assert bool(schema) metadata = MetaData(engine, schema=schema) Table('table1', metadata, Column('col1', sa.Integer, primary_key=True), test_needs_fk=True) Table('table2', metadata, Column('col1', sa.Integer, primary_key=True), Column('col2', sa.Integer, sa.ForeignKey('table1.col1')), test_needs_fk=True) try: metadata.create_all() metadata.create_all(checkfirst=True) assert len(metadata.tables) == 2 metadata.clear() Table('table1', metadata, autoload=True) Table('table2', metadata, autoload=True) assert len(metadata.tables) == 2 finally: metadata.drop_all() @testing.requires.schemas @testing.provide_metadata def test_metadata_reflect_schema(self): metadata = self.metadata createTables(metadata, testing.config.test_schema) metadata.create_all() m2 = MetaData(schema=testing.config.test_schema, bind=testing.db) m2.reflect() eq_( set(m2.tables), set([ '%s.dingalings' % testing.config.test_schema, '%s.users' % testing.config.test_schema, '%s.email_addresses' % testing.config.test_schema ]) ) @testing.requires.schemas @testing.requires.cross_schema_fk_reflection @testing.provide_metadata def test_reflect_all_schemas_default_overlap(self): t1 = Table('t', self.metadata, Column('id', Integer, primary_key=True)) t2 = Table('t', self.metadata, Column('id1', sa.ForeignKey('t.id')), schema=testing.config.test_schema ) self.metadata.create_all() m2 = MetaData() m2.reflect(testing.db, schema=testing.config.test_schema) m3 = MetaData() m3.reflect(testing.db) m3.reflect(testing.db, schema=testing.config.test_schema) eq_( set((t.name, t.schema) for t in m2.tables.values()), set((t.name, t.schema) for t in m3.tables.values()) ) # Tests related to engine.reflection def createTables(meta, schema=None): if schema: schema_prefix = schema + "." 
else: schema_prefix = "" users = Table('users', meta, Column('user_id', sa.INT, primary_key=True), Column('user_name', sa.VARCHAR(20), nullable=False), Column('test1', sa.CHAR(5), nullable=False), Column('test2', sa.Float(5), nullable=False), Column('test3', sa.Text), Column('test4', sa.Numeric(10, 2), nullable=False), Column('test5', sa.Date), Column('test5_1', sa.TIMESTAMP), Column('parent_user_id', sa.Integer, sa.ForeignKey('%susers.user_id' % schema_prefix)), Column('test6', sa.Date, nullable=False), Column('test7', sa.Text), Column('test8', sa.LargeBinary), Column('test_passivedefault2', sa.Integer, server_default='5'), Column('test9', sa.LargeBinary(100)), Column('test10', sa.Numeric(10, 2)), schema=schema, test_needs_fk=True, ) dingalings = Table("dingalings", meta, Column('dingaling_id', sa.Integer, primary_key=True), Column('address_id', sa.Integer, sa.ForeignKey( '%semail_addresses.address_id' % schema_prefix)), Column('data', sa.String(30)), schema=schema, test_needs_fk=True, ) addresses = Table('email_addresses', meta, Column('address_id', sa.Integer), Column('remote_user_id', sa.Integer, sa.ForeignKey(users.c.user_id)), Column('email_address', sa.String(20)), sa.PrimaryKeyConstraint('address_id', name='email_ad_pk'), schema=schema, test_needs_fk=True, ) return (users, addresses, dingalings) def createIndexes(con, schema=None): fullname = 'users' if schema: fullname = "%s.%s" % (schema, 'users') query = "CREATE INDEX users_t_idx ON %s (test1, test2)" % fullname con.execute(sa.sql.text(query)) @testing.requires.views def _create_views(con, schema=None): for table_name in ('users', 'email_addresses'): fullname = table_name if schema: fullname = "%s.%s" % (schema, table_name) view_name = fullname + '_v' query = "CREATE VIEW %s AS SELECT * FROM %s" % (view_name, fullname) con.execute(sa.sql.text(query)) @testing.requires.views def _drop_views(con, schema=None): for table_name in ('email_addresses', 'users'): fullname = table_name if schema: fullname = "%s.%s" % (schema, table_name) view_name = fullname + '_v' query = "DROP VIEW %s" % view_name con.execute(sa.sql.text(query)) class ReverseCasingReflectTest(fixtures.TestBase, AssertsCompiledSQL): __dialect__ = 'default' __backend__ = True @testing.requires.denormalized_names def setup(self): testing.db.execute(""" CREATE TABLE weird_casing( col1 char(20), "Col2" char(20), "col3" char(20) ) """) @testing.requires.denormalized_names def teardown(self): testing.db.execute("drop table weird_casing") @testing.requires.denormalized_names def test_direct_quoting(self): m = MetaData(testing.db) t = Table('weird_casing', m, autoload=True) self.assert_compile(t.select(), 'SELECT weird_casing.col1, ' 'weird_casing."Col2", weird_casing."col3" ' 'FROM weird_casing') class CaseSensitiveTest(fixtures.TablesTest): """Nail down case sensitive behaviors, mostly on MySQL.""" __backend__ = True @classmethod def define_tables(cls, metadata): Table('SomeTable', metadata, Column('x', Integer, primary_key=True), test_needs_fk=True ) Table('SomeOtherTable', metadata, Column('x', Integer, primary_key=True), Column('y', Integer, sa.ForeignKey("SomeTable.x")), test_needs_fk=True ) @testing.fails_if(testing.requires._has_mysql_on_windows) def test_table_names(self): x = testing.db.run_callable( testing.db.dialect.get_table_names ) assert set(["SomeTable", "SomeOtherTable"]).issubset(x) def test_reflect_exact_name(self): m = MetaData() t1 = Table("SomeTable", m, autoload=True, autoload_with=testing.db) eq_(t1.name, "SomeTable") assert t1.c.x is not None 
@testing.fails_if(lambda: testing.against(('mysql', '<', (5, 5))) and not testing.requires._has_mysql_fully_case_sensitive() ) def test_reflect_via_fk(self): m = MetaData() t2 = Table("SomeOtherTable", m, autoload=True, autoload_with=testing.db) eq_(t2.name, "SomeOtherTable") assert "SomeTable" in m.tables @testing.fails_if(testing.requires._has_mysql_fully_case_sensitive) @testing.fails_on_everything_except('sqlite', 'mysql', 'mssql') def test_reflect_case_insensitive(self): m = MetaData() t2 = Table("sOmEtAbLe", m, autoload=True, autoload_with=testing.db) eq_(t2.name, "sOmEtAbLe") class ColumnEventsTest(fixtures.RemovesEvents, fixtures.TestBase): __backend__ = True @classmethod def setup_class(cls): cls.metadata = MetaData() cls.to_reflect = Table( 'to_reflect', cls.metadata, Column('x', sa.Integer, primary_key=True), Column('y', sa.Integer), test_needs_fk=True ) cls.related = Table( 'related', cls.metadata, Column('q', sa.Integer, sa.ForeignKey('to_reflect.x')), test_needs_fk=True ) sa.Index("some_index", cls.to_reflect.c.y) cls.metadata.create_all(testing.db) @classmethod def teardown_class(cls): cls.metadata.drop_all(testing.db) def _do_test(self, col, update, assert_, tablename="to_reflect"): # load the actual Table class, not the test # wrapper from sqlalchemy.schema import Table m = MetaData(testing.db) def column_reflect(insp, table, column_info): if column_info['name'] == col: column_info.update(update) t = Table(tablename, m, autoload=True, listeners=[ ('column_reflect', column_reflect), ]) assert_(t) m = MetaData(testing.db) self.event_listen(Table, 'column_reflect', column_reflect) t2 = Table(tablename, m, autoload=True) assert_(t2) def test_override_key(self): def assertions(table): eq_(table.c.YXZ.name, "x") eq_(set(table.primary_key), set([table.c.YXZ])) self._do_test( "x", {"key": "YXZ"}, assertions ) def test_override_index(self): def assertions(table): idx = list(table.indexes)[0] eq_(idx.columns, [table.c.YXZ]) self._do_test( "y", {"key": "YXZ"}, assertions ) def test_override_key_fk(self): m = MetaData(testing.db) def column_reflect(insp, table, column_info): if column_info['name'] == 'q': column_info['key'] = 'qyz' elif column_info['name'] == 'x': column_info['key'] = 'xyz' to_reflect = Table("to_reflect", m, autoload=True, listeners=[ ('column_reflect', column_reflect), ]) related = Table("related", m, autoload=True, listeners=[ ('column_reflect', column_reflect), ]) assert related.c.qyz.references(to_reflect.c.xyz) def test_override_type(self): def assert_(table): assert isinstance(table.c.x.type, sa.String) self._do_test( "x", {"type": sa.String}, assert_ ) def test_override_info(self): self._do_test( "x", {"info": {"a": "b"}}, lambda table: eq_(table.c.x.info, {"a": "b"}) )
mit
datjwu/rbtools
rbtools/api/errors.py
4
1553
from __future__ import unicode_literals

import six


class APIError(Exception):
    def __init__(self, http_status, error_code, rsp=None, *args, **kwargs):
        Exception.__init__(self, *args, **kwargs)
        self.http_status = http_status
        self.error_code = error_code
        self.rsp = rsp

    def __str__(self):
        code_str = 'HTTP %d' % self.http_status

        if self.error_code:
            code_str += ', API Error %d' % self.error_code

        if self.rsp and 'err' in self.rsp:
            return '%s (%s)' % (self.rsp['err']['msg'], code_str)
        else:
            return code_str


class AuthorizationError(APIError):
    pass


class BadRequestError(APIError):
    def __str__(self):
        lines = [super(BadRequestError, self).__str__()]

        if self.rsp and 'fields' in self.rsp:
            lines.append('')

            for field, error in six.iteritems(self.rsp['fields']):
                lines.append(' %s: %s' % (field, '; '.join(error)))

        return '\n'.join(lines)


class CacheError(Exception):
    """An exception for caching errors."""


class ServerInterfaceError(Exception):
    def __init__(self, msg, *args, **kwargs):
        Exception.__init__(self, *args, **kwargs)
        self.msg = msg

    def __str__(self):
        return self.msg


API_ERROR_TYPE = {
    400: BadRequestError,
    401: AuthorizationError,
}


def create_api_error(http_status, *args, **kwargs):
    error_type = API_ERROR_TYPE.get(http_status, APIError)
    return error_type(http_status, *args, **kwargs)
mit
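A minimal usage sketch for the error classes in rbtools/api/errors.py above (not part of the archived file): the HTTP status, error code, and rsp payload are illustrative assumptions, and the sketch only shows how create_api_error selects an exception class through API_ERROR_TYPE.

# Hedged sketch: assumed status code and rsp payload, not real server output.
from rbtools.api.errors import create_api_error, AuthorizationError

rsp = {'err': {'code': 101, 'msg': 'You are not logged in'}}

# HTTP 401 maps to AuthorizationError; unmapped statuses fall back to APIError.
error = create_api_error(401, 101, rsp=rsp)

assert isinstance(error, AuthorizationError)
print(error)  # "You are not logged in (HTTP 401, API Error 101)"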
shakamunyi/tensorflow
tensorflow/contrib/framework/python/ops/audio_ops.py
80
1186
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================

# pylint: disable=g-short-docstring-punctuation
"""Audio processing and decoding ops.

@@decode_wav
@@encode_wav
@@audio_spectrogram
@@mfcc
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

# go/tf-wildcard-import
# pylint: disable=wildcard-import
from tensorflow.python.ops.gen_audio_ops import *
# pylint: enable=wildcard-import

from tensorflow.python.util.all_util import remove_undocumented

remove_undocumented(__name__, [])
apache-2.0
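A hedged sketch of how the ops re-exported by audio_ops.py might be used; it assumes TensorFlow 1.x, an input file name chosen for illustration, and op signatures taken from the generated gen_audio_ops wrappers rather than from this shim module itself.

# Hedged sketch (assumes TF 1.x graph mode); 'speech.wav' and the window
# parameters are illustrative assumptions.
import tensorflow as tf
from tensorflow.contrib.framework.python.ops import audio_ops

wav_bytes = tf.read_file('speech.wav')
decoded = audio_ops.decode_wav(wav_bytes, desired_channels=1)
spectrogram = audio_ops.audio_spectrogram(decoded.audio,
                                          window_size=1024,
                                          stride=512,
                                          magnitude_squared=True)

with tf.Session() as sess:
    print(sess.run(spectrogram).shape)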
wbond/subversion
tools/dist/release.py
2
22908
#!/usr/bin/env python # # # Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. # # About this script: # This script is intended to simplify creating Subversion releases, by # automating as much as is possible. It works well with our Apache # infrastructure, and should make rolling, posting, and announcing # releases dirt simple. # # This script may be run on a number of platforms, but it is intended to # be run on people.apache.org. As such, it may have dependencies (such # as Python version) which may not be common, but are guaranteed to be # available on people.apache.org. # It'd be kind of nice to use the Subversion python bindings in this script, # but people.apache.org doesn't currently have them installed # Stuff we need import os import re import sys import glob import shutil import urllib2 import hashlib import tarfile import logging import datetime import operator import itertools import subprocess import argparse # standard in Python 2.7 # Find ezt, using Subversion's copy, if there isn't one on the system. try: import ezt except ImportError: ezt_path = os.path.dirname(os.path.dirname(os.path.abspath(sys.path[0]))) ezt_path = os.path.join(ezt_path, 'build', 'generator') sys.path.append(ezt_path) import ezt # Our required / recommended versions autoconf_ver = '2.68' libtool_ver = '2.4' swig_ver = '2.0.4' # Some constants repos = 'http://svn.apache.org/repos/asf/subversion' people_host = 'minotaur.apache.org' people_dist_dir = '/www/www.apache.org/dist/subversion' #---------------------------------------------------------------------- # Utility functions class Version(object): regex = re.compile('(\d+).(\d+).(\d+)(?:-(?:(rc|alpha|beta)(\d+)))?') def __init__(self, ver_str): match = self.regex.search(ver_str) if not match: raise RuntimeError("Bad version string '%s'" % ver_str) self.major = int(match.group(1)) self.minor = int(match.group(2)) self.patch = int(match.group(3)) if match.group(4): self.pre = match.group(4) self.pre_num = int(match.group(5)) else: self.pre = None self.pre_num = None self.base = '%d.%d.%d' % (self.major, self.minor, self.patch) def is_prerelease(self): return self.pre != None def __lt__(self, that): if self.major < that.major: return True if self.major > that.major: return False if self.minor < that.minor: return True if self.minor > that.minor: return False if self.patch < that.patch: return True if self.patch > that.patch: return False if not self.pre and not that.pre: return False if not self.pre and that.pre: return False if self.pre and not that.pre: return True # We are both pre-releases if self.pre != that.pre: return self.pre < that.pre else: return self.pre_num < that.pre_num def __str(self): if self.pre: extra = '-%s%d' % (self.pre, self.pre_num) else: extra = '' return self.base + extra def __repr__(self): return "Version('%s')" % 
self.__str() def __str__(self): return self.__str() def get_prefix(base_dir): return os.path.join(base_dir, 'prefix') def get_tempdir(base_dir): return os.path.join(base_dir, 'tempdir') def get_deploydir(base_dir): return os.path.join(base_dir, 'deploy') def get_tmpldir(): return os.path.join(os.path.abspath(sys.path[0]), 'templates') def get_tmplfile(filename): try: return open(os.path.join(get_tmpldir(), filename)) except IOError: # Hmm, we had a problem with the local version, let's try the repo return urllib2.urlopen(repos + '/trunk/tools/dist/templates/' + filename) def get_nullfile(): # This is certainly not cross platform return open('/dev/null', 'w') def run_script(verbose, script): if verbose: stdout = None stderr = None else: stdout = get_nullfile() stderr = subprocess.STDOUT for l in script.split('\n'): subprocess.check_call(l.split(), stdout=stdout, stderr=stderr) def download_file(url, target): response = urllib2.urlopen(url) target_file = open(target, 'w') target_file.write(response.read()) def assert_people(): if os.uname()[1] != people_host: raise RuntimeError('Not running on expected host "%s"' % people_host) #---------------------------------------------------------------------- # Cleaning up the environment def cleanup(args): 'Remove generated files and folders.' logging.info('Cleaning') shutil.rmtree(get_prefix(args.base_dir), True) shutil.rmtree(get_tempdir(args.base_dir), True) shutil.rmtree(get_deploydir(args.base_dir), True) #---------------------------------------------------------------------- # Creating an environment to roll the release class RollDep(object): 'The super class for each of the build dependencies.' def __init__(self, base_dir, use_existing, verbose): self._base_dir = base_dir self._use_existing = use_existing self._verbose = verbose def _test_version(self, cmd): proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT) (stdout, stderr) = proc.communicate() rc = proc.wait() if rc: return '' return stdout.split('\n') def build(self): if not hasattr(self, '_extra_configure_flags'): self._extra_configure_flags = '' cwd = os.getcwd() tempdir = get_tempdir(self._base_dir) tarball = os.path.join(tempdir, self._filebase + '.tar.gz') if os.path.exists(tarball): if not self._use_existing: raise RuntimeError('autoconf tarball "%s" already exists' % tarball) logging.info('Using existing %s.tar.gz' % self._filebase) else: logging.info('Fetching %s' % self._filebase) download_file(self._url, tarball) # Extract tarball tarfile.open(tarball).extractall(tempdir) logging.info('Building ' + self.label) os.chdir(os.path.join(tempdir, self._filebase)) run_script(self._verbose, '''./configure --prefix=%s %s make make install''' % (get_prefix(self._base_dir), self._extra_configure_flags)) os.chdir(cwd) class AutoconfDep(RollDep): def __init__(self, base_dir, use_existing, verbose): RollDep.__init__(self, base_dir, use_existing, verbose) self.label = 'autoconf' self._filebase = 'autoconf-' + autoconf_ver self._url = 'http://ftp.gnu.org/gnu/autoconf/%s.tar.gz' % self._filebase def have_usable(self): output = self._test_version(['autoconf', '-V']) if not output: return False version = output[0].split()[-1:][0] return version == autoconf_ver def use_system(self): if not self._use_existing: return False return self.have_usable() class LibtoolDep(RollDep): def __init__(self, base_dir, use_existing, verbose): RollDep.__init__(self, base_dir, use_existing, verbose) self.label = 'libtool' self._filebase = 'libtool-' + libtool_ver self._url = 
'http://ftp.gnu.org/gnu/libtool/%s.tar.gz' % self._filebase def have_usable(self): output = self._test_version(['libtool', '--version']) if not output: return False version = output[0].split()[-1:][0] return version == libtool_ver def use_system(self): # We unconditionally return False here, to avoid using a borked # system libtool (I'm looking at you, Debian). return False class SwigDep(RollDep): def __init__(self, base_dir, use_existing, verbose, sf_mirror): RollDep.__init__(self, base_dir, use_existing, verbose) self.label = 'swig' self._filebase = 'swig-' + swig_ver self._url = 'http://sourceforge.net/projects/swig/files/swig/%(swig)s/%(swig)s.tar.gz/download?use_mirror=%(sf_mirror)s' % \ { 'swig' : self._filebase, 'sf_mirror' : sf_mirror } self._extra_configure_flags = '--without-pcre' def have_usable(self): output = self._test_version(['swig', '-version']) if not output: return False version = output[1].split()[-1:][0] return version == swig_ver def use_system(self): if not self._use_existing: return False return self.have_usable() def build_env(args): 'Download prerequisites for a release and prepare the environment.' logging.info('Creating release environment') try: os.mkdir(get_prefix(args.base_dir)) os.mkdir(get_tempdir(args.base_dir)) except OSError: if not args.use_existing: raise autoconf = AutoconfDep(args.base_dir, args.use_existing, args.verbose) libtool = LibtoolDep(args.base_dir, args.use_existing, args.verbose) swig = SwigDep(args.base_dir, args.use_existing, args.verbose, args.sf_mirror) # iterate over our rolling deps, and build them if needed for dep in [autoconf, libtool, swig]: if dep.use_system(): logging.info('Using system %s' % dep.label) else: dep.build() #---------------------------------------------------------------------- # Create release artifacts def roll_tarballs(args): 'Create the release artifacts.' 
extns = ['zip', 'tar.gz', 'tar.bz2'] if args.branch: branch = args.branch else: branch = args.version.base[:-1] + 'x' logging.info('Rolling release %s from branch %s@%d' % (args.version, branch, args.revnum)) # Ensure we've got the appropriate rolling dependencies available autoconf = AutoconfDep(args.base_dir, False, args.verbose) libtool = LibtoolDep(args.base_dir, False, args.verbose) swig = SwigDep(args.base_dir, False, args.verbose, None) for dep in [autoconf, libtool, swig]: if not dep.have_usable(): raise RuntimeError('Cannot find usable %s' % dep.label) # Make sure CHANGES is sync'd if branch != 'trunk': trunk_CHANGES = '%s/trunk/CHANGES@%d' % (repos, args.revnum) branch_CHANGES = '%s/branches/%s/CHANGES@%d' % (repos, branch, args.revnum) proc = subprocess.Popen(['svn', 'diff', '--summarize', branch_CHANGES, trunk_CHANGES], stdout=subprocess.PIPE, stderr=subprocess.STDOUT) (stdout, stderr) = proc.communicate() proc.wait() if stdout: raise RuntimeError('CHANGES not synced between trunk and branch') # Create the output directory if not os.path.exists(get_deploydir(args.base_dir)): os.mkdir(get_deploydir(args.base_dir)) # For now, just delegate to dist.sh to create the actual artifacts extra_args = '' if args.version.is_prerelease(): extra_args = '-%s %d' % (args.version.pre, args.version.pre_num) logging.info('Building UNIX tarballs') run_script(args.verbose, '%s/dist.sh -v %s -pr %s -r %d %s' % (sys.path[0], args.version.base, branch, args.revnum, extra_args) ) logging.info('Buildling Windows tarballs') run_script(args.verbose, '%s/dist.sh -v %s -pr %s -r %d -zip %s' % (sys.path[0], args.version.base, branch, args.revnum, extra_args) ) # Move the results to the deploy directory logging.info('Moving artifacts and calculating checksums') for e in extns: if args.version.pre == 'nightly': filename = 'subversion-trunk.%s' % e else: filename = 'subversion-%s.%s' % (args.version, e) shutil.move(filename, get_deploydir(args.base_dir)) filename = os.path.join(get_deploydir(args.base_dir), filename) m = hashlib.sha1() m.update(open(filename, 'r').read()) open(filename + '.sha1', 'w').write(m.hexdigest()) shutil.move('svn_version.h.dist', get_deploydir(args.base_dir)) # And we're done! #---------------------------------------------------------------------- # Post the candidate release artifacts def post_candidates(args): 'Post the generated tarballs to web-accessible directory.' if args.target: target = args.target else: target = os.path.join(os.getenv('HOME'), 'public_html', 'svn', args.version) if args.code_name: dirname = args.code_name else: dirname = 'deploy' if not os.path.exists(target): os.makedirs(target) data = { 'version' : args.version, 'revnum' : args.revnum, 'dirname' : dirname, } # Choose the right template text if args.version.is_prerelease(): if args.version.pre == 'nightly': template_filename = 'nightly-candidates.ezt' else: template_filename = 'rc-candidates.ezt' else: template_filename = 'stable-candidates.ezt' template = ezt.Template() template.parse(get_tmplfile(template_filename).read()) template.generate(open(os.path.join(target, 'index.html'), 'w'), data) logging.info('Moving tarballs to %s' % os.path.join(target, dirname)) if os.path.exists(os.path.join(target, dirname)): shutil.rmtree(os.path.join(target, dirname)) shutil.copytree(get_deploydir(args.base_dir), os.path.join(target, dirname)) #---------------------------------------------------------------------- # Clean dist def clean_dist(args): 'Clean the distribution directory of all but the most recent artifacts.' 
regex = re.compile('subversion-(\d+).(\d+).(\d+)(?:-(?:(rc|alpha|beta)(\d+)))?') if not args.dist_dir: assert_people() args.dist_dir = people_dist_dir logging.info('Cleaning dist dir \'%s\'' % args.dist_dir) filenames = glob.glob(os.path.join(args.dist_dir, 'subversion-*.tar.gz')) versions = [] for filename in filenames: versions.append(Version(filename)) for k, g in itertools.groupby(sorted(versions), lambda x: (x.major, x.minor)): releases = list(g) logging.info("Saving release '%s'", releases[-1]) for r in releases[:-1]: for filename in glob.glob(os.path.join(args.dist_dir, 'subversion-%s.*' % r)): logging.info("Removing '%s'" % filename) os.remove(filename) #---------------------------------------------------------------------- # Write announcements def write_news(args): 'Write text for the Subversion website.' data = { 'date' : datetime.date.today().strftime('%Y%m%d'), 'date_pres' : datetime.date.today().strftime('%Y-%m-%d'), 'version' : str(args.version), 'version_base' : args.version.base, } if args.version.is_prerelease(): template_filename = 'rc-news.ezt' else: template_filename = 'stable-news.ezt' template = ezt.Template() template.parse(get_tmplfile(template_filename).read()) template.generate(sys.stdout, data) def get_sha1info(args): 'Return a list of sha1 info for the release' sha1s = glob.glob(os.path.join(get_deploydir(args.base_dir), '*.sha1')) class info(object): pass sha1info = [] for s in sha1s: i = info() i.filename = os.path.basename(s)[:-5] i.sha1 = open(s, 'r').read() sha1info.append(i) return sha1info def write_announcement(args): 'Write the release announcement.' sha1info = get_sha1info(args) data = { 'version' : args.version, 'sha1info' : sha1info, 'siginfo' : open('getsigs-output', 'r').read(), 'major-minor' : args.version.base[:3], 'major-minor-patch' : args.version.base, } if args.version.is_prerelease(): template_filename = 'rc-release-ann.ezt' else: template_filename = 'stable-release-ann.ezt' template = ezt.Template(compress_whitespace = False) template.parse(get_tmplfile(template_filename).read()) template.generate(sys.stdout, data) #---------------------------------------------------------------------- # Main entry point for argument parsing and handling def main(): 'Parse arguments, and drive the appropriate subcommand.' # Setup our main parser parser = argparse.ArgumentParser( description='Create an Apache Subversion release.') parser.add_argument('--clean', action='store_true', default=False, help='Remove any directories previously created by %(prog)s') parser.add_argument('--verbose', action='store_true', default=False, help='Increase output verbosity') parser.add_argument('--base-dir', default=os.getcwd(), help='''The directory in which to create needed files and folders. The default is the current working directory.''') subparsers = parser.add_subparsers(title='subcommands') # Setup the parser for the build-env subcommand subparser = subparsers.add_parser('build-env', help='''Download release prerequisistes, including autoconf, libtool, and swig.''') subparser.set_defaults(func=build_env) subparser.add_argument('--sf-mirror', default='softlayer', help='''The mirror to use for downloading files from SourceForge. 
If in the EU, you may want to use 'kent' for this value.''') subparser.add_argument('--use-existing', action='store_true', default=False, help='''Attempt to use existing build dependencies before downloading and building a private set.''') # Setup the parser for the roll subcommand subparser = subparsers.add_parser('roll', help='''Create the release artifacts.''') subparser.set_defaults(func=roll_tarballs) subparser.add_argument('version', type=Version, help='''The release label, such as '1.7.0-alpha1'.''') subparser.add_argument('revnum', type=int, help='''The revision number to base the release on.''') subparser.add_argument('--branch', help='''The branch to base the release on.''') # Setup the parser for the post-candidates subcommand subparser = subparsers.add_parser('post-candidates', help='''Build the website to host the candidate tarballs. The default location is somewhere in ~/public_html. ''') subparser.set_defaults(func=post_candidates) subparser.add_argument('version', type=Version, help='''The release label, such as '1.7.0-alpha1'.''') subparser.add_argument('revnum', type=int, help='''The revision number to base the release on.''') subparser.add_argument('--target', help='''The full path to the destination.''') subparser.add_argument('--code-name', help='''A whimsical name for the release, used only for naming the download directory.''') # The clean-dist subcommand subparser = subparsers.add_parser('clean-dist', help='''Clean the distribution directory (and mirrors) of all but the most recent MAJOR.MINOR release. If no dist-dir is given, this command will assume it is running on people.apache.org.''') subparser.set_defaults(func=clean_dist) subparser.add_argument('--dist-dir', help='''The directory to clean.''') # The write-news subcommand subparser = subparsers.add_parser('write-news', help='''Output to stdout template text for use in the news section of the Subversion website.''') subparser.set_defaults(func=write_news) subparser.add_argument('version', type=Version, help='''The release label, such as '1.7.0-alpha1'.''') subparser = subparsers.add_parser('write-announcement', help='''Output to stdout template text for the emailed release announcement.''') subparser.set_defaults(func=write_announcement) subparser.add_argument('version', type=Version, help='''The release label, such as '1.7.0-alpha1'.''') # A meta-target subparser = subparsers.add_parser('clean', help='''The same as the '--clean' switch, but as a separate subcommand.''') subparser.set_defaults(func=cleanup) # Parse the arguments args = parser.parse_args() # first, process any global operations if args.clean: cleanup(args) # Set up logging logger = logging.getLogger() if args.verbose: logger.setLevel(logging.DEBUG) else: logger.setLevel(logging.INFO) # Fix up our path so we can use our installed versions os.environ['PATH'] = os.path.join(get_prefix(args.base_dir), 'bin') + ':' \ + os.environ['PATH'] # finally, run the subcommand, and give it the parsed arguments args.func(args) if __name__ == '__main__': main()
apache-2.0
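A small sketch grounded in the Version class defined in release.py above, showing the pre-release ordering its __lt__ implements; the version strings are arbitrary examples and the sketch assumes the file is importable as a module named release.

# Hedged sketch: pre-releases sort before the corresponding final release,
# and alpha < beta < rc by string comparison of the pre tag.
from release import Version  # assumes tools/dist/release.py is on sys.path

assert Version('1.7.0-alpha1') < Version('1.7.0-beta1')
assert Version('1.7.0-rc2') < Version('1.7.0')
assert not (Version('1.7.1') < Version('1.7.0'))
print(Version('1.7.0-rc2'))  # 1.7.0-rc2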
rouault/Quantum-GIS
tests/src/python/test_qgsserver_wms_getlegendgraphic.py
4
18330
# -*- coding: utf-8 -*- """QGIS Unit tests for QgsServer WMS. From build dir, run: ctest -R PyQgsServerWMS -V .. note:: This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 2 of the License, or (at your option) any later version. """ __author__ = 'Alessandro Pasotti' __date__ = '25/05/2015' __copyright__ = 'Copyright 2015, The QGIS Project' # This will get replaced with a git SHA1 when you do a git archive __revision__ = '$Format:%H$' import os # Needed on Qt 5 so that the serialization of XML is consistent among all executions os.environ['QT_HASH_SEED'] = '1' import re import urllib.request import urllib.parse import urllib.error from qgis.testing import unittest from qgis.PyQt.QtCore import QSize import osgeo.gdal # NOQA from test_qgsserver import QgsServerTestBase from qgis.core import QgsProject # Strip path and content length because path may vary RE_STRIP_UNCHECKABLE = b'MAP=[^"]+|Content-Length: \d+' RE_ATTRIBUTES = b'[^>\s]+=[^>\s]+' class TestQgsServerWMSGetLegendGraphic(QgsServerTestBase): """QGIS Server WMS Tests for GetLegendGraphic request""" def test_getLegendGraphics(self): """Test that does not return an exception but an image""" parms = { 'MAP': self.testdata_path + "test_project.qgs", 'SERVICE': 'WMS', 'VERSION': '1.3.0', 'REQUEST': 'GetLegendGraphic', 'FORMAT': 'image/png', # 'WIDTH': '20', # optional # 'HEIGHT': '20', # optional 'LAYER': 'testlayer%20èé', } qs = '?' + '&'.join(["%s=%s" % (k, v) for k, v in parms.items()]) h, r = self._execute_request(qs) self.assertEqual(-1, h.find(b'Content-Type: text/xml; charset=utf-8'), "Header: %s\nResponse:\n%s" % (h, r)) self.assertNotEqual(-1, h.find(b'Content-Type: image/png'), "Header: %s\nResponse:\n%s" % (h, r)) def test_getLegendGraphics_invalid_parameters(self): """Test that does return an exception""" qs = "?" + "&".join(["%s=%s" % i for i in list({ "MAP": urllib.parse.quote(self.projectPath), "SERVICE": "WMS", "VERSION": "1.1.1", "REQUEST": "GetLegendGraphic", "LAYER": "Country,Hello,db_point", "LAYERTITLE": "FALSE", "FORMAT": "image/png", "HEIGHT": "500", "WIDTH": "500", "RULE": "1", "BBOX": "-151.7,-38.9,51.0,78.0", "CRS": "EPSG:4326" }.items())]) r, h = self._result(self._execute_request(qs)) err = b"BBOX parameter cannot be combined with RULE" in r self.assertTrue(err) def test_wms_GetLegendGraphic_LayerSpace(self): qs = "?" + "&".join(["%s=%s" % i for i in list({ "MAP": urllib.parse.quote(self.projectPath), "SERVICE": "WMS", "VERSION": "1.1.1", "REQUEST": "GetLegendGraphic", "LAYER": "Country,Hello", "FORMAT": "image/png", # "HEIGHT": "500", # "WIDTH": "500", "LAYERSPACE": "50.0", "LAYERFONTBOLD": "TRUE", "LAYERFONTSIZE": "30", "ITEMFONTBOLD": "TRUE", "ITEMFONTSIZE": "20", "LAYERFONTFAMILY": self.fontFamily, "ITEMFONTFAMILY": self.fontFamily, "LAYERTITLE": "TRUE", "CRS": "EPSG:3857" }.items())]) r, h = self._result(self._execute_request(qs)) self._img_diff_error(r, h, "WMS_GetLegendGraphic_LayerSpace") def test_wms_GetLegendGraphic_ShowFeatureCount(self): qs = "?" 
+ "&".join(["%s=%s" % i for i in list({ "MAP": urllib.parse.quote(self.projectPath), "SERVICE": "WMS", "VERSION": "1.1.1", "REQUEST": "GetLegendGraphic", "LAYER": "Country,Hello", "FORMAT": "image/png", # "HEIGHT": "500", # "WIDTH": "500", "LAYERTITLE": "TRUE", "LAYERFONTBOLD": "TRUE", "LAYERFONTSIZE": "30", "LAYERFONTFAMILY": self.fontFamily, "ITEMFONTFAMILY": self.fontFamily, "ITEMFONTBOLD": "TRUE", "ITEMFONTSIZE": "20", "SHOWFEATURECOUNT": "TRUE", "CRS": "EPSG:3857" }.items())]) r, h = self._result(self._execute_request(qs)) self._img_diff_error(r, h, "WMS_GetLegendGraphic_ShowFeatureCount", max_size_diff=QSize(1, 1)) def test_getLegendGraphics_layertitle(self): """Test that does not return an exception but an image""" print("TEST FONT FAMILY: ", self.fontFamily) parms = { 'MAP': self.testdata_path + "test_project.qgs", 'SERVICE': 'WMS', 'VERSION': '1.3.0', 'REQUEST': 'GetLegendGraphic', 'FORMAT': 'image/png', # 'WIDTH': '20', # optional # 'HEIGHT': '20', # optional 'LAYER': u'testlayer%20èé', 'LAYERFONTBOLD': 'TRUE', 'LAYERFONTSIZE': '30', 'ITEMFONTBOLD': 'TRUE', 'LAYERFONTFAMILY': self.fontFamily, 'ITEMFONTFAMILY': self.fontFamily, 'ITEMFONTSIZE': '20', 'LAYERTITLE': 'TRUE', } qs = '?' + '&'.join([u"%s=%s" % (k, v) for k, v in parms.items()]) r, h = self._result(self._execute_request(qs)) self._img_diff_error(r, h, "WMS_GetLegendGraphic_test", 250, QSize(15, 15)) parms = { 'MAP': self.testdata_path + "test_project.qgs", 'SERVICE': 'WMS', 'VERSION': '1.3.0', 'REQUEST': 'GetLegendGraphic', 'FORMAT': 'image/png', # 'WIDTH': '20', # optional # 'HEIGHT': '20', # optional 'LAYER': u'testlayer%20èé', 'LAYERTITLE': 'FALSE', } qs = '?' + '&'.join([u"%s=%s" % (k, v) for k, v in parms.items()]) r, h = self._result(self._execute_request(qs)) self._img_diff_error(r, h, "WMS_GetLegendGraphic_test_layertitle_false", 250, QSize(15, 15)) def test_getLegendGraphics_rulelabel(self): """Test that does not return an exception but an image""" parms = { 'MAP': self.testdata_path + "test_project.qgs", 'SERVICE': 'WMS', 'VERSION': '1.3.0', 'REQUEST': 'GetLegendGraphic', 'FORMAT': 'image/png', 'LAYER': u'testlayer%20èé', 'LAYERFONTBOLD': 'TRUE', 'LAYERFONTSIZE': '30', 'LAYERFONTFAMILY': self.fontFamily, 'ITEMFONTFAMILY': self.fontFamily, 'ITEMFONTBOLD': 'TRUE', 'ITEMFONTSIZE': '20', 'RULELABEL': 'TRUE', } qs = '?' + '&'.join([u"%s=%s" % (k, v) for k, v in parms.items()]) r, h = self._result(self._execute_request(qs)) self._img_diff_error(r, h, "WMS_GetLegendGraphic_test", 250, QSize(15, 15)) parms = { 'MAP': self.testdata_path + "test_project.qgs", 'SERVICE': 'WMS', 'VERSION': '1.3.0', 'REQUEST': 'GetLegendGraphic', 'FORMAT': 'image/png', 'LAYER': u'testlayer%20èé', 'LAYERFONTBOLD': 'TRUE', 'LAYERFONTSIZE': '30', 'ITEMFONTBOLD': 'TRUE', 'ITEMFONTSIZE': '20', 'LAYERFONTFAMILY': self.fontFamily, 'ITEMFONTFAMILY': self.fontFamily, 'RULELABEL': 'FALSE', } qs = '?' + '&'.join([u"%s=%s" % (k, v) for k, v in parms.items()]) r, h = self._result(self._execute_request(qs)) self._img_diff_error(r, h, "WMS_GetLegendGraphic_rulelabel_false", 250, QSize(15, 15)) def test_getLegendGraphics_rule(self): """Test that does not return an exception but an image""" parms = { 'MAP': self.testdata_path + "test_project_legend_rule.qgs", 'SERVICE': 'WMS', 'VERSION': '1.3.0', 'REQUEST': 'GetLegendGraphic', 'FORMAT': 'image/png', 'LAYER': u'testlayer%20èé', 'WIDTH': '20', 'HEIGHT': '20', 'RULE': 'rule0', } qs = '?' 
+ '&'.join([u"%s=%s" % (k, v) for k, v in parms.items()]) r, h = self._result(self._execute_request(qs)) self._img_diff_error(r, h, "WMS_GetLegendGraphic_rule0", 250, QSize(15, 15)) parms = { 'MAP': self.testdata_path + "test_project_legend_rule.qgs", 'SERVICE': 'WMS', 'VERSION': '1.3.0', 'REQUEST': 'GetLegendGraphic', 'FORMAT': 'image/png', 'LAYER': u'testlayer%20èé', 'WIDTH': '20', 'HEIGHT': '20', 'RULE': 'rule1', } qs = '?' + '&'.join([u"%s=%s" % (k, v) for k, v in parms.items()]) r, h = self._result(self._execute_request(qs)) self._img_diff_error(r, h, "WMS_GetLegendGraphic_rule1", 250, QSize(15, 15)) def test_wms_GetLegendGraphic_Basic(self): qs = "?" + "&".join(["%s=%s" % i for i in list({ "MAP": urllib.parse.quote(self.projectPath), "SERVICE": "WMS", "VERSION": "1.1.1", "REQUEST": "GetLegendGraphic", "LAYER": "Country,Hello", "LAYERTITLE": "FALSE", "FORMAT": "image/png", "HEIGHT": "500", "WIDTH": "500", "CRS": "EPSG:3857" }.items())]) r, h = self._result(self._execute_request(qs)) self._img_diff_error(r, h, "WMS_GetLegendGraphic_Basic") def test_wms_GetLegendGraphic_Transparent(self): qs = "?" + "&".join(["%s=%s" % i for i in list({ "MAP": urllib.parse.quote(self.projectPath), "SERVICE": "WMS", "VERSION": "1.1.1", "REQUEST": "GetLegendGraphic", "LAYER": "Country,Hello", "LAYERTITLE": "FALSE", "FORMAT": "image/png", "HEIGHT": "500", "WIDTH": "500", "CRS": "EPSG:3857", "TRANSPARENT": "TRUE" }.items())]) r, h = self._result(self._execute_request(qs)) self._img_diff_error(r, h, "WMS_GetLegendGraphic_Transparent") def test_wms_GetLegendGraphic_Background(self): qs = "?" + "&".join(["%s=%s" % i for i in list({ "MAP": urllib.parse.quote(self.projectPath), "SERVICE": "WMS", "VERSION": "1.1.1", "REQUEST": "GetLegendGraphic", "LAYER": "Country,Hello", "LAYERTITLE": "FALSE", "FORMAT": "image/png", "HEIGHT": "500", "WIDTH": "500", "CRS": "EPSG:3857", "BGCOLOR": "green" }.items())]) r, h = self._result(self._execute_request(qs)) self._img_diff_error(r, h, "WMS_GetLegendGraphic_Background") qs = "?" + "&".join(["%s=%s" % i for i in list({ "MAP": urllib.parse.quote(self.projectPath), "SERVICE": "WMS", "VERSION": "1.1.1", "REQUEST": "GetLegendGraphic", "LAYER": "Country,Hello", "LAYERTITLE": "FALSE", "FORMAT": "image/png", "HEIGHT": "500", "WIDTH": "500", "CRS": "EPSG:3857", "BGCOLOR": "0x008000" }.items())]) r, h = self._result(self._execute_request(qs)) self._img_diff_error(r, h, "WMS_GetLegendGraphic_Background_Hex") def test_wms_GetLegendGraphic_BoxSpace(self): qs = "?" + "&".join(["%s=%s" % i for i in list({ "MAP": urllib.parse.quote(self.projectPath), "SERVICE": "WMS", "VERSION": "1.1.1", "REQUEST": "GetLegendGraphic", "LAYER": "Country,Hello", "LAYERTITLE": "FALSE", "BOXSPACE": "100", "FORMAT": "image/png", "HEIGHT": "500", "WIDTH": "500", "CRS": "EPSG:3857" }.items())]) r, h = self._result(self._execute_request(qs)) self._img_diff_error(r, h, "WMS_GetLegendGraphic_BoxSpace") def test_wms_GetLegendGraphic_SymbolSpace(self): qs = "?" + "&".join(["%s=%s" % i for i in list({ "MAP": urllib.parse.quote(self.projectPath), "SERVICE": "WMS", "VERSION": "1.1.1", "REQUEST": "GetLegendGraphic", "LAYER": "Country,Hello", "LAYERTITLE": "FALSE", "SYMBOLSPACE": "100", "FORMAT": "image/png", "HEIGHT": "500", "WIDTH": "500", "CRS": "EPSG:3857" }.items())]) r, h = self._result(self._execute_request(qs)) self._img_diff_error(r, h, "WMS_GetLegendGraphic_SymbolSpace") def test_wms_GetLegendGraphic_IconLabelSpace(self): qs = "?" 
+ "&".join(["%s=%s" % i for i in list({ "MAP": urllib.parse.quote(self.projectPath), "SERVICE": "WMS", "VERSION": "1.1.1", "REQUEST": "GetLegendGraphic", "LAYER": "Country,Hello", "LAYERTITLE": "FALSE", "ICONLABELSPACE": "100", "FORMAT": "image/png", "HEIGHT": "500", "WIDTH": "500", "CRS": "EPSG:3857" }.items())]) r, h = self._result(self._execute_request(qs)) self._img_diff_error(r, h, "WMS_GetLegendGraphic_IconLabelSpace") def test_wms_GetLegendGraphic_SymbolSize(self): qs = "?" + "&".join(["%s=%s" % i for i in list({ "MAP": urllib.parse.quote(self.projectPath), "SERVICE": "WMS", "VERSION": "1.1.1", "REQUEST": "GetLegendGraphic", "LAYER": "Country,Hello", "LAYERTITLE": "FALSE", "SYMBOLWIDTH": "50", "SYMBOLHEIGHT": "30", "FORMAT": "image/png", "HEIGHT": "500", "WIDTH": "500", "CRS": "EPSG:3857" }.items())]) r, h = self._result(self._execute_request(qs)) self._img_diff_error(r, h, "WMS_GetLegendGraphic_SymbolSize") def test_wms_GetLegendGraphic_LayerFont(self): qs = "?" + "&".join(["%s=%s" % i for i in list({ "MAP": urllib.parse.quote(self.projectPath), "SERVICE": "WMS", "VERSION": "1.1.1", "REQUEST": "GetLegendGraphic", "LAYER": "Country,Hello", "LAYERTITLE": "TRUE", "LAYERFONTBOLD": "TRUE", "LAYERFONTITALIC": "TRUE", "LAYERFONTSIZE": "30", "ITEMFONTBOLD": "TRUE", "ITEMFONTSIZE": "20", "LAYERFONTFAMILY": self.fontFamily, "ITEMFONTFAMILY": self.fontFamily, "FORMAT": "image/png", "HEIGHT": "500", "WIDTH": "500", "CRS": "EPSG:3857" }.items())]) r, h = self._result(self._execute_request(qs)) self._img_diff_error(r, h, "WMS_GetLegendGraphic_LayerFont", max_size_diff=QSize(1, 1)) def test_wms_GetLegendGraphic_ItemFont(self): qs = "?" + "&".join(["%s=%s" % i for i in list({ "MAP": urllib.parse.quote(self.projectPath), "SERVICE": "WMS", "VERSION": "1.1.1", "REQUEST": "GetLegendGraphic", "LAYER": "Country,Hello", "LAYERTITLE": "TRUE", "LAYERFONTBOLD": "TRUE", "LAYERFONTSIZE": "30", "ITEMFONTBOLD": "TRUE", "ITEMFONTITALIC": "TRUE", "ITEMFONTSIZE": "20", "LAYERFONTFAMILY": self.fontFamily, "ITEMFONTFAMILY": self.fontFamily, "FORMAT": "image/png", "HEIGHT": "500", "WIDTH": "500", "CRS": "EPSG:3857" }.items())]) r, h = self._result(self._execute_request(qs)) self._img_diff_error(r, h, "WMS_GetLegendGraphic_ItemFont", max_size_diff=QSize(1, 1)) def test_wms_GetLegendGraphic_BBox(self): qs = "?" + "&".join(["%s=%s" % i for i in list({ "MAP": urllib.parse.quote(self.projectPath), "SERVICE": "WMS", "VERSION": "1.1.1", "REQUEST": "GetLegendGraphic", "LAYER": "Country,Hello,db_point", "LAYERTITLE": "FALSE", "FORMAT": "image/png", "HEIGHT": "500", "WIDTH": "500", "BBOX": "-151.7,-38.9,51.0,78.0", "CRS": "EPSG:4326" }.items())]) r, h = self._result(self._execute_request(qs)) self._img_diff_error(r, h, "WMS_GetLegendGraphic_BBox") def test_wms_GetLegendGraphic_BBox2(self): qs = "?" + "&".join(["%s=%s" % i for i in list({ "MAP": urllib.parse.quote(self.projectPath), "SERVICE": "WMS", "VERSION": "1.1.1", "REQUEST": "GetLegendGraphic", "LAYER": "Country,Hello,db_point", "LAYERTITLE": "FALSE", "FORMAT": "image/png", "HEIGHT": "500", "WIDTH": "500", "BBOX": "-76.08,-6.4,-19.38,38.04", "SRS": "EPSG:4326" }.items())]) r, h = self._result(self._execute_request(qs)) self._img_diff_error(r, h, "WMS_GetLegendGraphic_BBox2") def test_wms_GetLegendGraphic_EmptyLegend(self): qs = "?" 
+ "&".join(["%s=%s" % i for i in list({ "MAP": self.testdata_path + 'test_project_contextual_legend.qgs', "SERVICE": "WMS", "VERSION": "1.1.1", "REQUEST": "GetLegendGraphic", "LAYER": "QGIS%20Server%20Hello%20World", "FORMAT": "image/png", "HEIGHT": "840", "WIDTH": "1226", "BBOX": "10.38450,-49.6370,73.8183,42.9461", "SRS": "EPSG:4326", "SCALE": "15466642" }.items())]) h, r = self._execute_request(qs) self.assertEqual(-1, h.find(b'Content-Type: text/xml; charset=utf-8'), "Header: %s\nResponse:\n%s" % (h, r)) self.assertNotEqual(-1, h.find(b'Content-Type: image/png'), "Header: %s\nResponse:\n%s" % (h, r)) if __name__ == '__main__': unittest.main()
gpl-2.0
stephane-martin/salt-debian-packaging
salt-2016.3.3/salt/modules/rvm.py
2
11310
# -*- coding: utf-8 -*- ''' Manage ruby installations and gemsets with RVM, the Ruby Version Manager. ''' from __future__ import absolute_import # Import python libs import re import os import logging # Import salt libs import salt.utils from salt.exceptions import CommandExecutionError log = logging.getLogger(__name__) # Don't shadow built-in's. __func_alias__ = { 'list_': 'list' } __opts__ = { 'rvm.runas': None, } def _get_rvm_location(runas=None): if runas: runas_home = os.path.expanduser('~{0}'.format(runas)) rvmpath = '{0}/.rvm/bin/rvm'.format(runas_home) if os.path.exists(rvmpath): return [rvmpath] return ['/usr/local/rvm/bin/rvm'] def _rvm(command, runas=None, cwd=None): if runas is None: runas = __salt__['config.option']('rvm.runas') if not is_installed(runas): return False cmd = _get_rvm_location(runas) + command ret = __salt__['cmd.run_all'](cmd, runas=runas, cwd=cwd, python_shell=False) if ret['retcode'] == 0: return ret['stdout'] return False def _rvm_do(ruby, command, runas=None, cwd=None): return _rvm([ruby or 'default', 'do'] + command, runas=runas, cwd=cwd) def is_installed(runas=None): ''' Check if RVM is installed. CLI Example: .. code-block:: bash salt '*' rvm.is_installed ''' try: return __salt__['cmd.has_exec'](_get_rvm_location(runas)[0]) except IndexError: return False def install(runas=None): ''' Install RVM system-wide runas The user under which to run the rvm installer script. If not specified, then it be run as the user under which Salt is running. CLI Example: .. code-block:: bash salt '*' rvm.install ''' # RVM dependencies on Ubuntu 10.04: # bash coreutils gzip bzip2 gawk sed curl git-core subversion installer = 'https://raw.githubusercontent.com/rvm/rvm/master/binscripts/rvm-installer' ret = __salt__['cmd.run_all']( # the RVM installer automatically does a multi-user install when it is # invoked with root privileges 'curl -Ls {installer} | bash -s stable'.format(installer=installer), runas=runas, python_shell=True ) if ret['retcode'] > 0: msg = 'Error encountered while downloading the RVM installer' if ret['stderr']: msg += '. stderr follows:\n\n' + ret['stderr'] raise CommandExecutionError(msg) return True def install_ruby(ruby, runas=None): ''' Install a ruby implementation. ruby The version of ruby to install runas The user under which to run rvm. If not specified, then rvm will be run as the user under which Salt is running. CLI Example: .. code-block:: bash salt '*' rvm.install_ruby 1.9.3-p385 ''' # MRI/RBX/REE dependencies for Ubuntu 10.04: # build-essential openssl libreadline6 libreadline6-dev curl # git-core zlib1g zlib1g-dev libssl-dev libyaml-dev libsqlite3-0 # libsqlite3-dev sqlite3 libxml2-dev libxslt1-dev autoconf libc6-dev # libncurses5-dev automake libtool bison subversion ruby if runas and runas != 'root': _rvm(['autolibs', 'disable', ruby], runas=runas) return _rvm(['install', '--disable-binary', ruby], runas=runas) else: return _rvm(['install', ruby], runas=runas) def reinstall_ruby(ruby, runas=None): ''' Reinstall a ruby implementation ruby The version of ruby to reinstall runas The user under which to run rvm. If not specified, then rvm will be run as the user under which Salt is running. CLI Example: .. code-block:: bash salt '*' rvm.reinstall_ruby 1.9.3-p385 ''' return _rvm(['reinstall', ruby], runas=runas) def list_(runas=None): ''' List all rvm-installed rubies runas The user under which to run rvm. If not specified, then rvm will be run as the user under which Salt is running. CLI Example: .. 
code-block:: bash salt '*' rvm.list ''' rubies = [] output = _rvm(['list'], runas=runas) if output: regex = re.compile(r'^[= ]([*> ]) ([^- ]+)-([^ ]+) \[ (.*) \]') for line in output.splitlines(): match = regex.match(line) if match: rubies.append([ match.group(2), match.group(3), match.group(1) == '*' ]) return rubies def set_default(ruby, runas=None): ''' Set the default ruby ruby The version of ruby to make the default runas The user under which to run rvm. If not specified, then rvm will be run as the user under which Salt is running. CLI Example: .. code-block:: bash salt '*' rvm.set_default 2.0.0 ''' return _rvm(['alias', 'create', 'default', ruby], runas=runas) def get(version='stable', runas=None): ''' Update RVM version : stable Which version of RVM to install, (e.g. stable or head) CLI Example: .. code-block:: bash salt '*' rvm.get ''' return _rvm(['get', version], runas=runas) def wrapper(ruby_string, wrapper_prefix, runas=None, *binaries): ''' Install RVM wrapper scripts ruby_string Ruby/gemset to install wrappers for wrapper_prefix What to prepend to the name of the generated wrapper binaries runas The user under which to run rvm. If not specified, then rvm will be run as the user under which Salt is running. binaries : None The names of the binaries to create wrappers for. When nothing is given, wrappers for ruby, gem, rake, irb, rdoc, ri and testrb are generated. CLI Example: .. code-block:: bash salt '*' rvm.wrapper <ruby_string> <wrapper_prefix> ''' cmd = ['wrapper', ruby_string, wrapper_prefix] cmd.extend(binaries) return _rvm(cmd, runas=runas) def rubygems(ruby, version, runas=None): ''' Installs a specific rubygems version in the given ruby ruby The ruby for which to install rubygems version The version of rubygems to install, or 'remove' to use the version that ships with 1.9 runas The user under which to run rvm. If not specified, then rvm will be run as the user under which Salt is running. CLI Example: .. code-block:: bash salt '*' rvm.rubygems 2.0.0 1.8.24 ''' return _rvm_do(ruby, ['rubygems', version], runas=runas) def gemset_create(ruby, gemset, runas=None): ''' Creates a gemset. ruby The ruby version for which to create the gemset gemset The name of the gemset to create runas The user under which to run rvm. If not specified, then rvm will be run as the user under which Salt is running. CLI Example: .. code-block:: bash salt '*' rvm.gemset_create 2.0.0 foobar ''' return _rvm_do(ruby, ['rvm', 'gemset', 'create', gemset], runas=runas) def gemset_list(ruby='default', runas=None): ''' List all gemsets for the given ruby. ruby : default The ruby version for which to list the gemsets runas The user under which to run rvm. If not specified, then rvm will be run as the user under which Salt is running. CLI Example: .. code-block:: bash salt '*' rvm.gemset_list ''' gemsets = [] output = _rvm_do(ruby, ['rvm', 'gemset', 'list'], runas=runas) if output: regex = re.compile('^ ([^ ]+)') for line in output.splitlines(): match = regex.match(line) if match: gemsets.append(match.group(1)) return gemsets def gemset_delete(ruby, gemset, runas=None): ''' Delete a gemset ruby The ruby version to which the gemset belongs gemset The gemset to delete runas The user under which to run rvm. If not specified, then rvm will be run as the user under which Salt is running. CLI Example: .. 
code-block:: bash salt '*' rvm.gemset_delete 2.0.0 foobar ''' return _rvm_do(ruby, ['rvm', '--force', 'gemset', 'delete', gemset], runas=runas) def gemset_empty(ruby, gemset, runas=None): ''' Remove all gems from a gemset. ruby The ruby version to which the gemset belongs gemset The gemset to empty runas The user under which to run rvm. If not specified, then rvm will be run as the user under which Salt is running. CLI Example: .. code-block:: bash salt '*' rvm.gemset_empty 2.0.0 foobar ''' return _rvm_do(ruby, ['rvm', '--force', 'gemset', 'empty', gemset], runas=runas) def gemset_copy(source, destination, runas=None): ''' Copy all gems from one gemset to another. source The name of the gemset to copy, complete with ruby version destination The destination gemset runas The user under which to run rvm. If not specified, then rvm will be run as the user under which Salt is running. CLI Example: .. code-block:: bash salt '*' rvm.gemset_copy foobar bazquo ''' return _rvm(['gemset', 'copy', source, destination], runas=runas) def gemset_list_all(runas=None): ''' List all gemsets for all installed rubies. Note that you must have set a default ruby before this can work. runas The user under which to run rvm. If not specified, then rvm will be run as the user under which Salt is running. CLI Example: .. code-block:: bash salt '*' rvm.gemset_list_all ''' gemsets = {} current_ruby = None output = _rvm_do('default', ['rvm', 'gemset', 'list_all'], runas=runas) if output: gems_regex = re.compile('^ ([^ ]+)') gemset_regex = re.compile('^gemsets for ([^ ]+)') for line in output.splitlines(): match = gemset_regex.match(line) if match: current_ruby = match.group(1) gemsets[current_ruby] = [] match = gems_regex.match(line) if match: gemsets[current_ruby].append(match.group(1)) return gemsets def do(ruby, command, runas=None, cwd=None): # pylint: disable=C0103 ''' Execute a command in an RVM controlled environment. ruby Which ruby to use command The rvm command to execute runas The user under which to run rvm. If not specified, then rvm will be run as the user under which Salt is running. cwd The directory from which to run the rvm command. Defaults to the user's home directory. CLI Example: .. code-block:: bash salt '*' rvm.do 2.0.0 <command> ''' try: command = salt.utils.shlex_split(command) except AttributeError: command = salt.utils.shlex_split(str(command)) return _rvm_do(ruby, command, runas=runas, cwd=cwd)
apache-2.0
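A hedged sketch of the line parsing that rvm.list_() performs; the sample line is fabricated to match the regex in the module, not captured from a real rvm installation, and in Salt the function is normally reached through the loader (salt '*' rvm.list) rather than called directly.

# Hedged sketch: the regex from list_() applied to one fabricated line of
# `rvm list` output; the line format is an assumption based on that pattern.
import re

regex = re.compile(r'^[= ]([*> ]) ([^- ]+)-([^ ]+) \[ (.*) \]')
match = regex.match('=* ruby-2.0.0-p353 [ x86_64 ]')
if match:
    # [implementation, version, is_default]
    print([match.group(2), match.group(3), match.group(1) == '*'])
    # -> ['ruby', '2.0.0-p353', True]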
dwalton76/ev3dev-lang-python
tests/motor/motor_param_unittest.py
1
24419
#!/usr/bin/env python # Based on the parameterized test case technique described here: # # http://eli.thegreenplace.net/2011/08/02/python-unit-testing-parametrized-test-cases import unittest import time import sys import ev3dev.ev3 as ev3 import parameterizedtestcase as ptc from motor_info import motor_info class TestTachoMotorAddressValue(ptc.ParameterizedTestCase): def test_address_value(self): self.assertEqual(self._param['motor'].address, self._param['port']) def test_address_value_is_read_only(self): with self.assertRaises(AttributeError): self._param['motor'].address = "ThisShouldNotWork" class TestTachoMotorCommandsValue(ptc.ParameterizedTestCase): def test_commands_value(self): self.assertTrue(self._param['motor'].commands == self._param['commands']) def test_commands_value_is_read_only(self): with self.assertRaises(AttributeError): self._param['motor'].commands = "ThisShouldNotWork" class TestTachoMotorCountPerRotValue(ptc.ParameterizedTestCase): def test_count_per_rot_value(self): self.assertEqual(self._param['motor'].count_per_rot, motor_info[self._param['motor'].driver_name]['count_per_rot']) def test_count_per_rot_value_is_read_only(self): with self.assertRaises(AttributeError): self._param['motor'].count_per_rot = "ThisShouldNotWork" class TestTachoMotorCountPerMValue(ptc.ParameterizedTestCase): def test_count_per_m_value(self): self.assertEqual(self._param['motor'].count_per_m, motor_info[self._param['motor'].driver_name]['count_per_m']) def test_count_per_m_value_is_read_only(self): with self.assertRaises(AttributeError): self._param['motor'].count_per_m = "ThisShouldNotWork" class TestTachoMotorFullTravelCountValue(ptc.ParameterizedTestCase): def test_full_travel_count_value(self): self.assertEqual(self._param['motor'].full_travel_count, motor_info[self._param['motor'].driver_name]['full_travel_count']) def test_full_travel_count_value_is_read_only(self): with self.assertRaises(AttributeError): self._param['motor'].count_per_m = "ThisShouldNotWork" class TestTachoMotorDriverNameValue(ptc.ParameterizedTestCase): def test_driver_name_value(self): self.assertEqual(self._param['motor'].driver_name, self._param['driver_name']) def test_driver_name_value_is_read_only(self): with self.assertRaises(AttributeError): self._param['motor'].driver_name = "ThisShouldNotWork" class TestTachoMotorDutyCycleValue(ptc.ParameterizedTestCase): def test_duty_cycle_value_is_read_only(self): with self.assertRaises(AttributeError): self._param['motor'].duty_cycle = "ThisShouldNotWork" def test_duty_cycle_value_after_reset(self): self._param['motor'].command = 'reset' self.assertEqual(self._param['motor'].duty_cycle, 0) class TestTachoMotorDutyCycleSpValue(ptc.ParameterizedTestCase): def test_duty_cycle_sp_large_negative(self): with self.assertRaises(IOError): self._param['motor'].duty_cycle_sp = -101 def test_duty_cycle_sp_max_negative(self): self._param['motor'].duty_cycle_sp = -100 self.assertEqual(self._param['motor'].duty_cycle_sp, -100) def test_duty_cycle_sp_min_negative(self): self._param['motor'].duty_cycle_sp = -1 self.assertEqual(self._param['motor'].duty_cycle_sp, -1) def test_duty_cycle_sp_zero(self): self._param['motor'].duty_cycle_sp = 0 self.assertEqual(self._param['motor'].duty_cycle_sp, 0) def test_duty_cycle_sp_min_positive(self): self._param['motor'].duty_cycle_sp = 1 self.assertEqual(self._param['motor'].duty_cycle_sp, 1) def test_duty_cycle_sp_max_positive(self): self._param['motor'].duty_cycle_sp = 100 self.assertEqual(self._param['motor'].duty_cycle_sp, 100) def 
test_duty_cycle_sp_large_positive(self): with self.assertRaises(IOError): self._param['motor'].duty_cycle_sp = 101 def test_duty_cycle_sp_after_reset(self): self._param['motor'].duty_cycle_sp = 100 self.assertEqual(self._param['motor'].duty_cycle_sp, 100) self._param['motor'].command = 'reset' self.assertEqual(self._param['motor'].duty_cycle_sp, 0) class TestTachoMotorMaxSpeedValue(ptc.ParameterizedTestCase): def test_max_speed_value(self): self.assertEqual(self._param['motor'].max_speed, motor_info[self._param['motor'].driver_name]['max_speed']) def test_max_speed_value_is_read_only(self): with self.assertRaises(AttributeError): self._param['motor'].max_speed = "ThisShouldNotWork" class TestTachoMotorPositionPValue(ptc.ParameterizedTestCase): def test_position_p_negative(self): with self.assertRaises(IOError): self._param['motor'].position_p = -1 def test_position_p_zero(self): self._param['motor'].position_p = 0 self.assertEqual(self._param['motor'].position_p, 0) def test_position_p_positive(self): self._param['motor'].position_p = 1 self.assertEqual(self._param['motor'].position_p, 1) def test_position_p_after_reset(self): self._param['motor'].position_p = 1 self._param['motor'].command = 'reset' if self._param['hold_pid']: expected = self._param['hold_pid']['kP'] else: expected = motor_info[self._param['motor'].driver_name]['position_p'] self.assertEqual(self._param['motor'].position_p, expected) class TestTachoMotorPositionIValue(ptc.ParameterizedTestCase): def test_position_i_negative(self): with self.assertRaises(IOError): self._param['motor'].position_i = -1 def test_position_i_zero(self): self._param['motor'].position_i = 0 self.assertEqual(self._param['motor'].position_i, 0) def test_position_i_positive(self): self._param['motor'].position_i = 1 self.assertEqual(self._param['motor'].position_i, 1) def test_position_i_after_reset(self): self._param['motor'].position_i = 1 self._param['motor'].command = 'reset' if self._param['hold_pid']: expected = self._param['hold_pid']['kI'] else: expected = motor_info[self._param['motor'].driver_name]['position_i'] self.assertEqual(self._param['motor'].position_i, expected) class TestTachoMotorPositionDValue(ptc.ParameterizedTestCase): def test_position_d_negative(self): with self.assertRaises(IOError): self._param['motor'].position_d = -1 def test_position_d_zero(self): self._param['motor'].position_d = 0 self.assertEqual(self._param['motor'].position_d, 0) def test_position_d_positive(self): self._param['motor'].position_d = 1 self.assertEqual(self._param['motor'].position_d, 1) def test_position_d_after_reset(self): self._param['motor'].position_d = 1 self._param['motor'].command = 'reset' if self._param['hold_pid']: expected = self._param['hold_pid']['kD'] else: expected = motor_info[self._param['motor'].driver_name]['position_d'] self.assertEqual(self._param['motor'].position_d, expected) class TestTachoMotorPolarityValue(ptc.ParameterizedTestCase): def test_polarity_normal_value(self): self._param['motor'].polarity = 'normal' self.assertEqual(self._param['motor'].polarity, 'normal') def test_polarity_inversed_value(self): self._param['motor'].polarity = 'inversed' self.assertEqual(self._param['motor'].polarity, 'inversed') def test_polarity_illegal_value(self): with self.assertRaises(IOError): self._param['motor'].polarity = "ThisShouldNotWork" def test_polarity_after_reset(self): if 'normal' == motor_info[self._param['motor'].driver_name]['polarity']: self._param['motor'].polarity = 'inversed' else: self._param['motor'].polarity = 
'normal' self._param['motor'].command = 'reset' if 'normal' == motor_info[self._param['motor'].driver_name]['polarity']: self.assertEqual(self._param['motor'].polarity, 'normal') else: self.assertEqual(self._param['motor'].polarity, 'inversed') class TestTachoMotorPositionValue(ptc.ParameterizedTestCase): def test_position_large_negative(self): self._param['motor'].position = -1000000 self.assertEqual(self._param['motor'].position, -1000000) def test_position_min_negative(self): self._param['motor'].position = -1 self.assertEqual(self._param['motor'].position, -1) def test_position_zero(self): self._param['motor'].position = 0 self.assertEqual(self._param['motor'].position, 0) def test_position_min_positive(self): self._param['motor'].position = 1 self.assertEqual(self._param['motor'].position, 1) def test_position_large_positive(self): self._param['motor'].position = 1000000 self.assertEqual(self._param['motor'].position, 1000000) def test_position_after_reset(self): self._param['motor'].position = 100 self.assertEqual(self._param['motor'].position, 100) self._param['motor'].command = 'reset' self.assertEqual(self._param['motor'].position, 0) class TestTachoMotorPositionSpValue(ptc.ParameterizedTestCase): def test_position_sp_large_negative(self): self._param['motor'].position_sp = -1000000 self.assertEqual(self._param['motor'].position_sp, -1000000) def test_position_sp_min_negative(self): self._param['motor'].position_sp = -1 self.assertEqual(self._param['motor'].position_sp, -1) def test_position_sp_zero(self): self._param['motor'].position_sp = 0 self.assertEqual(self._param['motor'].position_sp, 0) def test_position_sp_min_positive(self): self._param['motor'].position_sp = 1 self.assertEqual(self._param['motor'].position_sp, 1) def test_position_sp_large_positive(self): self._param['motor'].position_sp = 1000000 self.assertEqual(self._param['motor'].position_sp, 1000000) def test_position_sp_after_reset(self): self._param['motor'].position_sp = 100 self.assertEqual(self._param['motor'].position_sp, 100) self._param['motor'].command = 'reset' self.assertEqual(self._param['motor'].position_sp, 0) class TestTachoMotorRampDownSpValue(ptc.ParameterizedTestCase): def test_ramp_down_sp_negative_value(self): with self.assertRaises(IOError): self._param['motor'].ramp_down_sp = -1 def test_ramp_down_sp_zero(self): self._param['motor'].ramp_down_sp = 0 self.assertEqual(self._param['motor'].ramp_down_sp, 0) def test_ramp_down_sp_min_positive(self): self._param['motor'].ramp_down_sp = 1 self.assertEqual(self._param['motor'].ramp_down_sp, 1) def test_ramp_down_sp_max_positive(self): self._param['motor'].ramp_down_sp = 60000 self.assertEqual(self._param['motor'].ramp_down_sp, 60000) def test_ramp_down_sp_large_positive(self): with self.assertRaises(IOError): self._param['motor'].ramp_down_sp = 60001 def test_ramp_down_sp_after_reset(self): self._param['motor'].ramp_down_sp = 100 self.assertEqual(self._param['motor'].ramp_down_sp, 100) self._param['motor'].command = 'reset' self.assertEqual(self._param['motor'].ramp_down_sp, 0) class TestTachoMotorRampUpSpValue(ptc.ParameterizedTestCase): def test_ramp_up_negative_value(self): with self.assertRaises(IOError): self._param['motor'].ramp_up_sp = -1 def test_ramp_up_sp_zero(self): self._param['motor'].ramp_up_sp = 0 self.assertEqual(self._param['motor'].ramp_up_sp, 0) def test_ramp_up_sp_min_positive(self): self._param['motor'].ramp_up_sp = 1 self.assertEqual(self._param['motor'].ramp_up_sp, 1) def test_ramp_up_sp_max_positive(self): 
self._param['motor'].ramp_up_sp = 60000 self.assertEqual(self._param['motor'].ramp_up_sp, 60000) def test_ramp_up_sp_large_positive(self): with self.assertRaises(IOError): self._param['motor'].ramp_up_sp = 60001 def test_ramp_up_sp_after_reset(self): self._param['motor'].ramp_up_sp = 100 self.assertEqual(self._param['motor'].ramp_up_sp, 100) self._param['motor'].command = 'reset' self.assertEqual(self._param['motor'].ramp_up_sp, 0) class TestTachoMotorSpeedValue(ptc.ParameterizedTestCase): def test_speed_value_is_read_only(self): with self.assertRaises(AttributeError): self._param['motor'].speed = 1 def test_speed_value_after_reset(self): self._param['motor'].command = 'reset' self.assertEqual(self._param['motor'].speed, 0) class TestTachoMotorSpeedSpValue(ptc.ParameterizedTestCase): def test_speed_sp_large_negative(self): with self.assertRaises(IOError): self._param['motor'].speed_sp = -(motor_info[self._param['motor'].driver_name]['max_speed'] + 1) def test_speed_sp_max_negative(self): self._param['motor'].speed_sp = -motor_info[self._param['motor'].driver_name]['max_speed'] self.assertEqual(self._param['motor'].speed_sp, -motor_info[self._param['motor'].driver_name]['max_speed']) def test_speed_sp_min_negative(self): self._param['motor'].speed_sp = -1 self.assertEqual(self._param['motor'].speed_sp, -1) def test_speed_sp_zero(self): self._param['motor'].speed_sp = 0 self.assertEqual(self._param['motor'].speed_sp, 0) def test_speed_sp_min_positive(self): self._param['motor'].speed_sp = 1 self.assertEqual(self._param['motor'].speed_sp, 1) def test_speed_sp_max_positive(self): self._param['motor'].speed_sp = (motor_info[self._param['motor'].driver_name]['max_speed']) self.assertEqual(self._param['motor'].speed_sp, motor_info[self._param['motor'].driver_name]['max_speed']) def test_speed_sp_large_positive(self): with self.assertRaises(IOError): self._param['motor'].speed_sp = motor_info[self._param['motor'].driver_name]['max_speed'] + 1 def test_speed_sp_after_reset(self): self._param['motor'].speed_sp = motor_info[self._param['motor'].driver_name]['max_speed'] / 2 self.assertEqual(self._param['motor'].speed_sp, motor_info[self._param['motor'].driver_name]['max_speed'] / 2) self._param['motor'].command = 'reset' self.assertEqual(self._param['motor'].speed_sp, 0) class TestTachoMotorSpeedPValue(ptc.ParameterizedTestCase): def test_speed_i_negative(self): with self.assertRaises(IOError): self._param['motor'].speed_p = -1 def test_speed_p_zero(self): self._param['motor'].speed_p = 0 self.assertEqual(self._param['motor'].speed_p, 0) def test_speed_p_positive(self): self._param['motor'].speed_p = 1 self.assertEqual(self._param['motor'].speed_p, 1) def test_speed_p_after_reset(self): self._param['motor'].speed_p = 1 self._param['motor'].command = 'reset' if self._param['speed_pid']: expected = self._param['speed_pid']['kP'] else: expected = motor_info[self._param['motor'].driver_name]['speed_p'] self.assertEqual(self._param['motor'].speed_p, expected) class TestTachoMotorSpeedIValue(ptc.ParameterizedTestCase): def test_speed_i_negative(self): with self.assertRaises(IOError): self._param['motor'].speed_i = -1 def test_speed_i_zero(self): self._param['motor'].speed_i = 0 self.assertEqual(self._param['motor'].speed_i, 0) def test_speed_i_positive(self): self._param['motor'].speed_i = 1 self.assertEqual(self._param['motor'].speed_i, 1) def test_speed_i_after_reset(self): self._param['motor'].speed_i = 1 self._param['motor'].command = 'reset' if self._param['speed_pid']: expected = 
self._param['speed_pid']['kI'] else: expected = motor_info[self._param['motor'].driver_name]['speed_i'] self.assertEqual(self._param['motor'].speed_i, expected) class TestTachoMotorSpeedDValue(ptc.ParameterizedTestCase): def test_speed_d_negative(self): with self.assertRaises(IOError): self._param['motor'].speed_d = -1 def test_speed_d_zero(self): self._param['motor'].speed_d = 0 self.assertEqual(self._param['motor'].speed_d, 0) def test_speed_d_positive(self): self._param['motor'].speed_d = 1 self.assertEqual(self._param['motor'].speed_d, 1) def test_speed_d_after_reset(self): self._param['motor'].speed_d = 1 self._param['motor'].command = 'reset' if self._param['speed_pid']: expected = self._param['speed_pid']['kD'] else: expected = motor_info[self._param['motor'].driver_name]['speed_d'] self.assertEqual(self._param['motor'].speed_d, expected) class TestTachoMotorStateValue(ptc.ParameterizedTestCase): def test_state_value_is_read_only(self): with self.assertRaises(AttributeError): self._param['motor'].state = 'ThisShouldNotWork' def test_state_value_after_reset(self): self._param['motor'].command = 'reset' self.assertEqual(self._param['motor'].state, []) class TestTachoMotorStopActionValue(ptc.ParameterizedTestCase): def test_stop_action_illegal(self): with self.assertRaises(IOError): self._param['motor'].stop_action = 'ThisShouldNotWork' def test_stop_action_coast(self): if 'coast' in self._param['stop_actions']: self._param['motor'].stop_action = 'coast' self.assertEqual(self._param['motor'].stop_action, 'coast') else: with self.assertRaises(IOError): self._param['motor'].stop_action = 'coast' def test_stop_action_brake(self): if 'brake' in self._param['stop_actions']: self._param['motor'].stop_action = 'brake' self.assertEqual(self._param['motor'].stop_action, 'brake') else: with self.assertRaises(IOError): self._param['motor'].stop_action = 'brake' def test_stop_action_hold(self): if 'hold' in self._param['stop_actions']: self._param['motor'].stop_action = 'hold' self.assertEqual(self._param['motor'].stop_action, 'hold') else: with self.assertRaises(IOError): self._param['motor'].stop_action = 'hold' def test_stop_action_after_reset(self): action = 1 # controller may only support one stop action if len(self._param['stop_actions']) < 2: action = 0 self._param['motor'].stop_action = self._param['stop_actions'][action] self._param['motor'].action = 'reset' self.assertEqual(self._param['motor'].stop_action, self._param['stop_actions'][0]) class TestTachoMotorStopActionsValue(ptc.ParameterizedTestCase): def test_stop_actions_value(self): self.assertTrue(self._param['motor'].stop_actions == self._param['stop_actions']) def test_stop_actions_value_is_read_only(self): with self.assertRaises(AttributeError): self._param['motor'].stop_actions = "ThisShouldNotWork" class TestTachoMotorTimeSpValue(ptc.ParameterizedTestCase): def test_time_sp_negative(self): with self.assertRaises(IOError): self._param['motor'].time_sp = -1 def test_time_sp_zero(self): self._param['motor'].time_sp = 0 self.assertEqual(self._param['motor'].time_sp, 0) def test_time_sp_min_positive(self): self._param['motor'].time_sp = 1 self.assertEqual(self._param['motor'].time_sp, 1) def test_time_sp_large_positive(self): self._param['motor'].time_sp = 1000000 self.assertEqual(self._param['motor'].time_sp, 1000000) def test_time_sp_after_reset(self): self._param['motor'].time_sp = 1 self._param['motor'].command = 'reset' self.assertEqual(self._param['motor'].time_sp, 0) ev3_params = { 'motor': ev3.Motor('outA'), 'port': 'outA', 
'driver_name': 'lego-ev3-l-motor', 'commands': ['run-forever', 'run-to-abs-pos', 'run-to-rel-pos', 'run-timed', 'run-direct', 'stop', 'reset'], 'stop_actions': ['coast', 'brake', 'hold'], } evb_params = { 'motor': ev3.Motor('evb-ports:outA'), 'port': 'evb-ports:outA', 'driver_name': 'lego-ev3-l-motor', 'commands': ['run-forever', 'run-to-abs-pos', 'run-to-rel-pos', 'run-timed', 'run-direct', 'stop', 'reset'], 'stop_actions': ['coast', 'brake', 'hold'], } brickpi_params = { 'motor': ev3.Motor('ttyAMA0:MA'), 'port': 'ttyAMA0:MA', 'driver_name': 'lego-nxt-motor', 'commands': ['run-forever', 'run-to-abs-pos', 'run-to-rel-pos', 'run-timed', 'run-direct', 'stop', 'reset'], 'stop_actions': ['coast', 'hold'], 'speed_pid': { 'kP': 1000, 'kI': 60, 'kD': 0 }, 'hold_pid': { 'kP': 20000, 'kI': 0, 'kD': 0 }, } pistorms_params = { 'motor': ev3.Motor('pistorms:BAM1'), 'port': 'pistorms:BAM1', 'driver_name': 'lego-nxt-motor', 'commands': ['run-forever', 'run-to-abs-pos', 'run-to-rel-pos', 'run-timed', 'stop', 'reset'], 'stop_actions': ['coast', 'brake', 'hold'], 'speed_pid': { 'kP': 1000, 'kI': 60, 'kD': 0 }, 'hold_pid': { 'kP': 20000, 'kI': 0, 'kD': 0 }, } paramsA = pistorms_params paramsA['motor'].command = 'reset' suite = unittest.TestSuite() suite.addTest(ptc.ParameterizedTestCase.parameterize(TestTachoMotorAddressValue, param=paramsA)) suite.addTest(ptc.ParameterizedTestCase.parameterize(TestTachoMotorCommandsValue, param=paramsA)) suite.addTest(ptc.ParameterizedTestCase.parameterize(TestTachoMotorCountPerRotValue, param=paramsA)) suite.addTest(ptc.ParameterizedTestCase.parameterize(TestTachoMotorDriverNameValue, param=paramsA)) suite.addTest(ptc.ParameterizedTestCase.parameterize(TestTachoMotorDutyCycleSpValue, param=paramsA)) suite.addTest(ptc.ParameterizedTestCase.parameterize(TestTachoMotorMaxSpeedValue, param=paramsA)) suite.addTest(ptc.ParameterizedTestCase.parameterize(TestTachoMotorPositionPValue, param=paramsA)) suite.addTest(ptc.ParameterizedTestCase.parameterize(TestTachoMotorPositionIValue, param=paramsA)) suite.addTest(ptc.ParameterizedTestCase.parameterize(TestTachoMotorPositionDValue, param=paramsA)) suite.addTest(ptc.ParameterizedTestCase.parameterize(TestTachoMotorPolarityValue, param=paramsA)) suite.addTest(ptc.ParameterizedTestCase.parameterize(TestTachoMotorPositionValue, param=paramsA)) suite.addTest(ptc.ParameterizedTestCase.parameterize(TestTachoMotorPositionSpValue, param=paramsA)) suite.addTest(ptc.ParameterizedTestCase.parameterize(TestTachoMotorRampDownSpValue, param=paramsA)) suite.addTest(ptc.ParameterizedTestCase.parameterize(TestTachoMotorRampUpSpValue, param=paramsA)) suite.addTest(ptc.ParameterizedTestCase.parameterize(TestTachoMotorSpeedValue, param=paramsA)) suite.addTest(ptc.ParameterizedTestCase.parameterize(TestTachoMotorSpeedSpValue, param=paramsA)) suite.addTest(ptc.ParameterizedTestCase.parameterize(TestTachoMotorSpeedPValue, param=paramsA)) suite.addTest(ptc.ParameterizedTestCase.parameterize(TestTachoMotorSpeedIValue, param=paramsA)) suite.addTest(ptc.ParameterizedTestCase.parameterize(TestTachoMotorSpeedDValue, param=paramsA)) suite.addTest(ptc.ParameterizedTestCase.parameterize(TestTachoMotorStateValue, param=paramsA)) suite.addTest(ptc.ParameterizedTestCase.parameterize(TestTachoMotorStopCommandValue, param=paramsA)) suite.addTest(ptc.ParameterizedTestCase.parameterize(TestTachoMotorStopCommandsValue, param=paramsA)) suite.addTest(ptc.ParameterizedTestCase.parameterize(TestTachoMotorTimeSpValue, param=paramsA)) if __name__ == '__main__': 
    unittest.TextTestRunner(verbosity=2, buffer=True).run(suite)
mit
janmtl/pypsych
tests/data/generators/eprime.py
1
2106
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Script for generating mock EPrime test data
"""
import pandas as pd
import numpy as np
import io

pd.set_option('display.max_rows', 50)
pd.set_option('display.max_columns', 500)
pd.set_option('display.width', 1000)

from pypsych.config import Config


def generate_mock_eprime_data(config_path, task_name, begaze_data, sched_path):
    """Generate mock eprime data based on mock begaze data."""
    superconfig = Config(path=config_path)
    superconfig.load()
    config = superconfig.get_subconfig(task_name, 'EPrime')

    bg = begaze_data['merged_labels'][['Condition', 'ID']]
    ed = np.random.randint(0, 10, (bg.shape[0], len(config['channels'])))
    ep = pd.DataFrame(data=ed, index=bg.index, columns=config['channels'])

    df = pd.concat([bg, ep], axis=1, join='inner')
    df.rename(columns={'ID': 'Img'}, inplace=True)

    result = []
    for _, row in df.iterrows():
        props = ["\t" + str(idx) + ': ' + str(val)
                 for idx, val in zip(list(row.index), list(row))]
        result.append("\n\n".join(props))

    result = ('\n\n\t*** LogFrame End ***\n\n'
              '\tLevel: 2\n\n'
              '\t*** LogFrame Start ***\n\n').join(result)
    prestring = ('*** Header Start ***\n\n'
                 'GARBAGE\n\n'
                 '*** Header End ***\n\n'
                 '\tLevel: 2\n\n'
                 '\t*** LogFrame Start ***\n\n')
    result = prestring + result + '\n\n\t*** LogFrame End ***'

    return {'df': df, 'raw': result}


def save_mock_eprime_data(output_path, data, subject_id, task_order, task_name):
    """Save the mock eprime files to output_path."""
    base_path = ''.join([output_path, task_name, '_',
                         str(subject_id), str(task_order)])
    raw_path = ''.join([base_path, '_eprime.txt'])
    df_path = ''.join([base_path, '_eprime_df.txt'])

    with io.open(raw_path, 'w', encoding="utf-16") as f:
        f.write(unicode(data['raw']))

    data['df'].to_csv(df_path, sep="\t")
    pass
bsd-3-clause
lbin/nexar-2
lib/utils/visualization.py
1
3989
# -------------------------------------------------------- # Tensorflow Faster R-CNN # Licensed under The MIT License [see LICENSE for details] # Written by Xinlei Chen # -------------------------------------------------------- from __future__ import absolute_import from __future__ import division from __future__ import print_function import numpy as np import PIL.Image as Image import PIL.ImageColor as ImageColor import PIL.ImageDraw as ImageDraw import PIL.ImageFont as ImageFont STANDARD_COLORS = [ 'AliceBlue', 'Chartreuse', 'Aqua', 'Aquamarine', 'Azure', 'Beige', 'Bisque', 'BlanchedAlmond', 'BlueViolet', 'BurlyWood', 'CadetBlue', 'AntiqueWhite', 'Chocolate', 'Coral', 'CornflowerBlue', 'Cornsilk', 'Crimson', 'Cyan', 'DarkCyan', 'DarkGoldenRod', 'DarkGrey', 'DarkKhaki', 'DarkOrange', 'DarkOrchid', 'DarkSalmon', 'DarkSeaGreen', 'DarkTurquoise', 'DarkViolet', 'DeepPink', 'DeepSkyBlue', 'DodgerBlue', 'FireBrick', 'FloralWhite', 'ForestGreen', 'Fuchsia', 'Gainsboro', 'GhostWhite', 'Gold', 'GoldenRod', 'Salmon', 'Tan', 'HoneyDew', 'HotPink', 'IndianRed', 'Ivory', 'Khaki', 'Lavender', 'LavenderBlush', 'LawnGreen', 'LemonChiffon', 'LightBlue', 'LightCoral', 'LightCyan', 'LightGoldenRodYellow', 'LightGray', 'LightGrey', 'LightGreen', 'LightPink', 'LightSalmon', 'LightSeaGreen', 'LightSkyBlue', 'LightSlateGray', 'LightSlateGrey', 'LightSteelBlue', 'LightYellow', 'Lime', 'LimeGreen', 'Linen', 'Magenta', 'MediumAquaMarine', 'MediumOrchid', 'MediumPurple', 'MediumSeaGreen', 'MediumSlateBlue', 'MediumSpringGreen', 'MediumTurquoise', 'MediumVioletRed', 'MintCream', 'MistyRose', 'Moccasin', 'NavajoWhite', 'OldLace', 'Olive', 'OliveDrab', 'Orange', 'OrangeRed', 'Orchid', 'PaleGoldenRod', 'PaleGreen', 'PaleTurquoise', 'PaleVioletRed', 'PapayaWhip', 'PeachPuff', 'Peru', 'Pink', 'Plum', 'PowderBlue', 'Purple', 'Red', 'RosyBrown', 'RoyalBlue', 'SaddleBrown', 'Green', 'SandyBrown', 'SeaGreen', 'SeaShell', 'Sienna', 'Silver', 'SkyBlue', 'SlateBlue', 'SlateGray', 'SlateGrey', 'Snow', 'SpringGreen', 'SteelBlue', 'GreenYellow', 'Teal', 'Thistle', 'Tomato', 'Turquoise', 'Violet', 'Wheat', 'White', 'WhiteSmoke', 'Yellow', 'YellowGreen' ] NUM_COLORS = len(STANDARD_COLORS) try: FONT = ImageFont.truetype('arial.ttf', 24) except IOError: FONT = ImageFont.load_default() def _draw_single_box(image, xmin, ymin, xmax, ymax, display_str, font, color='black', thickness=4): draw = ImageDraw.Draw(image) (left, right, top, bottom) = (xmin, xmax, ymin, ymax) draw.line([(left, top), (left, bottom), (right, bottom), (right, top), (left, top)], width=thickness, fill=color) text_bottom = bottom # Reverse list and print from bottom to top. text_width, text_height = font.getsize(display_str) margin = np.ceil(0.05 * text_height) draw.rectangle( [(left, text_bottom - text_height - 2 * margin), (left + text_width, text_bottom)], fill=color) draw.text( (left + margin, text_bottom - text_height - margin), display_str, fill='black', font=font) return image def draw_bounding_boxes(image, gt_boxes, im_info): num_boxes = gt_boxes.shape[0] gt_boxes_new = gt_boxes.copy() gt_boxes_new[:,:4] = np.round(gt_boxes_new[:,:4].copy() / im_info[2]) disp_image = Image.fromarray(np.uint8(image[0])) for i in xrange(num_boxes): this_class = int(gt_boxes_new[i, 4]) disp_image = _draw_single_box(disp_image, gt_boxes_new[i, 0], gt_boxes_new[i, 1], gt_boxes_new[i, 2], gt_boxes_new[i, 3], 'N%02d-C%02d' % (i, this_class), FONT, color=STANDARD_COLORS[this_class % NUM_COLORS]) image[0, :] = np.array(disp_image) return image
mit
liangjiaxing/sympy
sympy/galgebra/stringarrays.py
50
3306
# sympy/galgebra/stringarrays.py
"""
stringarrays.py is a group of helper functions
to convert string input to vector and multivector class functions
to arrays of SymPy symbols.
"""

import operator

from sympy.core.compatibility import reduce
from itertools import combinations

from sympy import S, Symbol, Function
from sympy.core.compatibility import range


def str_array(base, n=None):
    """
    Generate a one dimensional (list of strings) or two dimensional
    (list of lists of strings) string array.

    For one dimensional arrays: -

        base is a string of variable names separated by blanks such as
        base = 'a b c', which produces the string list ['a','b','c'], or
        it is a string with no blanks; then, in conjunction with the
        integer n, it generates -

            str_array('v',n=-3) = ['v_1','v_2','v_3']
            str_array('v',n=3) = ['v__1','v__2','v__3'].

        In the case of LaTeX printing the '_' would give a subscript and
        the '__' a superscript.

    For two dimensional arrays: -

        base is a string where elements are separated by spaces and rows by
        commas so that -

            str_array('a b,c d') = [['a','b'],['c','d']]
    """
    if n is None:
        if ',' in base:
            base_array = []
            base_split = base.split(',')
            for base_arg in base_split:
                base_array.append(list(filter(lambda x: x != '', base_arg.split(' '))))
            return base_array
        else:
            return base.split(' ')
    result = []
    if isinstance(n, str):
        if n[0] == '-':
            for index in n[1:].split(' '):
                result.append(base + '_' + index)
        if n[0] == '+':
            for index in n[1:].split(' '):
                result.append(base + '__' + index)
    if n > 0:
        for i in range(1, n + 1):
            result.append(base + '__' + str(i))
    if n < 0:
        for i in range(1, -n + 1):
            result.append(base + '_' + str(i))
    return result


def symbol_array(base, n=None):
    """
    Generates a string array with str_array and replaces each string in
    the array with a Symbol of the same name.
    """
    symbol_str_lst = str_array(base, n)
    result = []
    for symbol_str in symbol_str_lst:
        result.append(S(symbol_str))
    return tuple(result)


def fct_sym_array(str_lst, coords=None):
    """
    Construct a list of symbols or functions with names in 'str_lst'. If
    'coords' are given (tuple of symbols) a function list is constructed,
    otherwise a symbol list is constructed.
    """
    if coords is None:
        fs_lst = []
        for sym_str in str_lst:
            fs_lst.append(Symbol(sym_str))
    else:
        fs_lst = []
        for fct_str in str_lst:
            fs_lst.append(Function(fct_str)(*coords))
    return fs_lst


def str_combinations(base, lst, rank=1, mode='_'):
    """
    Construct a list of strings of the form 'base+mode+indexes' where the
    indexes are formed by converting 'lst' to a list of strings and then
    forming the 'indexes' by concatenating combinations of elements from
    'lst' taken 'rank' at a time.
    """
    a1 = combinations([str(x) for x in lst], rank)
    a2 = [reduce(operator.add, x) for x in a1]
    str_lst = [base + mode + x for x in a2]
    return str_lst
bsd-3-clause
DXCanas/content-curation
contentcuration/contentcuration/viewsets/channelset.py
1
4021
from django.core.exceptions import PermissionDenied from django.db.models import CharField from django.db.models import Q from rest_framework import serializers from rest_framework.permissions import IsAuthenticated from contentcuration.models import Channel from contentcuration.models import ChannelSet from contentcuration.viewsets.base import BulkListSerializer from contentcuration.viewsets.base import BulkModelSerializer from contentcuration.viewsets.base import ValuesViewset from contentcuration.viewsets.common import DistinctNotNullArrayAgg from contentcuration.viewsets.sync.constants import CHANNELSET from contentcuration.viewsets.sync.utils import generate_update_event class ChannelSetSerializer(BulkModelSerializer): channels = serializers.PrimaryKeyRelatedField( many=True, queryset=Channel.objects.all() ) def validate_channels(self, value): """ Check that the user has permission to view these channels """ try: # Some users might not want to add channels right away if value: self.context["request"].user.can_view_channel_ids([v.pk for v in value]) except (PermissionDenied, AttributeError, KeyError): raise serializers.ValidationError( "User does not have permission to view these channels" ) return value def create(self, validated_data): channels = validated_data.pop("channels", []) if "request" in self.context: user_id = self.context["request"].user.id # This has been newly created so add the current user as an editor validated_data["editors"] = [user_id] instance = super(ChannelSetSerializer, self).create(validated_data) for channel in channels: instance.secret_token.channels.add(channel) instance.secret_token.save() self.changes.append( generate_update_event( instance.id, CHANNELSET, {"secret_token": instance.secret_token.token}, ) ) return instance def update(self, instance, validated_data): channels = validated_data.pop("channels", []) for channel in channels: instance.secret_token.channels.add(channel) instance.secret_token.save() return super(ChannelSetSerializer, self).update(instance, validated_data) class Meta: model = ChannelSet fields = ("id", "name", "description", "channels") read_only_fields = ("id",) list_serializer_class = BulkListSerializer def clean_channels(item): return filter(lambda x: x is not None, item["channels"]) class ChannelSetViewSet(ValuesViewset): queryset = ChannelSet.objects.all() serializer_class = ChannelSetSerializer permission_classes = [IsAuthenticated] values = ("id", "name", "description", "channels", "secret_token__token") field_map = {"secret_token": "secret_token__token", "channels": clean_channels} def get_queryset(self): queryset = ChannelSet.objects.prefetch_related("secret_token").filter( id__in=ChannelSet.objects.filter(editors=self.request.user) .distinct() .values_list("id", flat=True) ) queryset = queryset.annotate( channels=DistinctNotNullArrayAgg( "secret_token__channels__id", filter=Q(main_tree__published=True, deleted=False), output_field=CharField(), ) ) return queryset def prefetch_queryset(self, queryset): queryset = queryset.select_related("secret_token") return queryset class PublicChannelSetSerializer(BulkModelSerializer): count = serializers.SerializerMethodField() def get_count(self, value): return value.count class Meta: model = ChannelSet fields = ("id", "name", "description", "count") read_only_fields = ("id", "name", "description", "count")
mit
wemanuel/smry
server-auth/ls/google-cloud-sdk/platform/gsutil/third_party/boto/boto/ec2/elb/securitygroup.py
152
1576
# Copyright (c) 2010 Reza Lotun http://reza.lotun.name
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.


class SecurityGroup(object):
    def __init__(self, connection=None):
        self.name = None
        self.owner_alias = None

    def __repr__(self):
        return 'SecurityGroup(%s, %s)' % (self.name, self.owner_alias)

    def startElement(self, name, attrs, connection):
        pass

    def endElement(self, name, value, connection):
        if name == 'GroupName':
            self.name = value
        elif name == 'OwnerAlias':
            self.owner_alias = value
apache-2.0
kargakis/test-infra
gubernator/github/main_test.py
19
5233
#!/usr/bin/env python # Copyright 2016 The Kubernetes Authors. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # pylint: disable=no-self-use """ To run these tests: $ pip install webtest nosegae $ nosetests --with-gae --gae-lib-root ~/google_appengine/ """ import json import unittest import webtest from google.appengine.ext import deferred from google.appengine.ext import testbed import handlers import main import models import secrets app = webtest.TestApp(main.app) class TestBase(unittest.TestCase): def init_stubs(self): self.testbed.init_memcache_stub() self.testbed.init_app_identity_stub() self.testbed.init_urlfetch_stub() self.testbed.init_blobstore_stub() self.testbed.init_datastore_v3_stub() self.testbed.init_taskqueue_stub() class AppTest(TestBase): def setUp(self): self.init_stubs() self.taskqueue = self.testbed.get_stub(testbed.TASKQUEUE_SERVICE_NAME) secrets.put('github_webhook_secret', 'some_secret', per_host=False) def get_response(self, event, body): if isinstance(body, dict): body = json.dumps(body) signature = handlers.make_signature(body) resp = app.post('/webhook', body, {'X-Github-Event': event, 'X-Hub-Signature': signature}) for task in self.taskqueue.get_filtered_tasks(): deferred.run(task.payload) return resp def test_webhook(self): self.get_response('test', {'action': 'blah'}) hooks = list(models.GithubWebhookRaw.query()) self.assertEqual(len(hooks), 1) self.assertIsNotNone(hooks[0].timestamp) def test_webhook_bad_sig(self): body = json.dumps({'action': 'blah'}) signature = handlers.make_signature(body + 'foo') app.post('/webhook', body, {'X-Github-Event': 'test', 'X-Hub-Signature': signature}, status=400) def test_webhook_missing_sig(self): app.post('/webhook', '{}', {'X-Github-Event': 'test'}, status=400) def test_webhook_unicode(self): self.get_response('test', {'action': u'blah\u03BA'}) def test_webhook_status(self): args = { 'name': 'owner/repo', 'sha': '1234', 'context': 'ci', 'state': 'success', 'target_url': 'http://example.com', 'description': 'passed the tests!', 'created_at': '2016-07-07T01:58:09Z', 'updated_at': '2016-07-07T02:03:12Z', } self.get_response('status', args) statuses = list(models.GHStatus.query_for_sha('owner/repo', '1234')) self.assertEqual(len(statuses), 1) status = statuses[0] args['repo'] = args.pop('name') for key, value in args.iteritems(): status_val = getattr(status, key) try: status_val = status_val.strftime('%Y-%m-%dT%H:%M:%SZ') except AttributeError: pass assert status_val == value, '%r != %r' % (getattr(status, key), value) PR_EVENT_BODY = { 'repository': {'full_name': 'test/test'}, 'pull_request': { 'number': 123, 'head': {'sha': 'cafe'}, 'updated_at': '2016-07-07T02:03:12+00:00', 'state': 'open', 'user': {'login': 'rmmh'}, 'assignees': [{'login': 'spxtr'}], 'title': 'test pr', }, 'action': 'opened', } def test_webhook_pr_open(self): body = json.dumps(self.PR_EVENT_BODY) self.get_response('pull_request', body) digest = models.GHIssueDigest.get('test/test', 123) self.assertTrue(digest.is_pr) self.assertTrue(digest.is_open) 
self.assertEqual(digest.involved, ['rmmh', 'spxtr']) self.assertEqual(digest.payload['title'], 'test pr') self.assertEqual(digest.payload['needs_rebase'], False) def test_webhook_pr_open_and_status(self): self.get_response('pull_request', self.PR_EVENT_BODY) self.get_response('status', { 'repository': self.PR_EVENT_BODY['repository'], 'name': self.PR_EVENT_BODY['repository']['full_name'], 'sha': self.PR_EVENT_BODY['pull_request']['head']['sha'], 'context': 'test-ci', 'state': 'success', 'target_url': 'example.com', 'description': 'woop!', 'created_at': '2016-07-07T01:58:09Z', 'updated_at': '2016-07-07T02:03:15Z', }) digest = models.GHIssueDigest.get('test/test', 123) self.assertEqual(digest.payload['status'], {'test-ci': ['success', 'example.com', 'woop!']})
apache-2.0
Onirik79/aaritmud
data/proto_rooms/villaggio-zingaro/villaggio-zingaro_room_locanda.py
1
1355
# -*- coding: utf-8 -*-

#= DESCRIPTION =================================================================

"""
Injects a little girl into the inn.
"""

#= IMPORTS =====================================================================

import random

from src.database import database
from src.defer import defer_random_time
from src.enums import TO
from src.log import log
from src.mob import Mob


#= CONSTANTS ===================================================================

PROTO_MOB_CODE = "villaggio-zingaro_mob_bimba-tereza"


#= FUNCTIONS ===================================================================

def on_dawn(locanda):
    print "<<<<< iniezione bimba in 1,60 >>>>>>>>>"
    defer_random_time(1, 60, coming, locanda)


def coming(locanda):
    """
    Checks that no little girls are already around and then injects one.
    """
    # This can happen because this function is deferred
    if not locanda:
        return

    for mob in database["mobs"].itervalues():
        if mob.prototype.code == PROTO_MOB_CODE:
            return

    bimba = Mob(PROTO_MOB_CODE)
    bimba.inject(locanda)
    bimba.act("Entri trotterellante in $N", TO.ENTITY, locanda)
    bimba.act("$n entra trotterellando in $N.", TO.OTHERS, locanda)
    bimba.act("Ti senti calpestare dal trotterellare di $n", TO.TARGET, locanda)
#- End of Function -
gpl-2.0
Donkyhotay/MoonPy
zope/security/tests/test_checker.py
1
24043
############################################################################## # # Copyright (c) 2001, 2002 Zope Corporation and Contributors. # All Rights Reserved. # # This software is subject to the provisions of the Zope Public License, # Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. # THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED # WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED # WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS # FOR A PARTICULAR PURPOSE. # ############################################################################## """Security Checker tests $Id: test_checker.py 67630 2006-04-27 00:54:03Z jim $ """ from unittest import TestCase, TestSuite, main, makeSuite from zope.interface import implements from zope.interface.verify import verifyObject from zope.testing.cleanup import CleanUp from zope.proxy import getProxiedObject from zope.security.interfaces import ISecurityPolicy, Unauthorized from zope.security.interfaces import Forbidden, ForbiddenAttribute from zope.security.management import setSecurityPolicy, newInteraction from zope.security.management import endInteraction, getInteraction from zope.security.proxy import removeSecurityProxy, getChecker, Proxy from zope.security.checker import defineChecker, undefineChecker, ProxyFactory from zope.security.checker import canWrite, canAccess from zope.security.checker import Checker, NamesChecker, CheckerPublic from zope.security.checker import BasicTypes, _checkers, NoProxy, _clear import types, pickle class SecurityPolicy(object): implements(ISecurityPolicy) def checkPermission(self, permission, object): 'See ISecurityPolicy' return permission == 'test_allowed' class RecordedSecurityPolicy(object): implements(ISecurityPolicy) def __init__(self): self._checked = [] self.permissions = {} def checkPermission(self, permission, object): 'See ISecurityPolicy' self._checked.append(permission) return self.permissions.get(permission, True) def checkChecked(self, checked): res = self._checked == checked self._checked = [] return res class TransparentProxy(object): def __init__(self, ob): self._ob = ob def __getattribute__(self, name): ob = object.__getattribute__(self, '_ob') return getattr(ob, name) class OldInst: __metaclass__ = types.ClassType a = 1 def b(self): pass c = 2 def gete(self): return 3 e = property(gete) def __getitem__(self, x): return 5, x def __setitem__(self, x, v): pass class NewInst(object, OldInst): # This is not needed, but left in to show the change of metaclass # __metaclass__ = type def gete(self): return 3 def sete(self, v): pass e = property(gete, sete) class Test(TestCase, CleanUp): def setUp(self): CleanUp.setUp(self) self.__oldpolicy = setSecurityPolicy(SecurityPolicy) newInteraction() def tearDown(self): endInteraction() setSecurityPolicy(self.__oldpolicy) CleanUp.tearDown(self) def test_typesAcceptedByDefineChecker(self): class ClassicClass: __metaclass__ = types.ClassType class NewStyleClass: __metaclass__ = type import zope.security not_a_type = object() defineChecker(ClassicClass, NamesChecker()) defineChecker(NewStyleClass, NamesChecker()) defineChecker(zope.security, NamesChecker()) self.assertRaises(TypeError, defineChecker, not_a_type, NamesChecker()) # check_getattr cases: # # - no attribute there # - method # - allow and disallow by permission def test_check_getattr(self): oldinst = OldInst() oldinst.d = OldInst() newinst = NewInst() newinst.d = NewInst() for inst in oldinst, newinst: checker = 
NamesChecker(['a', 'b', 'c', '__getitem__'], 'perm') self.assertRaises(Unauthorized, checker.check_getattr, inst, 'a') self.assertRaises(Unauthorized, checker.check_getattr, inst, 'b') self.assertRaises(Unauthorized, checker.check_getattr, inst, 'c') self.assertRaises(Unauthorized, checker.check, inst, '__getitem__') self.assertRaises(Forbidden, checker.check, inst, '__setitem__') self.assertRaises(Forbidden, checker.check_getattr, inst, 'd') self.assertRaises(Forbidden, checker.check_getattr, inst, 'e') self.assertRaises(Forbidden, checker.check_getattr, inst, 'f') checker = NamesChecker(['a', 'b', 'c', '__getitem__'], 'test_allowed') checker.check_getattr(inst, 'a') checker.check_getattr(inst, 'b') checker.check_getattr(inst, 'c') checker.check(inst, '__getitem__') self.assertRaises(Forbidden, checker.check, inst, '__setitem__') self.assertRaises(Forbidden, checker.check_getattr, inst, 'd') self.assertRaises(Forbidden, checker.check_getattr, inst, 'e') self.assertRaises(Forbidden, checker.check_getattr, inst, 'f') checker = NamesChecker(['a', 'b', 'c', '__getitem__'], CheckerPublic) checker.check_getattr(inst, 'a') checker.check_getattr(inst, 'b') checker.check_getattr(inst, 'c') checker.check(inst, '__getitem__') self.assertRaises(Forbidden, checker.check, inst, '__setitem__') self.assertRaises(Forbidden, checker.check_getattr, inst, 'd') self.assertRaises(Forbidden, checker.check_getattr, inst, 'e') self.assertRaises(Forbidden, checker.check_getattr, inst, 'f') def test_check_setattr(self): oldinst = OldInst() oldinst.d = OldInst() newinst = NewInst() newinst.d = NewInst() for inst in oldinst, newinst: checker = Checker({}, {'a': 'perm', 'z': 'perm'}) self.assertRaises(Unauthorized, checker.check_setattr, inst, 'a') self.assertRaises(Unauthorized, checker.check_setattr, inst, 'z') self.assertRaises(Forbidden, checker.check_setattr, inst, 'c') self.assertRaises(Forbidden, checker.check_setattr, inst, 'd') self.assertRaises(Forbidden, checker.check_setattr, inst, 'e') self.assertRaises(Forbidden, checker.check_setattr, inst, 'f') checker = Checker({}, {'a': 'test_allowed', 'z': 'test_allowed'}) checker.check_setattr(inst, 'a') checker.check_setattr(inst, 'z') self.assertRaises(Forbidden, checker.check_setattr, inst, 'd') self.assertRaises(Forbidden, checker.check_setattr, inst, 'e') self.assertRaises(Forbidden, checker.check_setattr, inst, 'f') checker = Checker({}, {'a': CheckerPublic, 'z': CheckerPublic}) checker.check_setattr(inst, 'a') checker.check_setattr(inst, 'z') self.assertRaises(Forbidden, checker.check_setattr, inst, 'd') self.assertRaises(Forbidden, checker.check_setattr, inst, 'e') self.assertRaises(Forbidden, checker.check_setattr, inst, 'f') def test_proxy(self): checker = NamesChecker(()) from zope.security.checker import BasicTypes_examples rocks = tuple(BasicTypes_examples.values()) for rock in rocks: proxy = checker.proxy(rock) self.failUnless(proxy is rock, (rock, type(proxy))) for class_ in OldInst, NewInst: inst = class_() for ob in inst, class_: proxy = checker.proxy(ob) self.failUnless(removeSecurityProxy(proxy) is ob) checker = getChecker(proxy) if ob is inst: self.assertEqual(checker.permission_id('__str__'), None) else: self.assertEqual(checker.permission_id('__str__'), CheckerPublic) #No longer doing anything special for transparent proxies. #A proxy needs to provide its own security checker. 
# #special = NamesChecker(['a', 'b'], 'test_allowed') #defineChecker(class_, special) # #for ob in inst, TransparentProxy(inst): # proxy = checker.proxy(ob) # self.failUnless(removeSecurityProxy(proxy) is ob) # # checker = getChecker(proxy) # self.failUnless(checker is special, # checker.get_permissions) # # proxy2 = checker.proxy(proxy) # self.failUnless(proxy2 is proxy, [proxy, proxy2]) def testLayeredProxies(self): """Tests that a Proxy will not be re-proxied.""" class Base: __Security_checker__ = NamesChecker(['__Security_checker__']) base = Base() checker = Checker({}) # base is not proxied, so we expect a proxy proxy1 = checker.proxy(base) self.assert_(type(proxy1) is Proxy) self.assert_(getProxiedObject(proxy1) is base) # proxy is a proxy, so we don't expect to get another proxy2 = checker.proxy(proxy1) self.assert_(proxy2 is proxy1) self.assert_(getProxiedObject(proxy2) is base) def testMultiChecker(self): from zope.interface import Interface class I1(Interface): def f1(): '' def f2(): '' class I2(I1): def f3(): '' def f4(): '' class I3(Interface): def g(): '' from zope.exceptions import DuplicationError from zope.security.checker import MultiChecker self.assertRaises(DuplicationError, MultiChecker, [(I1, 'p1'), (I2, 'p2')]) self.assertRaises(DuplicationError, MultiChecker, [(I1, 'p1'), {'f2': 'p2'}]) MultiChecker([(I1, 'p1'), (I2, 'p1')]) checker = MultiChecker([ (I2, 'p1'), {'a': 'p3'}, (I3, 'p2'), (('x','y','z'), 'p4'), ]) self.assertEqual(checker.permission_id('f1'), 'p1') self.assertEqual(checker.permission_id('f2'), 'p1') self.assertEqual(checker.permission_id('f3'), 'p1') self.assertEqual(checker.permission_id('f4'), 'p1') self.assertEqual(checker.permission_id('g'), 'p2') self.assertEqual(checker.permission_id('a'), 'p3') self.assertEqual(checker.permission_id('x'), 'p4') self.assertEqual(checker.permission_id('y'), 'p4') self.assertEqual(checker.permission_id('z'), 'p4') self.assertEqual(checker.permission_id('zzz'), None) def testAlwaysAvailable(self): from zope.security.checker import NamesChecker checker = NamesChecker(()) class C(object): pass self.assertEqual(checker.check(C, '__hash__'), None) self.assertEqual(checker.check(C, '__nonzero__'), None) self.assertEqual(checker.check(C, '__class__'), None) self.assertEqual(checker.check(C, '__implements__'), None) self.assertEqual(checker.check(C, '__lt__'), None) self.assertEqual(checker.check(C, '__le__'), None) self.assertEqual(checker.check(C, '__gt__'), None) self.assertEqual(checker.check(C, '__ge__'), None) self.assertEqual(checker.check(C, '__eq__'), None) self.assertEqual(checker.check(C, '__ne__'), None) def test_setattr(self): checker = NamesChecker(['a', 'b', 'c', '__getitem__'], 'test_allowed') for inst in NewInst(), OldInst(): self.assertRaises(Forbidden, checker.check_setattr, inst, 'a') self.assertRaises(Forbidden, checker.check_setattr, inst, 'z') # TODO: write a test to see that # Checker.check/check_setattr handle permission # values that evaluate to False def test_ProxyFactory(self): class SomeClass(object): pass import zope.security checker = NamesChecker() specific_checker = NamesChecker() checker_as_magic_attr = NamesChecker() obj = SomeClass() proxy = ProxyFactory(obj) self.assert_(type(proxy) is Proxy) from zope.security.checker import _defaultChecker self.assert_(getChecker(proxy) is _defaultChecker) defineChecker(SomeClass, checker) proxy = ProxyFactory(obj) self.assert_(type(proxy) is Proxy) self.assert_(getChecker(proxy) is checker) obj.__Security_checker__ = checker_as_magic_attr proxy = 
ProxyFactory(obj) self.assert_(type(proxy) is Proxy) self.assert_(getChecker(proxy) is checker_as_magic_attr) proxy = ProxyFactory(obj, specific_checker) self.assert_(type(proxy) is Proxy) self.assert_(getChecker(proxy) is specific_checker) def test_define_and_undefineChecker(self): class SomeClass(object): pass obj = SomeClass() checker = NamesChecker() from zope.security.checker import _defaultChecker, selectChecker self.assert_(selectChecker(obj) is _defaultChecker) defineChecker(SomeClass, checker) self.assert_(selectChecker(obj) is checker) undefineChecker(SomeClass) self.assert_(selectChecker(obj) is _defaultChecker) def test_ProxyFactory_using_proxy(self): class SomeClass(object): pass obj = SomeClass() checker = NamesChecker() proxy1 = ProxyFactory(obj) proxy2 = ProxyFactory(proxy1) self.assert_(proxy1 is proxy2) # Trying to change the checker on a proxy. self.assertRaises(TypeError, ProxyFactory, proxy1, checker) # Setting exactly the same checker as the proxy already has. proxy1 = ProxyFactory(obj, checker) proxy2 = ProxyFactory(proxy1, checker) self.assert_(proxy1 is proxy2) def test_canWrite_canAccess(self): # the canWrite and canAccess functions are conveniences. Often code # wants to check if a certain option is open to a user before # presenting it. If the code relies on a certain permission, the # Zope 3 goal of keeping knowledge of security assertions out of the # code and only in the zcml assertions is broken. Instead, ask if the # current user canAccess or canWrite some pertinent aspect of the # object. canAccess is used for both read access on an attribute # and call access to methods. # For example, consider this humble pair of class and object. class SomeClass(object): pass obj = SomeClass() # We will establish a checker for the class. This is the standard # name-based checker, and works by specifying two dicts, one for read # and one for write. Each item in the dictionary should be an # attribute name and the permission required to read or write it. # For these tests, the SecurityPolicy defined at the top of this file # is in place. It is a stub. Normally, the security policy would # have knowledge of interactions and participants, and would determine # on the basis of the particpants and the object if a certain permission # were authorized. This stub simply says that the 'test_allowed' # permission is authorized and nothing else is, for any object you pass # it. # Therefore, according to the checker created here, the current # 'interaction' (as stubbed out in the security policy) will be allowed # to access and write foo, and access bar. The interaction is # unauthorized for accessing baz and writing bar. Any other access or # write is not merely unauthorized but forbidden--including write access # for baz. checker = Checker( {'foo':'test_allowed', # these are the read settings 'bar':'test_allowed', 'baz':'you_will_not_have_this_permission'}, {'foo':'test_allowed', # these are the write settings 'bar':'you_will_not_have_this_permission', 'bing':'you_will_not_have_this_permission'}) defineChecker(SomeClass, checker) # so, our hapless interaction may write and access foo... self.assert_(canWrite(obj, 'foo')) self.assert_(canAccess(obj, 'foo')) # ...may access, but not write, bar... self.assert_(not canWrite(obj, 'bar')) self.assert_(canAccess(obj, 'bar')) # ...and may access baz. self.assert_(not canAccess(obj, 'baz')) # there are no security assertions for writing or reading shazam, so # checking these actually raises Forbidden. 
The rationale behind # exposing the Forbidden exception is primarily that it is usually # indicative of programming or configuration errors. self.assertRaises(Forbidden, canAccess, obj, 'shazam') self.assertRaises(Forbidden, canWrite, obj, 'shazam') # However, we special-case canWrite when an attribute has a Read # setting but no Write setting. Consider the 'baz' attribute from the # checker above: it is readonly. All users are forbidden to write # it. This is a very reasonable configuration. Therefore, canWrite # will hide the Forbidden exception if and only if there is a # setting for accessing the attribute. self.assert_(not canWrite(obj, 'baz')) # The reverse is not true at the moment: an unusal case like the # write-only 'bing' attribute will return a boolean for canWrite, # but canRead will simply raise a Forbidden exception, without checking # write settings. self.assert_(not canWrite(obj, 'bing')) self.assertRaises(Forbidden, canAccess, obj, 'bing') class TestCheckerPublic(TestCase): def test_that_pickling_CheckerPublic_retains_identity(self): self.assert_(pickle.loads(pickle.dumps(CheckerPublic)) is CheckerPublic) def test_that_CheckerPublic_identity_works_even_when_proxied(self): self.assert_(ProxyFactory(CheckerPublic) is CheckerPublic) class TestMixinDecoratedChecker(TestCase): def decoratedSetUp(self): self.policy = RecordedSecurityPolicy self._oldpolicy = setSecurityPolicy(self.policy) newInteraction() self.interaction = getInteraction() self.obj = object() def decoratedTearDown(self): endInteraction() setSecurityPolicy(self._oldpolicy) def check_checking_impl(self, checker): o = self.obj checker.check_getattr(o, 'both_get_set') self.assert_(self.interaction.checkChecked(['dc_get_permission'])) checker.check_getattr(o, 'c_only') self.assert_(self.interaction.checkChecked(['get_permission'])) checker.check_getattr(o, 'd_only') self.assert_(self.interaction.checkChecked(['dc_get_permission'])) self.assertRaises(ForbiddenAttribute, checker.check_getattr, o, 'completely_different_attr') self.assert_(self.interaction.checkChecked([])) checker.check(o, '__str__') self.assert_(self.interaction.checkChecked(['get_permission'])) checker.check_setattr(o, 'both_get_set') self.assert_(self.interaction.checkChecked(['dc_set_permission'])) self.assertRaises(ForbiddenAttribute, checker.check_setattr, o, 'c_only') self.assert_(self.interaction.checkChecked([])) self.assertRaises(ForbiddenAttribute, checker.check_setattr, o, 'd_only') self.assert_(self.interaction.checkChecked([])) originalChecker = NamesChecker(['both_get_set', 'c_only', '__str__'], 'get_permission') decorationSetMap = {'both_get_set': 'dc_set_permission'} decorationGetMap = {'both_get_set': 'dc_get_permission', 'd_only': 'dc_get_permission'} overridingChecker = Checker(decorationGetMap, decorationSetMap) class TestCombinedChecker(TestMixinDecoratedChecker, TestCase): def setUp(self): TestCase.setUp(self) self.decoratedSetUp() def tearDown(self): self.decoratedTearDown() TestCase.tearDown(self) def test_checking(self): from zope.security.checker import CombinedChecker cc = CombinedChecker(self.overridingChecker, self.originalChecker) self.check_checking_impl(cc) # When a permission is not authorized by the security policy, # the policy is queried twice per check_getattr -- once for each # checker. 
self.interaction.permissions['dc_get_permission'] = False cc.check_getattr(self.obj, 'both_get_set') self.assert_( self.interaction.checkChecked(['dc_get_permission', 'get_permission']) ) # This should raise Unauthorized instead of ForbiddenAttribute, since # access can be granted if you e.g. login with different credentials. self.assertRaises(Unauthorized, cc.check_getattr, self.obj, 'd_only') self.assertRaises(Unauthorized, cc.check, self.obj, 'd_only') def test_interface(self): from zope.security.checker import CombinedChecker from zope.security.interfaces import IChecker dc = CombinedChecker(self.overridingChecker, self.originalChecker) verifyObject(IChecker, dc) class TestBasicTypes(TestCase): def test(self): class MyType(object): pass class MyType2(object): pass # When an item is added to the basic types, it should also be added to # the list of checkers. BasicTypes[MyType] = NoProxy self.assert_(MyType in _checkers) # If we clear the checkers, the type should still be there _clear() self.assert_(MyType in BasicTypes) self.assert_(MyType in _checkers) # Now delete the type from the dictionary, will also delete it from # the checkers del BasicTypes[MyType] self.assert_(MyType not in BasicTypes) self.assert_(MyType not in _checkers) # The quick way of adding new types is using update BasicTypes.update({MyType: NoProxy, MyType2: NoProxy}) self.assert_(MyType in BasicTypes) self.assert_(MyType2 in BasicTypes) self.assert_(MyType in _checkers) self.assert_(MyType2 in _checkers) # Let's remove the two new types del BasicTypes[MyType] del BasicTypes[MyType2] # Of course, BasicTypes is a full dictionary. This dictionary is by # default filled with several entries: keys = BasicTypes.keys() keys.sort() self.assert_(bool in keys) self.assert_(int in keys) self.assert_(float in keys) self.assert_(str in keys) self.assert_(unicode in keys) self.assert_(object in keys) # ... # Finally, the ``clear()`` method has been deactivated to avoid # unwanted deletions. self.assertRaises(NotImplementedError, BasicTypes.clear) def test_suite(): return TestSuite(( makeSuite(Test), makeSuite(TestCheckerPublic), makeSuite(TestCombinedChecker), makeSuite(TestBasicTypes), )) if __name__=='__main__': main(defaultTest='test_suite')
gpl-3.0
listamilton/supermilton.repository
plugin.video.supermiltonflix/lib/keyring/backends/file.py
5
9728
from __future__ import with_statement import abc import base64 import getpass import json import os import sys from ..backend import KeyringBackend from ..errors import PasswordDeleteError from ..py27compat import configparser from ..util import platform_, properties from ..util.escape import escape as escape_for_ini class FileBacked(object): @abc.abstractproperty def filename(self): """ The filename used to store the passwords. """ @properties.NonDataProperty def file_path(self): """ The path to the file where passwords are stored. This property may be overridden by the subclass or at the instance level. """ return os.path.join(platform_.data_root(), self.filename) class BaseKeyring(FileBacked, KeyringBackend): """ BaseKeyring is a file-based implementation of keyring. This keyring stores the password directly in the file and provides methods which may be overridden by subclasses to support encryption and decryption. The encrypted payload is stored in base64 format. """ @abc.abstractmethod def encrypt(self, password): """ Given a password (byte string), return an encrypted byte string. """ @abc.abstractmethod def decrypt(self, password_encrypted): """ Given a password encrypted by a previous call to `encrypt`, return the original byte string. """ def get_password(self, service, username): """ Read the password from the file. """ service = escape_for_ini(service) username = escape_for_ini(username) # load the passwords from the file config = configparser.RawConfigParser() if os.path.exists(self.file_path): config.read(self.file_path) # fetch the password try: password_base64 = config.get(service, username).encode() # decode with base64 password_encrypted = base64.decodestring(password_base64) # decrypted the password password = self.decrypt(password_encrypted).decode('utf-8') except (configparser.NoOptionError, configparser.NoSectionError): password = None return password def set_password(self, service, username, password): """Write the password in the file. """ service = escape_for_ini(service) username = escape_for_ini(username) # encrypt the password password_encrypted = self.encrypt(password.encode('utf-8')) # encode with base64 password_base64 = base64.encodestring(password_encrypted).decode() # ensure the file exists self._ensure_file_path() # load the keyring from the disk config = configparser.RawConfigParser() config.read(self.file_path) # update the keyring with the password if not config.has_section(service): config.add_section(service) config.set(service, username, password_base64) # save the keyring back to the file with open(self.file_path, 'w') as config_file: config.write(config_file) def _ensure_file_path(self): """ Ensure the storage path exists. If it doesn't, create it with "go-rwx" permissions. """ storage_root = os.path.dirname(self.file_path) if storage_root and not os.path.isdir(storage_root): os.makedirs(storage_root) if not os.path.isfile(self.file_path): # create the file without group/world permissions with open(self.file_path, 'w'): pass user_read_write = 0o600 os.chmod(self.file_path, user_read_write) def delete_password(self, service, username): """Delete the password for the username of the service. 
""" service = escape_for_ini(service) username = escape_for_ini(username) config = configparser.RawConfigParser() if os.path.exists(self.file_path): config.read(self.file_path) try: if not config.remove_option(service, username): raise PasswordDeleteError("Password not found") except configparser.NoSectionError: raise PasswordDeleteError("Password not found") # update the file with open(self.file_path, 'w') as config_file: config.write(config_file) class PlaintextKeyring(BaseKeyring): """Simple File Keyring with no encryption""" priority = .5 "Applicable for all platforms, but not recommended" filename = 'keyring_pass.cfg' def encrypt(self, password): """Directly return the password itself. """ return password def decrypt(self, password_encrypted): """Directly return encrypted password. """ return password_encrypted class Encrypted(object): """ PyCrypto-backed Encryption support """ block_size = 32 def _create_cipher(self, password, salt, IV): """ Create the cipher object to encrypt or decrypt a payload. """ from Crypto.Protocol.KDF import PBKDF2 from Crypto.Cipher import AES pw = PBKDF2(password, salt, dkLen=self.block_size) return AES.new(pw[:self.block_size], AES.MODE_CFB, IV) def _get_new_password(self): while True: password = getpass.getpass( "Please set a password for your new keyring: ") confirm = getpass.getpass('Please confirm the password: ') if password != confirm: sys.stderr.write("Error: Your passwords didn't match\n") continue if '' == password.strip(): # forbid the blank password sys.stderr.write("Error: blank passwords aren't allowed.\n") continue return password class EncryptedKeyring(Encrypted, BaseKeyring): """PyCrypto File Keyring""" filename = 'crypted_pass.cfg' pw_prefix = 'pw:'.encode() @properties.ClassProperty @classmethod def priority(self): "Applicable for all platforms, but not recommended." try: __import__('Crypto.Cipher.AES') __import__('Crypto.Protocol.KDF') __import__('Crypto.Random') except ImportError: raise RuntimeError("PyCrypto required") if not json: raise RuntimeError("JSON implementation such as simplejson " "required.") return .6 @properties.NonDataProperty def keyring_key(self): # _unlock or _init_file will set the key or raise an exception if self._check_file(): self._unlock() else: self._init_file() return self.keyring_key def _init_file(self): """ Initialize a new password file and set the reference password. """ self.keyring_key = self._get_new_password() # set a reference password, used to check that the password provided # matches for subsequent checks. self.set_password('keyring-setting', 'password reference', 'password reference value') def _check_file(self): """ Check if the file exists and has the expected password reference. """ if not os.path.exists(self.file_path): return False self._migrate() config = configparser.RawConfigParser() config.read(self.file_path) try: config.get( escape_for_ini('keyring-setting'), escape_for_ini('password reference'), ) except (configparser.NoSectionError, configparser.NoOptionError): return False return True def _unlock(self): """ Unlock this keyring by getting the password for the keyring from the user. """ self.keyring_key = getpass.getpass( 'Please enter password for encrypted keyring: ') try: ref_pw = self.get_password('keyring-setting', 'password reference') assert ref_pw == 'password reference value' except AssertionError: self._lock() raise ValueError("Incorrect Password") def _lock(self): """ Remove the keyring key from this instance. 
""" del self.keyring_key def encrypt(self, password): from Crypto.Random import get_random_bytes salt = get_random_bytes(self.block_size) from Crypto.Cipher import AES IV = get_random_bytes(AES.block_size) cipher = self._create_cipher(self.keyring_key, salt, IV) password_encrypted = cipher.encrypt(self.pw_prefix + password) # Serialize the salt, IV, and encrypted password in a secure format data = dict( salt=salt, IV=IV, password_encrypted=password_encrypted, ) for key in data: data[key] = base64.encodestring(data[key]).decode() return json.dumps(data).encode() def decrypt(self, password_encrypted): # unpack the encrypted payload data = json.loads(password_encrypted.decode()) for key in data: data[key] = base64.decodestring(data[key].encode()) cipher = self._create_cipher(self.keyring_key, data['salt'], data['IV']) plaintext = cipher.decrypt(data['password_encrypted']) assert plaintext.startswith(self.pw_prefix) return plaintext[3:] def _migrate(self, keyring_password=None): """ Convert older keyrings to the current format. """
gpl-2.0
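A minimal usage sketch for the file-backed keyring classes in the record above. The import path is an assumption (the module ships inside the keyring package's file-based backends), and the storage-location override relies on the per-instance file_path behaviour shown in FileBacked; this is an illustration, not the library's documented entry point.

from keyring.backends.file import PlaintextKeyring  # assumed import path for the module above

kr = PlaintextKeyring()
kr.file_path = '/tmp/demo_keyring.cfg'  # per-instance override of the default storage file
kr.set_password('example-service', 'alice', 's3cr3t')
assert kr.get_password('example-service', 'alice') == 's3cr3t'
kr.delete_password('example-service', 'alice')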
jay3sh/vispy
examples/basics/scene/isocurve_updates.py
17
4424
# -*- coding: utf-8 -*- # vispy: gallery 30 # ----------------------------------------------------------------------------- # Copyright (c) 2015, Vispy Development Team. All Rights Reserved. # Distributed under the (new) BSD License. See LICENSE.txt for more info. # ----------------------------------------------------------------------------- """ Show use of SceneCanvas to display and update Image and Isocurve visuals using ViewBox visual. """ import sys import numpy as np from itertools import cycle from vispy import app, scene from vispy.scene import STTransform from vispy.util.filter import gaussian_filter from vispy.color import get_colormaps, get_color_names canvas = scene.SceneCanvas(keys='interactive', title='Show update capabilities of Isocurve Visual', show=True) canvas.show() # Setting up four viewboxes vb1 = scene.widgets.ViewBox(border_color='yellow', parent=canvas.scene) vb2 = scene.widgets.ViewBox(border_color='blue', parent=canvas.scene) vb3 = scene.widgets.ViewBox(border_color='red', parent=canvas.scene) vb4 = scene.widgets.ViewBox(border_color='purple', parent=canvas.scene) vb = (vb1, vb2, vb3, vb4) # add grid as central widget, add viewboxes into grid grid = canvas.central_widget.add_grid() grid.padding = 0 grid.add_widget(vb1, 0, 0) grid.add_widget(vb2, 0, 1) grid.add_widget(vb3, 1, 0) grid.add_widget(vb4, 1, 1) # panzoom cameras for every viewbox for box in vb: box.camera = 'panzoom' box.camera.aspect = 1.0 # Create random image img_data1 = np.empty((100, 100, 3), dtype=np.ubyte) noise = np.random.normal(size=(100, 100), loc=50, scale=150) noise = gaussian_filter(noise, (4, 4, 0)) img_data1[:] = noise[..., np.newaxis] # create 2d array with some function x, y = np.mgrid[0:2*np.pi:101j, 0:2*np.pi:101j] myfunc = np.cos(2*x[:-1, :-1]) + np.sin(2*y[:-1, :-1]) # add image to viewbox1 image1 = scene.visuals.Image(noise, parent=vb1.scene, cmap='cubehelix') # move image behind curves image1.transform = STTransform(translate=(0, 0, 0.5)) vb1.camera.set_range() # add image to viewbox2 image2 = scene.visuals.Image(myfunc, parent=vb2.scene, cmap='cubehelix') # move image behind curves image2.transform = STTransform(translate=(0, 0, 0.5)) vb2.camera.set_range() # create some level for the isocurves levels1 = np.linspace(noise.min(), noise.max(), num=52, endpoint=True)[1:-1] levels2 = np.linspace(myfunc.min(), myfunc.max(), num=52, endpoint=True)[1:-1] # create curve 1a (image overlay, black) and 1b (plain, cubehelix colored) # to viewboxes 1 and 3 curve1a = scene.visuals.Isocurve( noise, levels=levels1[::4], color_lev='k', parent=vb1.scene) curve1b = scene.visuals.Isocurve( noise, levels=levels1, color_lev='cubehelix', parent=vb3.scene) # create curve 2a (2darray overlay, black) and 2b (plain, cubehelix colored) # to viewboxes 2 and 4 curve2a = scene.visuals.Isocurve( myfunc, levels=levels2[::4], color_lev='k', parent=vb2.scene) curve2b = scene.visuals.Isocurve( myfunc, levels=levels2, color_lev='cubehelix', parent=vb4.scene) # set viewport vb3.camera.set_range((0, 100), (0, 100)) vb4.camera.set_range((0, 100), (0, 100)) # setup update parameters up = 1 index = 1 clip = np.linspace(myfunc.min(), myfunc.max(), num=51) cmap = cycle(get_colormaps()) color = cycle(get_color_names()) def update(ev): global myfunc, index, up, levels2, noise, cmap, color if index > 0 and index < 25: # update left panes rolling upwards noise = np.roll(noise, 1, axis=0) image1.set_data(noise) curve1a.set_data(noise) curve1b.set_data(noise) # update colors/colormap if (index % 5) == 0: curve1b.color = 
next(color) cm = next(cmap) image2.cmap = cm curve2b.color = cm # change isocurves by clipping data/or changing limits # update curve1b levels (clip) curve1b.levels = levels1[index:-index] # update curve2b data with clipped data im2 = np.clip(myfunc, clip[index], clip[-index]) curve2b.set_data(im2) index += up else: # change index direction up = -up index += up canvas.update() # setup timer timer = app.Timer() timer.connect(update) # slow this down a bit to better see what happens timer.start(0) if __name__ == '__main__' and sys.flags.interactive == 0: app.run()
bsd-3-clause
hackendless/heekscnc
nc/hpgl3d.py
34
2237
# hpgl3d.py # # Copyright (c) 2009, Dan Heeks # This program is released under the BSD license. See the file COPYING for details. # import nc import hpgl2d import math class Creator(hpgl2d.Creator): def __init__(self): hpgl2d.Creator.__init__(self) self.z = int(0) self.metric() # set self.units_to_mc_units self.doing_rapid = True def program_begin(self, id, name=''): self.write(';;^IN;!MC0;\n') self.write('V50.0;^PR;Z0,0,10500;^PA;\n') self.write('!RC15;\n') self.write('!MC1;\n') def program_end(self): self.write('!VZ50.0;!ZM0;\n') self.write('!MC0;^IN;\n') def get_machine_xyz(self, x=None, y=None, z=None): machine_x = self.x machine_y = self.y machine_z = self.z if x != None: machine_x = self.closest_int(x * self.units_to_mc_units) if y != None: machine_y = self.closest_int(y * self.units_to_mc_units) if z != None: machine_z = self.closest_int(z * self.units_to_mc_units) return machine_x, machine_y, machine_z def rapid(self, x=None, y=None, z=None, a=None, b=None, c=None): # do a rapid move. # for now, do all rapid moves at V50 ( 50 mm/s ) mx, my, mz = self.get_machine_xyz(x, y, z) if mx != self.x or my != self.y or mz != self.z: if self.doing_rapid == False: self.write('V50.0;') self.write(('Z%i' % mx) + (',%i' % my) + (',%i;\n' % mz)) self.x = mx self.y = my self.z = mz self.doing_rapid = True def feed(self, x=None, y=None, z=None, a=None, b=None, c=None): # do a feed move. # for now, do all feed moves at V10 ( 10 mm/s ) mx, my, mz = self.get_machine_xyz(x, y, z) if mx != self.x or my != self.y or mz != self.z: if self.doing_rapid == True: self.write('V10.0;') self.write(('Z%i' % mx) + (',%i' % my) + (',%i;\n' % mz)) self.x = mx self.y = my self.z = mz self.doing_rapid = False nc.creator = Creator()
bsd-3-clause
chauhanhardik/populo
common/lib/xmodule/xmodule/assetstore/__init__.py
124
11966
""" Classes representing asset metadata. """ from datetime import datetime import dateutil.parser import pytz import json from contracts import contract, new_contract from opaque_keys.edx.keys import CourseKey, AssetKey from lxml import etree new_contract('AssetKey', AssetKey) new_contract('CourseKey', CourseKey) new_contract('datetime', datetime) new_contract('basestring', basestring) new_contract('long', long) new_contract('AssetElement', lambda x: isinstance(x, etree._Element) and x.tag == "asset") # pylint: disable=protected-access new_contract('AssetsElement', lambda x: isinstance(x, etree._Element) and x.tag == "assets") # pylint: disable=protected-access class AssetMetadata(object): """ Stores the metadata associated with a particular course asset. The asset metadata gets stored in the modulestore. """ TOP_LEVEL_ATTRS = ['pathname', 'internal_name', 'locked', 'contenttype', 'thumbnail', 'fields'] EDIT_INFO_ATTRS = ['curr_version', 'prev_version', 'edited_by', 'edited_by_email', 'edited_on'] CREATE_INFO_ATTRS = ['created_by', 'created_by_email', 'created_on'] ATTRS_ALLOWED_TO_UPDATE = TOP_LEVEL_ATTRS + EDIT_INFO_ATTRS ASSET_TYPE_ATTR = 'type' ASSET_BASENAME_ATTR = 'filename' XML_ONLY_ATTRS = [ASSET_TYPE_ATTR, ASSET_BASENAME_ATTR] XML_ATTRS = XML_ONLY_ATTRS + ATTRS_ALLOWED_TO_UPDATE + CREATE_INFO_ATTRS # Type for assets uploaded by a course author in Studio. GENERAL_ASSET_TYPE = 'asset' # Asset section XML tag for asset metadata as XML. ALL_ASSETS_XML_TAG = 'assets' # Individual asset XML tag for asset metadata as XML. ASSET_XML_TAG = 'asset' # Top-level directory name in exported course XML which holds asset metadata. EXPORTED_ASSET_DIR = 'assets' # Filename of all asset metadata exported as XML. EXPORTED_ASSET_FILENAME = 'assets.xml' @contract(asset_id='AssetKey', pathname='basestring|None', internal_name='basestring|None', locked='bool|None', contenttype='basestring|None', thumbnail='basestring|None', fields='dict|None', curr_version='basestring|None', prev_version='basestring|None', created_by='int|long|None', created_by_email='basestring|None', created_on='datetime|None', edited_by='int|long|None', edited_by_email='basestring|None', edited_on='datetime|None') def __init__(self, asset_id, pathname=None, internal_name=None, locked=None, contenttype=None, thumbnail=None, fields=None, curr_version=None, prev_version=None, created_by=None, created_by_email=None, created_on=None, edited_by=None, edited_by_email=None, edited_on=None, field_decorator=None,): """ Construct a AssetMetadata object. Arguments: asset_id (AssetKey): Key identifying this particular asset. pathname (str): Original path to file at asset upload time. internal_name (str): Name, url, or handle for the storage system to access the file. locked (bool): If True, only course participants can access the asset. contenttype (str): MIME type of the asset. thumbnail (str): the internal_name for the thumbnail if one exists fields (dict): fields to save w/ the metadata curr_version (str): Current version of the asset. prev_version (str): Previous version of the asset. created_by (int): User ID of initial user to upload this asset. created_by_email (str): Email address of initial user to upload this asset. created_on (datetime): Datetime of intial upload of this asset. edited_by (int): User ID of last user to upload this asset. edited_by_email (str): Email address of last user to upload this asset. edited_on (datetime): Datetime of last upload of this asset. 
field_decorator (function): used by strip_key to convert OpaqueKeys to the app's understanding. Not saved. """ self.asset_id = asset_id if field_decorator is None else field_decorator(asset_id) self.pathname = pathname # Path w/o filename. self.internal_name = internal_name self.locked = locked self.contenttype = contenttype self.thumbnail = thumbnail self.curr_version = curr_version self.prev_version = prev_version now = datetime.now(pytz.utc) self.edited_by = edited_by self.edited_by_email = edited_by_email self.edited_on = edited_on or now # created_by, created_by_email, and created_on should only be set here. self.created_by = created_by self.created_by_email = created_by_email self.created_on = created_on or now self.fields = fields or {} def __repr__(self): return """AssetMetadata{!r}""".format(( self.asset_id, self.pathname, self.internal_name, self.locked, self.contenttype, self.fields, self.curr_version, self.prev_version, self.created_by, self.created_by_email, self.created_on, self.edited_by, self.edited_by_email, self.edited_on, )) def update(self, attr_dict): """ Set the attributes on the metadata. Any which are not in ATTRS_ALLOWED_TO_UPDATE get put into fields. Arguments: attr_dict: Prop, val dictionary of all attributes to set. """ for attr, val in attr_dict.iteritems(): if attr in self.ATTRS_ALLOWED_TO_UPDATE: setattr(self, attr, val) else: self.fields[attr] = val def to_storable(self): """ Converts metadata properties into a MongoDB-storable dict. """ return { 'filename': self.asset_id.path, 'asset_type': self.asset_id.asset_type, 'pathname': self.pathname, 'internal_name': self.internal_name, 'locked': self.locked, 'contenttype': self.contenttype, 'thumbnail': self.thumbnail, 'fields': self.fields, 'edit_info': { 'curr_version': self.curr_version, 'prev_version': self.prev_version, 'created_by': self.created_by, 'created_by_email': self.created_by_email, 'created_on': self.created_on, 'edited_by': self.edited_by, 'edited_by_email': self.edited_by_email, 'edited_on': self.edited_on } } @contract(asset_doc='dict|None') def from_storable(self, asset_doc): """ Fill in all metadata fields from a MongoDB document. The asset_id prop is initialized upon construction only. """ if asset_doc is None: return self.pathname = asset_doc['pathname'] self.internal_name = asset_doc['internal_name'] self.locked = asset_doc['locked'] self.contenttype = asset_doc['contenttype'] self.thumbnail = asset_doc['thumbnail'] self.fields = asset_doc['fields'] self.curr_version = asset_doc['edit_info']['curr_version'] self.prev_version = asset_doc['edit_info']['prev_version'] self.created_by = asset_doc['edit_info']['created_by'] self.created_by_email = asset_doc['edit_info']['created_by_email'] self.created_on = asset_doc['edit_info']['created_on'] self.edited_by = asset_doc['edit_info']['edited_by'] self.edited_by_email = asset_doc['edit_info']['edited_by_email'] self.edited_on = asset_doc['edit_info']['edited_on'] @contract(node='AssetElement') def from_xml(self, node): """ Walk the etree XML node and fill in the asset metadata. The node should be a top-level "asset" element. """ for child in node: qname = etree.QName(child) tag = qname.localname if tag in self.XML_ATTRS: value = child.text if tag in self.XML_ONLY_ATTRS: # An AssetLocator is constructed separately from these parts. continue elif tag == 'locked': # Boolean. value = True if value == "true" else False elif value == 'None': # None. value = None elif tag in ('created_on', 'edited_on'): # ISO datetime. 
value = dateutil.parser.parse(value) elif tag in ('created_by', 'edited_by'): # Integer representing user id. value = int(value) elif tag == 'fields': # Dictionary. value = json.loads(value) setattr(self, tag, value) @contract(node='AssetElement') def to_xml(self, node): """ Add the asset data as XML to the passed-in node. The node should already be created as a top-level "asset" element. """ for attr in self.XML_ATTRS: child = etree.SubElement(node, attr) # Get the value. if attr == self.ASSET_TYPE_ATTR: value = self.asset_id.asset_type elif attr == self.ASSET_BASENAME_ATTR: value = self.asset_id.path else: value = getattr(self, attr) # Format the value. if isinstance(value, bool): value = "true" if value else "false" elif isinstance(value, datetime): value = value.isoformat() elif isinstance(value, dict): value = json.dumps(value) else: value = unicode(value) child.text = value @staticmethod @contract(node='AssetsElement', assets=list) def add_all_assets_as_xml(node, assets): """ Take a list of AssetMetadata objects. Add them all to the node. The node should already be created as a top-level "assets" element. """ for asset in assets: asset_node = etree.SubElement(node, "asset") asset.to_xml(asset_node) class CourseAssetsFromStorage(object): """ Wrapper class for asset metadata lists returned from modulestore storage. """ @contract(course_id='CourseKey', asset_md=dict) def __init__(self, course_id, doc_id, asset_md): """ Params: course_id: Course ID for which the asset metadata is stored. doc_id: ObjectId of MongoDB document asset_md: Dict with asset types as keys and lists of storable asset metadata as values. """ self.course_id = course_id self._doc_id = doc_id self.asset_md = asset_md @property def doc_id(self): """ Returns the ID associated with the MongoDB document which stores these course assets. """ return self._doc_id def setdefault(self, item, default=None): """ Provides dict-equivalent setdefault functionality. """ return self.asset_md.setdefault(item, default) def __getitem__(self, item): return self.asset_md[item] def __delitem__(self, item): del self.asset_md[item] def __len__(self): return len(self.asset_md) def __setitem__(self, key, value): self.asset_md[key] = value def get(self, item, default=None): """ Provides dict-equivalent get functionality. """ return self.asset_md.get(item, default) def iteritems(self): """ Iterates over the items of the asset dict. """ return self.asset_md.iteritems()
agpl-3.0
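An illustrative construction and serialization sketch for the AssetMetadata class in the record above. The course identifier and filenames are made up, and building the AssetKey through opaque_keys' CourseKey.make_asset_key is an assumption about the surrounding platform rather than something shown in this file.

from opaque_keys.edx.keys import CourseKey

course_key = CourseKey.from_string('course-v1:DemoOrg+CS101+2015')  # hypothetical course id
asset_key = course_key.make_asset_key('asset', 'logo.png')

md = AssetMetadata(asset_key, pathname='images', internal_name='logo-abc123.png',
                   locked=False, contenttype='image/png')
doc = md.to_storable()                                # MongoDB-ready dict
md.update({'locked': True, 'caption': 'site logo'})   # unknown attrs like 'caption' land in md.fields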
michaelpri10/WelcomeBot
google_search.py
2
1336
import urllib2 import json def google_search(search_term): """ Searches for a `search_term` which should be a string or a value convertable to string. Parameters: - str `search_term`: a string to search for Returns a tuple (on success): - first value is a list of search results for the `search_term` returned by Google API - second value is a Google Search UI URL, where more results can be obtained Returns False (on failure). -- Authors: - michaelpri10 - Jacob-Gray - Kubo2 """ # The request also includes the userip parameter which provides the end # user's IP address. Doing so will help distinguish this legitimate # server-side traffic from traffic which doesn't come from an end-user. search_term = search_term.encode('ascii', errors='replace') url = "https://ajax.googleapis.com/ajax/services/search/web?v=1.0&q=%s&userip=USERS-IP-ADDRESS" % search_term request = urllib2.Request(url, None) response = urllib2.urlopen(request) # Process the JSON string. results = json.load(response) # now have some fun with the results... if len(results["responseData"]["results"]) > 0: return results["responseData"]["results"], results["responseData"]["cursor"]["moreResultsUrl"] return False
mit
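A hypothetical call sketch for the google_search helper in the record above. The Google AJAX Search API it wraps has long been deprecated, so the shape of each result dict (e.g. a 'url' key) is an assumption taken from the old response format.

result = google_search("openstreetmap")
if result:
    hits, more_url = result
    for hit in hits:
        print(hit.get("url"))   # each hit is a dict decoded from the JSON response
    print(more_url)             # Google Search UI URL with more results
else:
    print("no results")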
snowmantw/AutobahnTestSuite
autobahntestsuite/autobahntestsuite/case/case7_3_2.py
14
1535
############################################################################### ## ## Copyright 2011 Tavendo GmbH ## ## Licensed under the Apache License, Version 2.0 (the "License"); ## you may not use this file except in compliance with the License. ## You may obtain a copy of the License at ## ## http://www.apache.org/licenses/LICENSE-2.0 ## ## Unless required by applicable law or agreed to in writing, software ## distributed under the License is distributed on an "AS IS" BASIS, ## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. ## See the License for the specific language governing permissions and ## limitations under the License. ## ############################################################################### from case import Case class Case7_3_2(Case): DESCRIPTION = """Send a close frame with payload length 1""" EXPECTATION = """Clean close with protocol error or drop TCP.""" def init(self): self.suppressClose = True def onConnectionLost(self, failedByMe): Case.onConnectionLost(self, failedByMe) if self.behaviorClose == Case.WRONG_CODE: self.behavior = Case.FAILED self.passed = False self.result = self.resultClose def onOpen(self): self.expected[Case.OK] = [] self.expectedClose = {"closedByMe":True,"closeCode":[self.p.CLOSE_STATUS_CODE_PROTOCOL_ERROR],"requireClean":False} self.p.sendCloseFrame(reasonUtf8 = "a") self.p.killAfter(1)
apache-2.0
colemanja91/PyEloqua-Examples
venv/lib/python3.4/site-packages/pkg_resources/_vendor/packaging/requirements.py
454
4355
# This file is dual licensed under the terms of the Apache License, Version # 2.0, and the BSD License. See the LICENSE file in the root of this repository # for complete details. from __future__ import absolute_import, division, print_function import string import re from pkg_resources.extern.pyparsing import stringStart, stringEnd, originalTextFor, ParseException from pkg_resources.extern.pyparsing import ZeroOrMore, Word, Optional, Regex, Combine from pkg_resources.extern.pyparsing import Literal as L # noqa from pkg_resources.extern.six.moves.urllib import parse as urlparse from .markers import MARKER_EXPR, Marker from .specifiers import LegacySpecifier, Specifier, SpecifierSet class InvalidRequirement(ValueError): """ An invalid requirement was found, users should refer to PEP 508. """ ALPHANUM = Word(string.ascii_letters + string.digits) LBRACKET = L("[").suppress() RBRACKET = L("]").suppress() LPAREN = L("(").suppress() RPAREN = L(")").suppress() COMMA = L(",").suppress() SEMICOLON = L(";").suppress() AT = L("@").suppress() PUNCTUATION = Word("-_.") IDENTIFIER_END = ALPHANUM | (ZeroOrMore(PUNCTUATION) + ALPHANUM) IDENTIFIER = Combine(ALPHANUM + ZeroOrMore(IDENTIFIER_END)) NAME = IDENTIFIER("name") EXTRA = IDENTIFIER URI = Regex(r'[^ ]+')("url") URL = (AT + URI) EXTRAS_LIST = EXTRA + ZeroOrMore(COMMA + EXTRA) EXTRAS = (LBRACKET + Optional(EXTRAS_LIST) + RBRACKET)("extras") VERSION_PEP440 = Regex(Specifier._regex_str, re.VERBOSE | re.IGNORECASE) VERSION_LEGACY = Regex(LegacySpecifier._regex_str, re.VERBOSE | re.IGNORECASE) VERSION_ONE = VERSION_PEP440 ^ VERSION_LEGACY VERSION_MANY = Combine(VERSION_ONE + ZeroOrMore(COMMA + VERSION_ONE), joinString=",", adjacent=False)("_raw_spec") _VERSION_SPEC = Optional(((LPAREN + VERSION_MANY + RPAREN) | VERSION_MANY)) _VERSION_SPEC.setParseAction(lambda s, l, t: t._raw_spec or '') VERSION_SPEC = originalTextFor(_VERSION_SPEC)("specifier") VERSION_SPEC.setParseAction(lambda s, l, t: t[1]) MARKER_EXPR = originalTextFor(MARKER_EXPR())("marker") MARKER_EXPR.setParseAction( lambda s, l, t: Marker(s[t._original_start:t._original_end]) ) MARKER_SEPERATOR = SEMICOLON MARKER = MARKER_SEPERATOR + MARKER_EXPR VERSION_AND_MARKER = VERSION_SPEC + Optional(MARKER) URL_AND_MARKER = URL + Optional(MARKER) NAMED_REQUIREMENT = \ NAME + Optional(EXTRAS) + (URL_AND_MARKER | VERSION_AND_MARKER) REQUIREMENT = stringStart + NAMED_REQUIREMENT + stringEnd class Requirement(object): """Parse a requirement. Parse a given requirement string into its parts, such as name, specifier, URL, and extras. Raises InvalidRequirement on a badly-formed requirement string. """ # TODO: Can we test whether something is contained within a requirement? # If so how do we do that? Do we need to test against the _name_ of # the thing as well as the version? What about the markers? # TODO: Can we normalize the name and extra name? 
def __init__(self, requirement_string): try: req = REQUIREMENT.parseString(requirement_string) except ParseException as e: raise InvalidRequirement( "Invalid requirement, parse error at \"{0!r}\"".format( requirement_string[e.loc:e.loc + 8])) self.name = req.name if req.url: parsed_url = urlparse.urlparse(req.url) if not (parsed_url.scheme and parsed_url.netloc) or ( not parsed_url.scheme and not parsed_url.netloc): raise InvalidRequirement("Invalid URL given") self.url = req.url else: self.url = None self.extras = set(req.extras.asList() if req.extras else []) self.specifier = SpecifierSet(req.specifier) self.marker = req.marker if req.marker else None def __str__(self): parts = [self.name] if self.extras: parts.append("[{0}]".format(",".join(sorted(self.extras)))) if self.specifier: parts.append(str(self.specifier)) if self.url: parts.append("@ {0}".format(self.url)) if self.marker: parts.append("; {0}".format(self.marker)) return "".join(parts) def __repr__(self): return "<Requirement({0!r})>".format(str(self))
gpl-2.0
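A quick parsing sketch for the Requirement class in the record above. Importing from the standalone packaging distribution instead of the vendored pkg_resources copy is an assumption made so the example is self-contained.

from packaging.requirements import Requirement  # assumed non-vendored import path

req = Requirement('requests[security]>=2.8.1; python_version < "3.5"')
print(req.name)            # requests
print(sorted(req.extras))  # ['security']
print(str(req.specifier))  # >=2.8.1
print(str(req.marker))     # python_version < "3.5"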
dpo/nlpy
nlpy/model/adolcmodel.py
3
9361
from nlpy.model import NLPModel from nlpy.krylov import SimpleLinearOperator import adolc import numpy as np has_colpack = False class AdolcModel(NLPModel): """ A class to represent optimization problems in which derivatives are computed via algorithmic differentiation through ADOL-C. See the documentation of `NLPModel` for further information. """ # Count the number of instances of this class to generate # non-conflicting tape ids. Must be a mutable type. __NUM_INSTANCES = [-1] def __init__(self, n=0, m=0, name='Adolc-Generic', **kwargs): NLPModel.__init__(self, n, m, name, **kwargs) self.__class__.__NUM_INSTANCES[0] += 1 # Trace objective and constraint functions. self._obj_trace_id = None self._trace_obj(self.x0) self._con_trace_id = None if self.m > 0: self._trace_con(self.x0) self.first_sparse_hess_eval = True self.first_sparse_jac_eval = True def _get_trace_id(self): "Return an available trace id." return 100*self.__NUM_INSTANCES[0] def get_obj_trace_id(self): "Return the trace id for the objective function." return self._obj_trace_id def get_con_trace_id(self): "Return the trace id for the constraints." return self._con_trace_id def _trace_obj(self, x): if self._obj_trace_id is None: self._obj_trace_id = self._get_trace_id() adolc.trace_on(self._obj_trace_id) x = adolc.adouble(x) adolc.independent(x) y = self.obj(x) adolc.dependent(y) adolc.trace_off() def _trace_con(self, x): if self._con_trace_id is None and self.m > 0: self._con_trace_id = self._get_trace_id() + 1 adolc.trace_on(self._con_trace_id) x = adolc.adouble(x) adolc.independent(x) y = self.cons(x) adolc.dependent(y) adolc.trace_off() def _adolc_obj(self, x): "Evaluate the objective function from the ADOL-C tape." return adolc.function(self._obj_trace_id, x) def grad(self, x, **kwargs): "Evaluate the objective gradient at x." return self._adolc_grad(x, **kwargs) def _adolc_grad(self, x, **kwargs): "Evaluate the objective gradient from the ADOL-C tape." return adolc.gradient(self._obj_trace_id, x) def hess(self, x, z, **kwargs): "Return the Hessian of the objective at x." if has_colpack: return self.sparse_hess(x, z, **kwargs) return self.dense_hess(x, z, **kwargs) def dense_hess(self, x, z, **kwargs): "Return the Hessian of the objective at x in dense format." return adolc.hessian(self._obj_trace_id, x) def hprod(self, x, z, v, **kwargs): "Return the Hessian-vector product at x with v." return adolc.hess_vec(self._obj_trace_id, x, v) def sparse_hess(self, x, z, **kwargs): "Return the Hessian of the objective at x in sparse format." options = np.zeros(2,dtype=int) if self.first_sparse_hess_eval: nnz, rind, cind, values = \ adolc.colpack.sparse_hess_no_repeat(self._obj_trace_id, x, options=options) self.nnzH = nnz self.hess_rind = rind self.hess_cind = cind self.hess_values = values self.first_sparse_hess_eval = False return rind, cind, values else: return adolc.colpack.sparse_hess_repeat(self._obj_trace_id, x, self.hess_rind, self.hess_cind, self.hess_values) def _adolc_cons(self, x, **kwargs): "Evaluate the constraints from the ADOL-C tape." return adolc.function(self._con_trace_id, x) def jac(self, x, **kwargs): "Return constraints Jacobian at x." if has_colpack: return self.sparse_jac(x, **kwargs) return self.dense_jac(x, **kwargs) def dense_jac(self, x, **kwargs): "Return constraints Jacobian at x in dense format." return self._adolc_jac(x, **kwargs) def _adolc_jac(self, x, **kwargs): "Evaluate the constraints Jacobian from the ADOL-C tape." 
return adolc.jacobian(self._con_trace_id, x) def sparse_jac(self, x, **kwargs): "Return constraints Jacobian at x in sparse format." [nnz, rind, cind, values] =sparse_jac_no_repeat(tape_tag, x, options) options = np.zeros(4,dtype=int) if self.first_sparse_jac_eval: nnz, rind, cind, values = \ adolc.colpack.sparse_jac_no_repeat(self._con_trace_id, x, options=options) self.nnzJ = nnz self.jac_rind = rind self.jac_cind = cind self.jac_values = values self.first_sparse_jac_eval = False return rind, cind, values else: return adolc.colpack.sparse_jac_repeat(self._jac_trace_id, x, self.jac_rind, self.jac_cind, self.jac_values) def jac_vec(self, x, v, **kwargs): "Return the product of v with the Jacobian at x." return adolc.jac_vec(self._con_trace_id, x, v) def vec_jac(self, x, v, **kwargs): "Return the product of v with the transpose Jacobian at x." return adolc.vec_jac(self._con_trace_id, x, v) def get_jac_linop(self, x, **kwargs): "Return the Jacobian at x as a linear operator." J = SimpleLinearOperator(self.n, self.m, lambda v: self.jac_vec(x,v), matvec_transp=lambda v: self.vec_jac(x,v), symmetric=False) return J if __name__ == '__main__': from nlpy.optimize.solvers.lbfgs import LBFGSFramework from nlpy.optimize.solvers.ldfp import LDFPTrunkFramework from nlpy.optimize.solvers.trunk import TrunkFramework from nlpy.optimize.tr.trustregion import TrustRegionFramework as TR from nlpy.optimize.tr.trustregion import TrustRegionCG as TRSolver import nlpy.tools.logs import logging, sys # Define a few problems. class AdolcRosenbrock(AdolcModel): def obj(self, x, **kwargs): return np.sum( 100*(x[1:] - x[:-1]**2)**2 + (1 - x[:-1])**2 ) class AdolcHS7(AdolcModel): def obj(self, x, **kwargs): return np.log(1 + x[0]**2) - x[1] def cons(self, x, **kwargs): return (1 + x[0]**2)**2 + x[1]**2 - 4 nvar = 5 rosenbrock = AdolcRosenbrock(n=nvar, name='Rosenbrock', x0=-np.ones(nvar)) hs7 = AdolcHS7(n=2, m=1, name='HS7', x0=2*np.ones(2)) nlp = hs7 g = nlp.grad(nlp.x0) H = nlp.hess(nlp.x0, nlp.x0) #H_sparse = nlp.sparse_hess(nlp.x0, nlp.x0) print 'number of variables: ', nlp.n print 'initial guess: ', nlp.x0 print 'f(x0) = ', nlp.obj(nlp.x0) print 'g(x0) = ', g print 'H(x0) = ', H #print 'H_sparse(x0) = ', H_sparse if nlp.m > 0 : print 'number of constraints: ', nlp.m c = nlp.cons(nlp.x0) J = nlp.jac(nlp.x0) v = np.array([-1.,-1.]) w = np.array([2]) print 'c(x0) = ', c print 'J(x0) = ', J print 'J(x0) * [-1,1] = ', nlp.jac_vec(nlp.x0, v) print 'J(x0).T * [-2] = ', nlp.vec_jac(nlp.x0, w) # # Solve with linesearch-based L-BFGS method. # lbfgs = LBFGSFramework(nlp, npairs=5, scaling=True, silent=False) # lbfgs.solve() # # Create root logger. # log = logging.getLogger('adolcmodel') # log.setLevel(logging.INFO) # fmt = logging.Formatter('%(name)-15s %(levelname)-8s %(message)s') # hndlr = logging.StreamHandler(sys.stdout) # hndlr.setFormatter(fmt) # log.addHandler(hndlr) # # Configure the subproblem solver logger # nlpy.tools.logs.config_logger('adolcmodel.ldfp', # filemode='w', # stream=sys.stdout) # tr = TR(Delta=1.0, eta1=0.05, eta2=0.9, gamma1=0.25, gamma2=2.5) # # Solve with trust-region-based L-DFP method. # # ldfp = LDFPTrunkFramework(nlp, tr, TRSolver, # # ny=True, monotone=False, # # logger_name='adolcmodel.ldfp') # # ldfp.TR.Delta = 0.1 * np.linalg.norm(g) # Reset initial trust-region radius # # ldfp.Solve() # # Solve with trust-region-based method. 
# trnk = TrunkFramework(nlp, tr, TRSolver, # ny=True, monotone=False, # logger_name='adolcmodel.ldfp') # trnk.TR.Delta = 0.1 * np.linalg.norm(g) # Reset initial trust-region radius # trnk.Solve()
gpl-3.0
pshen/ansible
test/units/mock/yaml_helper.py
209
5267
import io import yaml from ansible.module_utils.six import PY3 from ansible.parsing.yaml.loader import AnsibleLoader from ansible.parsing.yaml.dumper import AnsibleDumper class YamlTestUtils(object): """Mixin class to combine with a unittest.TestCase subclass.""" def _loader(self, stream): """Vault related tests will want to override this. Vault cases should setup a AnsibleLoader that has the vault password.""" return AnsibleLoader(stream) def _dump_stream(self, obj, stream, dumper=None): """Dump to a py2-unicode or py3-string stream.""" if PY3: return yaml.dump(obj, stream, Dumper=dumper) else: return yaml.dump(obj, stream, Dumper=dumper, encoding=None) def _dump_string(self, obj, dumper=None): """Dump to a py2-unicode or py3-string""" if PY3: return yaml.dump(obj, Dumper=dumper) else: return yaml.dump(obj, Dumper=dumper, encoding=None) def _dump_load_cycle(self, obj): # Each pass though a dump or load revs the 'generation' # obj to yaml string string_from_object_dump = self._dump_string(obj, dumper=AnsibleDumper) # wrap a stream/file like StringIO around that yaml stream_from_object_dump = io.StringIO(string_from_object_dump) loader = self._loader(stream_from_object_dump) # load the yaml stream to create a new instance of the object (gen 2) obj_2 = loader.get_data() # dump the gen 2 objects directory to strings string_from_object_dump_2 = self._dump_string(obj_2, dumper=AnsibleDumper) # The gen 1 and gen 2 yaml strings self.assertEquals(string_from_object_dump, string_from_object_dump_2) # the gen 1 (orig) and gen 2 py object self.assertEquals(obj, obj_2) # again! gen 3... load strings into py objects stream_3 = io.StringIO(string_from_object_dump_2) loader_3 = self._loader(stream_3) obj_3 = loader_3.get_data() string_from_object_dump_3 = self._dump_string(obj_3, dumper=AnsibleDumper) self.assertEquals(obj, obj_3) # should be transitive, but... 
self.assertEquals(obj_2, obj_3) self.assertEquals(string_from_object_dump, string_from_object_dump_3) def _old_dump_load_cycle(self, obj): '''Dump the passed in object to yaml, load it back up, dump again, compare.''' stream = io.StringIO() yaml_string = self._dump_string(obj, dumper=AnsibleDumper) self._dump_stream(obj, stream, dumper=AnsibleDumper) yaml_string_from_stream = stream.getvalue() # reset stream stream.seek(0) loader = self._loader(stream) # loader = AnsibleLoader(stream, vault_password=self.vault_password) obj_from_stream = loader.get_data() stream_from_string = io.StringIO(yaml_string) loader2 = self._loader(stream_from_string) # loader2 = AnsibleLoader(stream_from_string, vault_password=self.vault_password) obj_from_string = loader2.get_data() stream_obj_from_stream = io.StringIO() stream_obj_from_string = io.StringIO() if PY3: yaml.dump(obj_from_stream, stream_obj_from_stream, Dumper=AnsibleDumper) yaml.dump(obj_from_stream, stream_obj_from_string, Dumper=AnsibleDumper) else: yaml.dump(obj_from_stream, stream_obj_from_stream, Dumper=AnsibleDumper, encoding=None) yaml.dump(obj_from_stream, stream_obj_from_string, Dumper=AnsibleDumper, encoding=None) yaml_string_stream_obj_from_stream = stream_obj_from_stream.getvalue() yaml_string_stream_obj_from_string = stream_obj_from_string.getvalue() stream_obj_from_stream.seek(0) stream_obj_from_string.seek(0) if PY3: yaml_string_obj_from_stream = yaml.dump(obj_from_stream, Dumper=AnsibleDumper) yaml_string_obj_from_string = yaml.dump(obj_from_string, Dumper=AnsibleDumper) else: yaml_string_obj_from_stream = yaml.dump(obj_from_stream, Dumper=AnsibleDumper, encoding=None) yaml_string_obj_from_string = yaml.dump(obj_from_string, Dumper=AnsibleDumper, encoding=None) assert yaml_string == yaml_string_obj_from_stream assert yaml_string == yaml_string_obj_from_stream == yaml_string_obj_from_string assert (yaml_string == yaml_string_obj_from_stream == yaml_string_obj_from_string == yaml_string_stream_obj_from_stream == yaml_string_stream_obj_from_string) assert obj == obj_from_stream assert obj == obj_from_string assert obj == yaml_string_obj_from_stream assert obj == yaml_string_obj_from_string assert obj == obj_from_stream == obj_from_string == yaml_string_obj_from_stream == yaml_string_obj_from_string return {'obj': obj, 'yaml_string': yaml_string, 'yaml_string_from_stream': yaml_string_from_stream, 'obj_from_stream': obj_from_stream, 'obj_from_string': obj_from_string, 'yaml_string_obj_from_string': yaml_string_obj_from_string}
gpl-3.0
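A minimal sketch of how the YamlTestUtils mixin in the record above is meant to be combined with a unittest.TestCase subclass, as its docstring suggests; the test data here is arbitrary.

import unittest

class RoundTripTest(YamlTestUtils, unittest.TestCase):
    def test_plain_mapping_roundtrip(self):
        # dump -> load -> dump again and compare the generations
        self._dump_load_cycle({'name': 'example', 'values': [1, 2, 3]})

if __name__ == '__main__':
    unittest.main()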
c4mb0t/django-setman
testproject/core/migrations/0001_initial.py
2
4372
# encoding: utf-8 import datetime from south.db import db from south.v2 import SchemaMigration from django.db import models class Migration(SchemaMigration): def forwards(self, orm): # Adding model 'UserProfile' db.create_table('core_userprofile', ( ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)), ('user', self.gf('django.db.models.fields.related.OneToOneField')(related_name='profile', unique=True, to=orm['auth.User'])), ('role', self.gf('django.db.models.fields.CharField')(default='writer', max_length=32)), )) db.send_create_signal('core', ['UserProfile']) def backwards(self, orm): # Deleting model 'UserProfile' db.delete_table('core_userprofile') models = { 'auth.group': { 'Meta': {'object_name': 'Group'}, 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}), 'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}) }, 'auth.permission': { 'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'}, 'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}), 'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}) }, 'auth.user': { 'Meta': {'object_name': 'User'}, 'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}), 'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}), 'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}), 'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}), 'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}), 'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}), 'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'}) }, 'contenttypes.contenttype': { 'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"}, 'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}) }, 'core.userprofile': { 'Meta': {'object_name': 'UserProfile'}, 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'role': 
('django.db.models.fields.CharField', [], {'default': "'writer'", 'max_length': '32'}), 'user': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'profile'", 'unique': 'True', 'to': "orm['auth.User']"}) } } complete_apps = ['core']
bsd-3-clause
andela-ifageyinbo/django
tests/m2m_intermediary/tests.py
381
1334
from __future__ import unicode_literals from datetime import datetime from django.test import TestCase from django.utils import six from .models import Article, Reporter, Writer class M2MIntermediaryTests(TestCase): def test_intermeiary(self): r1 = Reporter.objects.create(first_name="John", last_name="Smith") r2 = Reporter.objects.create(first_name="Jane", last_name="Doe") a = Article.objects.create( headline="This is a test", pub_date=datetime(2005, 7, 27) ) w1 = Writer.objects.create(reporter=r1, article=a, position="Main writer") w2 = Writer.objects.create(reporter=r2, article=a, position="Contributor") self.assertQuerysetEqual( a.writer_set.select_related().order_by("-position"), [ ("John Smith", "Main writer"), ("Jane Doe", "Contributor"), ], lambda w: (six.text_type(w.reporter), w.position) ) self.assertEqual(w1.reporter, r1) self.assertEqual(w2.reporter, r2) self.assertEqual(w1.article, a) self.assertEqual(w2.article, a) self.assertQuerysetEqual( r1.writer_set.all(), [ ("John Smith", "Main writer") ], lambda w: (six.text_type(w.reporter), w.position) )
bsd-3-clause
sbyoun/i-mapreduce
src/contrib/hod/hodlib/RingMaster/idleJobTracker.py
182
9106
#Licensed to the Apache Software Foundation (ASF) under one #or more contributor license agreements. See the NOTICE file #distributed with this work for additional information #regarding copyright ownership. The ASF licenses this file #to you under the Apache License, Version 2.0 (the #"License"); you may not use this file except in compliance #with the License. You may obtain a copy of the License at # http://www.apache.org/licenses/LICENSE-2.0 #Unless required by applicable law or agreed to in writing, software #distributed under the License is distributed on an "AS IS" BASIS, #WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #See the License for the specific language governing permissions and #limitations under the License. import os, re, time from hodlib.Common.threads import loop, func from hodlib.Common.threads import simpleCommand from hodlib.Common.util import get_exception_string, hadoopVersion class HadoopJobStatus: """This class represents the status of a single Hadoop job""" def __init__(self, jobId, status): self.__jobId = jobId self.__status = status def getJobId(self): return self.__jobId def getStatus(self): return self.__status class HadoopClientException(Exception): """This class represents an exception that is raised when we fail in running the job client.""" def __init__(self, errorCode): self.errorCode = errorCode class JobTrackerMonitor: """This class monitors the JobTracker of an allocated cluster periodically to detect whether it is idle. If it is found to be idle for more than a configured limit, it calls back registered handlers who can act upon the idle cluster.""" def __init__(self, log, idleJTHandler, interval, limit, hadoopDir, javaHome, servInfoProvider): self.__log = log self.__idlenessLimit = limit self.__idleJobTrackerHandler = idleJTHandler self.__hadoopDir = hadoopDir hadoopPath = os.path.join(self.__hadoopDir, "bin", "hadoop") #hadoop directory can be from pkgs or a temp location like tarball. Verify once. if not os.path.exists(hadoopPath): raise Exception('Invalid Hadoop path specified: %s' % hadoopPath) self.__javaHome = javaHome # Note that when this object is created, we don't yet know the JT URL. # The service info provider will be polled until we get the URL. self.__serviceInfoProvider = servInfoProvider self.__jobCountRegExp = re.compile("([0-9]+) jobs currently running.*") self.__jobStatusRegExp = re.compile("(\S+)\s+(\d)\s+\d+\s+\S+$") self.__firstIdleTime = 0 self.__hadoop15Version = { 'major' : '0', 'minor' : '15' } #Assumption: we are not going to support versions older than 0.15 for Idle Job tracker. 
if not self.__isCompatibleHadoopVersion(self.__hadoop15Version): raise Exception('Incompatible Hadoop Version: Cannot check status') self.__stopFlag = False self.__jtURLFinderThread = func(name='JTURLFinderThread', functionRef=self.getJobTrackerURL) self.__jtMonitorThread = loop(name='JTMonitorThread', functionRef=self.monitorJobTracker, sleep=interval) self.__jobTrackerURL = None def start(self): """This method starts a thread that will determine the JobTracker URL""" self.__jtURLFinderThread.start() def stop(self): self.__log.debug('Joining the monitoring thread.') self.__stopFlag = True if self.__jtMonitorThread.isAlive(): self.__jtMonitorThread.join() self.__log.debug('Joined the monitoring thread.') def getJobTrackerURL(self): """This method periodically checks the service info provider for the JT URL""" self.__jobTrackerURL = self.__serviceInfoProvider.getServiceAddr('mapred') while not self.__stopFlag and not self.__isValidJobTrackerURL(): time.sleep(10) if not self.__stopFlag: self.__jobTrackerURL = self.__serviceInfoProvider.getServiceAddr('mapred') else: break if self.__isValidJobTrackerURL(): self.__log.debug('Got URL %s. Starting monitoring' % self.__jobTrackerURL) self.__jtMonitorThread.start() def monitorJobTracker(self): """This method is periodically called to monitor the JobTracker of the cluster.""" try: if self.__isIdle(): if self.__idleJobTrackerHandler: self.__log.info('Detected cluster as idle. Calling registered callback handler.') self.__idleJobTrackerHandler.handleIdleJobTracker() except: self.__log.debug('Exception while monitoring job tracker. %s' % get_exception_string()) def getJobsStatus(self): """This method should return the status of all jobs that are run on the HOD allocated hadoop cluster""" jobStatusList = [] try: hadoop16Version = { 'major' : '0', 'minor' : '16' } if self.__isCompatibleHadoopVersion(hadoop16Version): jtStatusCommand = self.__initStatusCommand(option='-list all') jtStatusCommand.start() jtStatusCommand.wait() jtStatusCommand.join() if jtStatusCommand.exit_code() == 0: for line in jtStatusCommand.output(): jobStatus = self.__extractJobStatus(line) if jobStatus is not None: jobStatusList.append(jobStatus) except: self.__log.debug('Exception while getting job statuses. %s' % get_exception_string()) return jobStatusList def __isValidJobTrackerURL(self): """This method checks that the passed in URL is not one of the special case strings returned by the getServiceAddr API""" return ((self.__jobTrackerURL != None) and (self.__jobTrackerURL != 'not found') \ and (not self.__jobTrackerURL.startswith('Error'))) def __extractJobStatus(self, line): """This method parses an output line from the job status command and creates the JobStatus object if there is a match""" jobStatus = None line = line.strip() jsMatch = self.__jobStatusRegExp.match(line) if jsMatch: jobStatus = HadoopJobStatus(jsMatch.group(1), int(jsMatch.group(2))) return jobStatus def __isIdle(self): """This method checks if the JobTracker is idle beyond a certain limit.""" jobCount = 0 err = False try: jobCount = self.__getJobCount() except HadoopClientException, hce: self.__log.debug('HadoopClientException handled in getting job count. 
\ Error code: %s' % hce.errorCode) err = True if (jobCount==0) or err: if self.__firstIdleTime == 0: #detecting idleness for the first time self.__firstIdleTime = time.time() else: if ((time.time()-self.__firstIdleTime) >= self.__idlenessLimit): self.__log.info('Idleness limit crossed for cluster') return True else: # reset idleness time self.__firstIdleTime = 0 return False def __getJobCount(self): """This method executes the hadoop job -list command and parses the output to detect the number of running jobs.""" # We assume here that the poll interval is small enough to detect running jobs. # If jobs start and stop within the poll interval, the cluster would be incorrectly # treated as idle. Hadoop 2266 will provide a better mechanism than this. jobs = -1 jtStatusCommand = self.__initStatusCommand() jtStatusCommand.start() jtStatusCommand.wait() jtStatusCommand.join() if jtStatusCommand.exit_code() == 0: for line in jtStatusCommand.output(): match = self.__jobCountRegExp.match(line) if match: jobs = int(match.group(1)) elif jtStatusCommand.exit_code() == 1: # for now, exit code 1 comes for any exception raised by JobClient. If hadoop gets # to differentiate and give more granular exit codes, we can check for those errors # corresponding to network errors etc. raise HadoopClientException(jtStatusCommand.exit_code()) return jobs def __isCompatibleHadoopVersion(self, expectedVersion): """This method determines whether the version of hadoop being used is one that is higher than the expectedVersion. This can be used for checking if a particular feature is available or not""" ver = hadoopVersion(self.__hadoopDir, self.__javaHome, self.__log) ret = False if (ver['major']!=None) and (int(ver['major']) >= int(expectedVersion['major'])) \ and (ver['minor']!=None) and (int(ver['minor']) >= int(expectedVersion['minor'])): ret = True return ret def __initStatusCommand(self, option="-list"): """This method initializes the command to run to check the JT status""" cmd = None hadoopPath = os.path.join(self.__hadoopDir, 'bin', 'hadoop') cmdStr = "%s job -jt %s" % (hadoopPath, self.__jobTrackerURL) cmdStr = "%s %s" % (cmdStr, option) self.__log.debug('cmd str %s' % cmdStr) env = os.environ env['JAVA_HOME'] = self.__javaHome cmd = simpleCommand('HadoopStatus', cmdStr, env) return cmd
apache-2.0
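A wiring sketch for the JobTrackerMonitor in the record above. The handler and service-info provider are stand-ins for the real HOD objects, and the paths are assumptions; the constructor verifies that bin/hadoop exists under hadoopDir and probes the Hadoop version, so this only runs against an actual installation.

import logging

class DeallocateOnIdle(object):
    def handleIdleJobTracker(self):
        print('cluster idle beyond limit - deallocating')

class StaticServiceInfo(object):
    def getServiceAddr(self, service):
        return 'jt.example.com:50030'  # hypothetical JobTracker address

monitor = JobTrackerMonitor(logging.getLogger('hod.ringmaster'), DeallocateOnIdle(),
                            interval=60, limit=1800,
                            hadoopDir='/opt/hadoop', javaHome='/usr/lib/jvm/java',
                            servInfoProvider=StaticServiceInfo())
monitor.start()  # spawns the JT-URL finder thread, which then starts periodic monitoring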
MichaelVL/osm-analytic-tracker
HumanTime.py
1
2895
import datetime, pytz import re import tzlocal def date2human(when, slack_secs=180): """ Convert timestamp to human-friendly times, like '3 minutes ago' """ if not when: return None now = datetime.datetime.utcnow().replace(tzinfo=pytz.utc) diff = now-when secs = diff.seconds days = diff.days if days > 0: return str(when) if secs >= 3600: if (secs<7200): return '1 hour ago' else: return str(secs/3600)+' hours ago' else: if (secs <slack_secs): return 'a moment ago' else: return str(secs/60)+' minutes ago' def human2date(when, past=True): """UTC timestamps from human 'encoding' like '2 hours ago'. Human timestamps are relative to local time zone.""" # This is not millisecond precise... local_tz = tzlocal.get_localzone() now = datetime.datetime.now().replace(tzinfo=local_tz) utcnow = datetime.datetime.utcnow().replace(tzinfo=pytz.utc) if when == 'now': return utcnow if when == 'today': want = now.replace(hour=0, minute=0, second=0, microsecond=0) if not past: want += datetime.timedelta(days=1) newtime = utcnow-(now-want) return newtime if when == 'yesterday': want = now.replace(hour=0, minute=0, second=0, microsecond=0) newtime = utcnow-(now-want) return newtime-datetime.timedelta(days=1) in_past = in_future = False if when.endswith(' ago'): in_past = True if when.startswith('in ') or when.startswith('after '): in_future = True if in_past and in_future: raise TypeError('Time cannot be in the past and in the future') r = re.compile('(\d+) days?( ago)?') m = r.match(when) if m: td = datetime.timedelta(days=float(m.group(1))) return utcnow-td r = re.compile('(\d+) hours?( ago)?') m = r.match(when) if m: td = datetime.timedelta(hours=float(m.group(1))) return utcnow-td r = re.compile('(\d+) minutes?( ago)?') m = r.match(when) if m: td = datetime.timedelta(minutes=float(m.group(1))) return utcnow-td formats = ['%H:%M'] for fmt in formats: try: td = datetime.datetime.strptime(when, fmt).replace(tzinfo=local_tz) new = now if '%H' in fmt: new = new.replace(hour=td.hour) if '%M' in fmt: new = new.replace(minute=td.minute) if '%S' in fmt: new = new.replace(second=td.second) else: new = new.replace(second=0) if '%d' in fmt: new = new.replace(day=td.day) return new except ValueError: pass return datetime.datetime.strptime(when, '%Y-%m-%dT%H:%M:%SZ').replace(tzinfo=pytz.utc)
gpl-2.0
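A few illustrative calls for the HumanTime helpers in the record above; the exact output depends on the current time and the local timezone.

import datetime, pytz

now_utc = datetime.datetime.utcnow().replace(tzinfo=pytz.utc)
print(date2human(now_utc))        # 'a moment ago'
print(human2date('2 hours ago'))  # timezone-aware UTC datetime two hours in the past
print(human2date('yesterday'))    # start of yesterday (local midnight), expressed in UTC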
apark263/tensorflow
tensorflow/python/kernel_tests/distributions/multinomial_test.py
22
14730
# Copyright 2016 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== from __future__ import absolute_import from __future__ import division from __future__ import print_function import numpy as np from tensorflow.python.eager import backprop from tensorflow.python.framework import constant_op from tensorflow.python.framework import dtypes from tensorflow.python.framework import tensor_shape from tensorflow.python.framework import test_util from tensorflow.python.ops import array_ops from tensorflow.python.ops import math_ops from tensorflow.python.ops.distributions import multinomial from tensorflow.python.platform import test class MultinomialTest(test.TestCase): def setUp(self): self._rng = np.random.RandomState(42) @test_util.run_v1_only("b/120545219") def testSimpleShapes(self): with self.cached_session(): p = [.1, .3, .6] dist = multinomial.Multinomial(total_count=1., probs=p) self.assertEqual(3, dist.event_shape_tensor().eval()) self.assertAllEqual([], dist.batch_shape_tensor().eval()) self.assertEqual(tensor_shape.TensorShape([3]), dist.event_shape) self.assertEqual(tensor_shape.TensorShape([]), dist.batch_shape) @test_util.run_v1_only("b/120545219") def testComplexShapes(self): with self.cached_session(): p = 0.5 * np.ones([3, 2, 2], dtype=np.float32) n = [[3., 2], [4, 5], [6, 7]] dist = multinomial.Multinomial(total_count=n, probs=p) self.assertEqual(2, dist.event_shape_tensor().eval()) self.assertAllEqual([3, 2], dist.batch_shape_tensor().eval()) self.assertEqual(tensor_shape.TensorShape([2]), dist.event_shape) self.assertEqual(tensor_shape.TensorShape([3, 2]), dist.batch_shape) @test_util.run_v1_only("b/120545219") def testN(self): p = [[0.1, 0.2, 0.7], [0.2, 0.3, 0.5]] n = [[3.], [4]] with self.cached_session(): dist = multinomial.Multinomial(total_count=n, probs=p) self.assertEqual((2, 1), dist.total_count.get_shape()) self.assertAllClose(n, dist.total_count.eval()) @test_util.run_v1_only("b/120545219") def testP(self): p = [[0.1, 0.2, 0.7]] with self.cached_session(): dist = multinomial.Multinomial(total_count=3., probs=p) self.assertEqual((1, 3), dist.probs.get_shape()) self.assertEqual((1, 3), dist.logits.get_shape()) self.assertAllClose(p, dist.probs.eval()) @test_util.run_v1_only("b/120545219") def testLogits(self): p = np.array([[0.1, 0.2, 0.7]], dtype=np.float32) logits = np.log(p) - 50. 
with self.cached_session(): multinom = multinomial.Multinomial(total_count=3., logits=logits) self.assertEqual((1, 3), multinom.probs.get_shape()) self.assertEqual((1, 3), multinom.logits.get_shape()) self.assertAllClose(p, multinom.probs.eval()) self.assertAllClose(logits, multinom.logits.eval()) @test_util.run_v1_only("b/120545219") def testPmfUnderflow(self): logits = np.array([[-200, 0]], dtype=np.float32) with self.cached_session(): dist = multinomial.Multinomial(total_count=1., logits=logits) lp = dist.log_prob([1., 0.]).eval()[0] self.assertAllClose(-200, lp, atol=0, rtol=1e-6) @test_util.run_v1_only("b/120545219") def testPmfandCountsAgree(self): p = [[0.1, 0.2, 0.7]] n = [[5.]] with self.cached_session(): dist = multinomial.Multinomial(total_count=n, probs=p, validate_args=True) dist.prob([2., 3, 0]).eval() dist.prob([3., 0, 2]).eval() with self.assertRaisesOpError("must be non-negative"): dist.prob([-1., 4, 2]).eval() with self.assertRaisesOpError("counts must sum to `self.total_count`"): dist.prob([3., 3, 0]).eval() @test_util.run_v1_only("b/120545219") def testPmfNonIntegerCounts(self): p = [[0.1, 0.2, 0.7]] n = [[5.]] with self.cached_session(): # No errors with integer n. multinom = multinomial.Multinomial( total_count=n, probs=p, validate_args=True) multinom.prob([2., 1, 2]).eval() multinom.prob([3., 0, 2]).eval() # Counts don't sum to n. with self.assertRaisesOpError("counts must sum to `self.total_count`"): multinom.prob([2., 3, 2]).eval() # Counts are non-integers. x = array_ops.placeholder(dtypes.float32) with self.assertRaisesOpError( "cannot contain fractional components."): multinom.prob(x).eval(feed_dict={x: [1.0, 2.5, 1.5]}) multinom = multinomial.Multinomial( total_count=n, probs=p, validate_args=False) multinom.prob([1., 2., 2.]).eval() # Non-integer arguments work. multinom.prob([1.0, 2.5, 1.5]).eval() def testPmfBothZeroBatches(self): with self.cached_session(): # Both zero-batches. No broadcast p = [0.5, 0.5] counts = [1., 0] pmf = multinomial.Multinomial(total_count=1., probs=p).prob(counts) self.assertAllClose(0.5, self.evaluate(pmf)) self.assertEqual((), pmf.get_shape()) def testPmfBothZeroBatchesNontrivialN(self): with self.cached_session(): # Both zero-batches. No broadcast p = [0.1, 0.9] counts = [3., 2] dist = multinomial.Multinomial(total_count=5., probs=p) pmf = dist.prob(counts) # 5 choose 3 = 5 choose 2 = 10. 10 * (.9)^2 * (.1)^3 = 81/10000. self.assertAllClose(81. 
/ 10000, self.evaluate(pmf)) self.assertEqual((), pmf.get_shape()) def testPmfPStretchedInBroadcastWhenSameRank(self): with self.cached_session(): p = [[0.1, 0.9]] counts = [[1., 0], [0, 1]] pmf = multinomial.Multinomial(total_count=1., probs=p).prob(counts) self.assertAllClose([0.1, 0.9], self.evaluate(pmf)) self.assertEqual((2), pmf.get_shape()) def testPmfPStretchedInBroadcastWhenLowerRank(self): with self.cached_session(): p = [0.1, 0.9] counts = [[1., 0], [0, 1]] pmf = multinomial.Multinomial(total_count=1., probs=p).prob(counts) self.assertAllClose([0.1, 0.9], self.evaluate(pmf)) self.assertEqual((2), pmf.get_shape()) @test_util.run_v1_only("b/120545219") def testPmfCountsStretchedInBroadcastWhenSameRank(self): with self.cached_session(): p = [[0.1, 0.9], [0.7, 0.3]] counts = [[1., 0]] pmf = multinomial.Multinomial(total_count=1., probs=p).prob(counts) self.assertAllClose(pmf.eval(), [0.1, 0.7]) self.assertEqual((2), pmf.get_shape()) @test_util.run_v1_only("b/120545219") def testPmfCountsStretchedInBroadcastWhenLowerRank(self): with self.cached_session(): p = [[0.1, 0.9], [0.7, 0.3]] counts = [1., 0] pmf = multinomial.Multinomial(total_count=1., probs=p).prob(counts) self.assertAllClose(pmf.eval(), [0.1, 0.7]) self.assertEqual(pmf.get_shape(), (2)) def testPmfShapeCountsStretchedN(self): with self.cached_session(): # [2, 2, 2] p = [[[0.1, 0.9], [0.1, 0.9]], [[0.7, 0.3], [0.7, 0.3]]] # [2, 2] n = [[3., 3], [3, 3]] # [2] counts = [2., 1] pmf = multinomial.Multinomial(total_count=n, probs=p).prob(counts) self.evaluate(pmf) self.assertEqual(pmf.get_shape(), (2, 2)) def testPmfShapeCountsPStretchedN(self): with self.cached_session(): p = [0.1, 0.9] counts = [3., 2] n = np.full([4, 3], 5., dtype=np.float32) pmf = multinomial.Multinomial(total_count=n, probs=p).prob(counts) self.evaluate(pmf) self.assertEqual((4, 3), pmf.get_shape()) @test_util.run_v1_only("b/120545219") def testMultinomialMean(self): with self.cached_session(): n = 5. p = [0.1, 0.2, 0.7] dist = multinomial.Multinomial(total_count=n, probs=p) expected_means = 5 * np.array(p, dtype=np.float32) self.assertEqual((3,), dist.mean().get_shape()) self.assertAllClose(expected_means, dist.mean().eval()) @test_util.run_v1_only("b/120545219") def testMultinomialCovariance(self): with self.cached_session(): n = 5. p = [0.1, 0.2, 0.7] dist = multinomial.Multinomial(total_count=n, probs=p) expected_covariances = [[9. / 20, -1 / 10, -7 / 20], [-1 / 10, 4 / 5, -7 / 10], [-7 / 20, -7 / 10, 21 / 20]] self.assertEqual((3, 3), dist.covariance().get_shape()) self.assertAllClose(expected_covariances, dist.covariance().eval()) @test_util.run_v1_only("b/120545219") def testMultinomialCovarianceBatch(self): with self.cached_session(): # Shape [2] n = [5.] * 2 # Shape [4, 1, 2] p = [[[0.1, 0.9]], [[0.1, 0.9]]] * 2 dist = multinomial.Multinomial(total_count=n, probs=p) # Shape [2, 2] inner_var = [[9. 
/ 20, -9 / 20], [-9 / 20, 9 / 20]] # Shape [4, 2, 2, 2] expected_covariances = [[inner_var, inner_var]] * 4 self.assertEqual((4, 2, 2, 2), dist.covariance().get_shape()) self.assertAllClose(expected_covariances, dist.covariance().eval()) def testCovarianceMultidimensional(self): # Shape [3, 5, 4] p = np.random.dirichlet([.25, .25, .25, .25], [3, 5]).astype(np.float32) # Shape [6, 3, 3] p2 = np.random.dirichlet([.3, .3, .4], [6, 3]).astype(np.float32) ns = np.random.randint(low=1, high=11, size=[3, 5]).astype(np.float32) ns2 = np.random.randint(low=1, high=11, size=[6, 1]).astype(np.float32) with self.cached_session(): dist = multinomial.Multinomial(ns, p) dist2 = multinomial.Multinomial(ns2, p2) covariance = dist.covariance() covariance2 = dist2.covariance() self.assertEqual((3, 5, 4, 4), covariance.get_shape()) self.assertEqual((6, 3, 3, 3), covariance2.get_shape()) @test_util.run_v1_only("b/120545219") def testCovarianceFromSampling(self): # We will test mean, cov, var, stddev on a DirichletMultinomial constructed # via broadcast between alpha, n. theta = np.array([[1., 2, 3], [2.5, 4, 0.01]], dtype=np.float32) theta /= np.sum(theta, 1)[..., array_ops.newaxis] n = np.array([[10., 9.], [8., 7.], [6., 5.]], dtype=np.float32) with self.cached_session() as sess: # batch_shape=[3, 2], event_shape=[3] dist = multinomial.Multinomial(n, theta) x = dist.sample(int(1000e3), seed=1) sample_mean = math_ops.reduce_mean(x, 0) x_centered = x - sample_mean[array_ops.newaxis, ...] sample_cov = math_ops.reduce_mean(math_ops.matmul( x_centered[..., array_ops.newaxis], x_centered[..., array_ops.newaxis, :]), 0) sample_var = array_ops.matrix_diag_part(sample_cov) sample_stddev = math_ops.sqrt(sample_var) [ sample_mean_, sample_cov_, sample_var_, sample_stddev_, analytic_mean, analytic_cov, analytic_var, analytic_stddev, ] = sess.run([ sample_mean, sample_cov, sample_var, sample_stddev, dist.mean(), dist.covariance(), dist.variance(), dist.stddev(), ]) self.assertAllClose(sample_mean_, analytic_mean, atol=0.01, rtol=0.01) self.assertAllClose(sample_cov_, analytic_cov, atol=0.01, rtol=0.01) self.assertAllClose(sample_var_, analytic_var, atol=0.01, rtol=0.01) self.assertAllClose(sample_stddev_, analytic_stddev, atol=0.01, rtol=0.01) @test_util.run_v1_only("b/120545219") def testSampleUnbiasedNonScalarBatch(self): with self.cached_session() as sess: dist = multinomial.Multinomial( total_count=[7., 6., 5.], logits=math_ops.log(2. * self._rng.rand(4, 3, 2).astype(np.float32))) n = int(3e4) x = dist.sample(n, seed=0) sample_mean = math_ops.reduce_mean(x, 0) # Cyclically rotate event dims left. x_centered = array_ops.transpose(x - sample_mean, [1, 2, 3, 0]) sample_covariance = math_ops.matmul( x_centered, x_centered, adjoint_b=True) / n [ sample_mean_, sample_covariance_, actual_mean_, actual_covariance_, ] = sess.run([ sample_mean, sample_covariance, dist.mean(), dist.covariance(), ]) self.assertAllEqual([4, 3, 2], sample_mean.get_shape()) self.assertAllClose(actual_mean_, sample_mean_, atol=0., rtol=0.10) self.assertAllEqual([4, 3, 2, 2], sample_covariance.get_shape()) self.assertAllClose( actual_covariance_, sample_covariance_, atol=0., rtol=0.20) @test_util.run_v1_only("b/120545219") def testSampleUnbiasedScalarBatch(self): with self.cached_session() as sess: dist = multinomial.Multinomial( total_count=5., logits=math_ops.log(2. * self._rng.rand(4).astype(np.float32))) n = int(5e3) x = dist.sample(n, seed=0) sample_mean = math_ops.reduce_mean(x, 0) x_centered = x - sample_mean # Already transposed to [n, 2]. 
sample_covariance = math_ops.matmul( x_centered, x_centered, adjoint_a=True) / n [ sample_mean_, sample_covariance_, actual_mean_, actual_covariance_, ] = sess.run([ sample_mean, sample_covariance, dist.mean(), dist.covariance(), ]) self.assertAllEqual([4], sample_mean.get_shape()) self.assertAllClose(actual_mean_, sample_mean_, atol=0., rtol=0.10) self.assertAllEqual([4, 4], sample_covariance.get_shape()) self.assertAllClose( actual_covariance_, sample_covariance_, atol=0., rtol=0.20) def testNotReparameterized(self): total_count = constant_op.constant(5.0) p = constant_op.constant([0.2, 0.6]) with backprop.GradientTape() as tape: tape.watch(total_count) tape.watch(p) dist = multinomial.Multinomial( total_count=total_count, probs=p) samples = dist.sample(100) grad_total_count, grad_p = tape.gradient(samples, [total_count, p]) self.assertIsNone(grad_total_count) self.assertIsNone(grad_p) if __name__ == "__main__": test.main()
apache-2.0
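The multinomial tests above check the distribution's moments against their closed forms: mean = n·p and covariance = n·(diag(p) − p pᵀ). A minimal NumPy sketch, independent of TensorFlow, reproducing the exact matrix hard-coded in `testMultinomialCovariance`:

```python
import numpy as np

# Closed-form multinomial moments: mean = n * p, cov = n * (diag(p) - p p^T).
# Same n and p as testMultinomialCovariance above.
n = 5.0
p = np.array([0.1, 0.2, 0.7])

mean = n * p
cov = n * (np.diag(p) - np.outer(p, p))

print(mean)  # [0.5 1.  3.5]
print(cov)
# [[ 0.45 -0.1  -0.35]
#  [-0.1   0.8  -0.7 ]
#  [-0.35 -0.7   1.05]]   i.e. [[9/20, -1/10, -7/20], ...] as asserted in the test
```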
wikimedia/operations-debs-python-diamond
src/collectors/mongodb/mongodb.py
5
12712
# coding=utf-8 """ Collects all number values from the db.serverStatus() command, other values are ignored. #### Dependencies * pymongo #### Example Configuration MongoDBCollector.conf ``` enabled = True hosts = localhost:27017, alias1@localhost:27018, etc ``` """ import diamond.collector from diamond.collector import str_to_bool import re import zlib try: import pymongo pymongo # workaround for pyflakes issue #13 except ImportError: pymongo = None try: from pymongo import ReadPreference ReadPreference # workaround for pyflakes issue #13 except ImportError: ReadPreference = None class MongoDBCollector(diamond.collector.Collector): MAX_CRC32 = 4294967295 def __init__(self, *args, **kwargs): self.__totals = {} super(MongoDBCollector, self).__init__(*args, **kwargs) def get_default_config_help(self): config_help = super(MongoDBCollector, self).get_default_config_help() config_help.update({ 'hosts': 'Array of hostname(:port) elements to get metrics from' 'Set an alias by prefixing host:port with alias@', 'host': 'A single hostname(:port) to get metrics from' ' (can be used instead of hosts and overrides it)', 'user': 'Username for authenticated login (optional)', 'passwd': 'Password for authenticated login (optional)', 'databases': 'A regex of which databases to gather metrics for.' ' Defaults to all databases.', 'ignore_collections': 'A regex of which collections to ignore.' ' MapReduce temporary collections (tmp.mr.*)' ' are ignored by default.', 'collection_sample_rate': 'Only send stats for a consistent subset ' 'of collections. This is applied after collections are ignored via' ' ignore_collections Sampling uses crc32 so it is consistent across' ' replicas. Value between 0 and 1. Default is 1', 'network_timeout': 'Timeout for mongodb connection (in seconds).' ' There is no timeout by default.', 'simple': 'Only collect the same metrics as mongostat.', 'translate_collections': 'Translate dot (.) to underscores (_)' ' in collection names.', 'ssl': 'True to enable SSL connections to the MongoDB server.' 
' Default is False' }) return config_help def get_default_config(self): """ Returns the default collector settings """ config = super(MongoDBCollector, self).get_default_config() config.update({ 'path': 'mongo', 'hosts': ['localhost'], 'user': None, 'passwd': None, 'databases': '.*', 'ignore_collections': '^tmp\.mr\.', 'network_timeout': None, 'simple': 'False', 'translate_collections': 'False', 'collection_sample_rate': 1, 'ssl': False }) return config def collect(self): """Collect number values from db.serverStatus()""" if pymongo is None: self.log.error('Unable to import pymongo') return hosts = self.config.get('hosts') # Convert a string config value to be an array if isinstance(hosts, basestring): hosts = [hosts] # we need this for backwards compatibility if 'host' in self.config: hosts = [self.config['host']] # convert network_timeout to integer if self.config['network_timeout']: self.config['network_timeout'] = int( self.config['network_timeout']) # convert collection_sample_rate to float if self.config['collection_sample_rate']: self.config['collection_sample_rate'] = float( self.config['collection_sample_rate']) # use auth if given if 'user' in self.config: user = self.config['user'] else: user = None if 'passwd' in self.config: passwd = self.config['passwd'] else: passwd = None for host in hosts: matches = re.search('((.+)\@)?(.+)?', host) alias = matches.group(2) host = matches.group(3) if alias is None: if len(hosts) == 1: # one host only, no need to have a prefix base_prefix = [] else: base_prefix = [re.sub('[:\.]', '_', host)] else: base_prefix = [alias] try: # Ensure that the SSL option is a boolean. if type(self.config['ssl']) is str: self.config['ssl'] = str_to_bool(self.config['ssl']) if ReadPreference is None: conn = pymongo.Connection( host, network_timeout=self.config['network_timeout'], ssl=self.config['ssl'], slave_okay=True ) else: conn = pymongo.Connection( host, network_timeout=self.config['network_timeout'], ssl=self.config['ssl'], read_preference=ReadPreference.SECONDARY, ) except Exception, e: self.log.error('Couldnt connect to mongodb: %s', e) continue # try auth if user: try: conn.admin.authenticate(user, passwd) except Exception, e: self.log.error('User auth given, but could not autheticate' + ' with host: %s, err: %s' % (host, e)) return{} data = conn.db.command('serverStatus') self._publish_transformed(data, base_prefix) if str_to_bool(self.config['simple']): data = self._extract_simple_data(data) self._publish_dict_with_prefix(data, base_prefix) db_name_filter = re.compile(self.config['databases']) ignored_collections = re.compile(self.config['ignore_collections']) sample_threshold = self.MAX_CRC32 * self.config[ 'collection_sample_rate'] for db_name in conn.database_names(): if not db_name_filter.search(db_name): continue db_stats = conn[db_name].command('dbStats') db_prefix = base_prefix + ['databases', db_name] self._publish_dict_with_prefix(db_stats, db_prefix) for collection_name in conn[db_name].collection_names(): if ignored_collections.search(collection_name): continue if (self.config['collection_sample_rate'] < 1 and ( zlib.crc32(collection_name) & 0xffffffff ) > sample_threshold): continue collection_stats = conn[db_name].command('collstats', collection_name) if str_to_bool(self.config['translate_collections']): collection_name = collection_name.replace('.', '_') collection_prefix = db_prefix + [collection_name] self._publish_dict_with_prefix(collection_stats, collection_prefix) def _publish_transformed(self, data, base_prefix): """ Publish 
values of type: counter or percent """ self._publish_dict_with_prefix(data.get('opcounters', {}), base_prefix + ['opcounters_per_sec'], self.publish_counter) self._publish_dict_with_prefix(data.get('opcountersRepl', {}), base_prefix + ['opcountersRepl_per_sec'], self.publish_counter) self._publish_metrics(base_prefix + ['backgroundFlushing_per_sec'], 'flushes', data.get('backgroundFlushing', {}), self.publish_counter) self._publish_dict_with_prefix(data.get('network', {}), base_prefix + ['network_per_sec'], self.publish_counter) self._publish_metrics(base_prefix + ['extra_info_per_sec'], 'page_faults', data.get('extra_info', {}), self.publish_counter) def get_dotted_value(data, key_name): key_name = key_name.split('.') for i in key_name: data = data.get(i, {}) if not data: return 0 return data def compute_interval(data, total_name): current_total = get_dotted_value(data, total_name) total_key = '.'.join(base_prefix + [total_name]) last_total = self.__totals.get(total_key, current_total) interval = current_total - last_total self.__totals[total_key] = current_total return interval def publish_percent(value_name, total_name, data): value = float(get_dotted_value(data, value_name) * 100) interval = compute_interval(data, total_name) key = '.'.join(base_prefix + ['percent', value_name]) self.publish_counter(key, value, time_delta=bool(interval), interval=interval) publish_percent('globalLock.lockTime', 'globalLock.totalTime', data) publish_percent('indexCounters.btree.misses', 'indexCounters.btree.accesses', data) locks = data.get('locks') if locks: if '.' in locks: locks['_global_'] = locks['.'] del (locks['.']) key_prefix = '.'.join(base_prefix + ['percent']) db_name_filter = re.compile(self.config['databases']) interval = compute_interval(data, 'uptimeMillis') for db_name in locks: if not db_name_filter.search(db_name): continue r = get_dotted_value( locks, '%s.timeLockedMicros.r' % db_name) R = get_dotted_value( locks, '.%s.timeLockedMicros.R' % db_name) value = float(r + R) / 10 if value: self.publish_counter( key_prefix + '.locks.%s.read' % db_name, value, time_delta=bool(interval), interval=interval) w = get_dotted_value( locks, '%s.timeLockedMicros.w' % db_name) W = get_dotted_value( locks, '%s.timeLockedMicros.W' % db_name) value = float(w + W) / 10 if value: self.publish_counter( key_prefix + '.locks.%s.write' % db_name, value, time_delta=bool(interval), interval=interval) def _publish_dict_with_prefix(self, dict, prefix, publishfn=None): for key in dict: self._publish_metrics(prefix, key, dict, publishfn) def _publish_metrics(self, prev_keys, key, data, publishfn=None): """Recursively publish keys""" if not key in data: return value = data[key] keys = prev_keys + [key] if not publishfn: publishfn = self.publish if isinstance(value, dict): for new_key in value: self._publish_metrics(keys, new_key, value) elif isinstance(value, int) or isinstance(value, float): publishfn('.'.join(keys), value) elif isinstance(value, long): publishfn('.'.join(keys), float(value)) def _extract_simple_data(self, data): return { 'connections': data.get('connections'), 'globalLock': data.get('globalLock'), 'indexCounters': data.get('indexCounters') }
mit
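The collector's `collection_sample_rate` option works by hashing each collection name with CRC32 and skipping any name whose hash exceeds `MAX_CRC32 * rate`; because the decision depends only on the name, every replica samples the same subset. A standalone sketch of that selection logic (constant and threshold mirror the collector; the collection names are made up, and the `.encode()` is for Python 3 whereas the collector itself is Python 2):

```python
import zlib

MAX_CRC32 = 4294967295            # same constant the collector defines
sample_rate = 0.5                 # collection_sample_rate from the config
threshold = MAX_CRC32 * sample_rate

def is_sampled(collection_name):
    # crc32 masked to an unsigned 32-bit value, as in the collector
    return (zlib.crc32(collection_name.encode()) & 0xffffffff) <= threshold

for name in ['users', 'orders', 'sessions', 'tmp.mr.job_1']:
    print(name, is_sampled(name))
```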
DataViva/dataviva-site
dataviva/apps/build_graph/views.py
1
3965
# -*- coding: utf-8 -*- import re from flask import Blueprint, render_template, g, jsonify, request from dataviva.apps.general.views import get_locale from dataviva.apps.embed.models import Build, App from dataviva.api.rais.services import Industry as CnaeService from dataviva.api.secex.services import Product as SecexService from dataviva.api.hedu.services import University as HeduService from dataviva.api.sc.services import Basic_course as ScService from dataviva.translations.dictionary import dictionary from sqlalchemy import not_ mod = Blueprint('build_graph', __name__, template_folder='templates', url_prefix='/<lang_code>/build_graph') @mod.before_request def before_request(): g.page_type = mod.name @mod.url_value_preprocessor def pull_lang_code(endpoint, values): g.locale = values.pop('lang_code') @mod.url_defaults def add_language_code(endpoint, values): values.setdefault('lang_code', get_locale()) def parse_filter_id(filter_id): if filter_id != 'all': return '<%>' else: return filter_id @mod.route('/') @mod.route('/<dataset>/<filter0>/<filter1>/<filter2>') def index(dataset=None, filter0=None, filter1=None, filter2=None): view = request.args.get('view') graph = request.args.get('graph') compare = request.args.get('compare') metadata = None build_query = Build.query.join(App).filter( Build.dataset == dataset, Build.filter1.like(parse_filter_id(filter1)), Build.filter2.like(parse_filter_id(filter2)), Build.slug2_en == view, App.type == graph) if graph: build = build_query.first_or_404() build.set_bra(filter0) if filter1 != 'all': build.set_filter1(filter1) if filter2 != 'all': build.set_filter2(filter2) service_id = filter1 if filter1 != u'all' else None year = ' - ' if dataset else '' if dataset == 'rais': year += str(CnaeService(service_id).get_year()) elif dataset == 'secex': year += str(SecexService(service_id).year()) elif dataset == 'hedu': year += str(HeduService(service_id).year()) elif dataset == 'sc': year += str(ScService(service_id).course_year()) title = re.sub(r'\s\(.*\)', r'', build.title()) metadata = { 'view': title, 'graph': dictionary()[graph], 'dataset': dictionary()[dataset] + year, } return render_template( 'build_graph/index.html', dataset=dataset, filter0=filter0, filter1=filter1, filter2=filter2, graph=graph, view=view, compare=compare, metadata=metadata) def parse_filter(filter): if filter != 'all': return '<%s>' % filter else: return filter @mod.route('/views/<dataset>/<bra>/<filter1>/<filter2>') def views(dataset, bra, filter1, filter2): '''/views/secex/hs/wld''' build_query = Build.query.filter( Build.dataset == dataset, Build.filter1 == parse_filter(filter1), Build.filter2 == parse_filter(filter2)) if bra != 'all': build_query.filter(not_(Build.bra.like('all'))) views = {} for build in build_query.all(): if bra == 'all' and build.app.type == 'compare': break if bra: build.set_bra(bra) if filter1 != 'all': build.set_filter1(request.args.get('filter1')) if filter2 != 'all': build.set_filter2(request.args.get('filter2')) title = re.sub(r'\s\(.*\)', r'', build.title()) id = build.slug2_en if id not in views: views[id] = { 'id': id, 'name': title, 'graphs': {}, } views[id]['graphs'][build.app.type] = { 'url': build.url(), 'name': build.app.name() } return jsonify(views=views)
mit
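Both view builders above strip a trailing parenthetical from `build.title()` with the same regex before using it as a display name. A quick illustration of that substitution (the title string is made up):

```python
import re

title = "Exports by Municipality (2002-2017)"
print(re.sub(r'\s\(.*\)', r'', title))  # -> "Exports by Municipality"
```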
polyaxon/polyaxon-api
polyaxon_lib/polyaxonfile/local_runner.py
1
4049
# -*- coding: utf-8 -*- from __future__ import absolute_import, division, print_function import atexit import json import os import signal import subprocess import sys import time from six.moves import xrange from multiprocessing import Process from tensorflow.python.platform import tf_logging as logging from polyaxon_schemas.polyaxonfile.polyaxonfile import PolyaxonFile from polyaxon_schemas.polyaxonfile.specification import Specification from polyaxon_schemas.utils import TaskType from polyaxon_lib.polyaxonfile.manager import ( prepare_all_experiment_jobs, start_experiment_run, ) jobs = [] processes = [] current_run = {'finished': False, TaskType.MASTER: None} def cleanup(): for job in jobs: job.terminate() job.join() # Register cleanup function to the exit of this module atexit.register(cleanup) def signal_handler(*args): for p in processes: p.terminate() current_run['finished'] = True def check_master_process(): print([p.is_alive() for p in jobs]) if not current_run['master'].is_alive(): signal_handler() cleanup() def get_pybin(): try: pybin = os.path.join(os.environ['VIRTUAL_ENV'], 'bin/python') except: # pylint: disable=bare-except pybin = sys.executable return pybin def run_cmd(pybin, cmd, cwd): env_cmd = '{} {}'.format(pybin, cmd) signal.signal(signal.SIGINT, signal_handler) logging.info(env_cmd) p = subprocess.Popen(env_cmd, cwd=cwd, shell=True) processes.append(p) _, error = p.communicate() if error: logging.error('{} - ERROR: '.format(error)) def create_process(env): cmd = ("""-c \"from polyaxon_lib.polyaxonfile.local_runner import start_experiment_run; start_experiment_run( '{polyaxonfile}', '{experiment_id}', '{task_type}', {task_id}, '{schedule}')\"""".format( **env)) p = Process(target=run_cmd, args=(get_pybin(), cmd, os.getcwd(),)) p.daemon = True p.start() jobs.append(p) if env['task_type'] == TaskType.MASTER: current_run[TaskType.MASTER] = p def run_experiment(spec_config, xp): spec = Specification.read(spec_config) logging.info("running Experiment n: {}".format(xp)) cluster, is_distributed = spec.cluster_def if not is_distributed: start_experiment_run(spec, xp, TaskType.MASTER, 0, 'continuous_train_and_eval') current_run['finished'] = True else: env = { 'polyaxonfile': json.dumps(spec.parsed_data), 'task_type': TaskType.MASTER, 'experiment_id': xp, 'task_id': 0, 'schedule': 'train_and_evaluate' } create_process(env) for i in xrange(cluster.get(TaskType.WORKER, 0)): env['task_id'] = i env['task_type'] = TaskType.WORKER env['schedule'] = 'train' create_process(env) for i in xrange(cluster.get(TaskType.PS, 0)): env['task_id'] = i env['task_type'] = TaskType.PS env['schedule'] = 'run_std_server' create_process(env) for job in jobs: job.join() def run(polyaxonfile): plx_file = PolyaxonFile.read(polyaxonfile) for xp in range(plx_file.matrix_space): run_experiment(plx_file.experiment_specs[xp], xp) while not current_run['finished']: check_master_process() time.sleep(10) current_run['finished'] = False current_run['master'] = None def run_all(polyaxonfile): plx_file = PolyaxonFile.read(polyaxonfile) for xp in range(plx_file.matrix_space): xp_jobs = prepare_all_experiment_jobs(plx_file.experiment_specs[xp], xp) for i, xp_job in enumerate(xp_jobs): if i == 0: schedule = 'train_and_evaluate' else: schedule = 'train' p = Process(target=getattr(xp_job, schedule)) p.start() jobs.append(p) for job in jobs: job.join()
mit
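For a distributed spec, `run_experiment` launches one process per task: the master runs `train_and_evaluate`, each worker runs `train`, and each parameter server runs `run_std_server`, with `create_process` re-invoking the module through a `python -c` command. A minimal sketch of the same fan-out pattern with `multiprocessing`, using a stand-in `run_task` instead of the real `start_experiment_run`:

```python
from multiprocessing import Process

def run_task(task_type, task_id, schedule):
    # Stand-in for start_experiment_run(spec, xp, task_type, task_id, schedule)
    print('starting %s:%d with schedule %s' % (task_type, task_id, schedule))

if __name__ == '__main__':
    cluster = {'worker': 2, 'ps': 1}   # analogous to spec.cluster_def
    jobs = [Process(target=run_task, args=('master', 0, 'train_and_evaluate'))]
    for i in range(cluster.get('worker', 0)):
        jobs.append(Process(target=run_task, args=('worker', i, 'train')))
    for i in range(cluster.get('ps', 0)):
        jobs.append(Process(target=run_task, args=('ps', i, 'run_std_server')))
    for job in jobs:
        job.start()
    for job in jobs:
        job.join()
```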
actuaryzhang/spark
examples/src/main/python/ml/dct_example.py
123
1509
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements.  See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License.  You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

from __future__ import print_function

# $example on$
from pyspark.ml.feature import DCT
from pyspark.ml.linalg import Vectors
# $example off$
from pyspark.sql import SparkSession

if __name__ == "__main__":
    spark = SparkSession\
        .builder\
        .appName("DCTExample")\
        .getOrCreate()

    # $example on$
    df = spark.createDataFrame([
        (Vectors.dense([0.0, 1.0, -2.0, 3.0]),),
        (Vectors.dense([-1.0, 2.0, 4.0, -7.0]),),
        (Vectors.dense([14.0, -2.0, -5.0, 1.0]),)], ["features"])

    dct = DCT(inverse=False, inputCol="features", outputCol="featuresDCT")

    dctDf = dct.transform(df)

    dctDf.select("featuresDCT").show(truncate=False)
    # $example off$

    spark.stop()
apache-2.0
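The example above applies Spark ML's `DCT` transformer, which computes a one-dimensional discrete cosine transform of each feature vector. For intuition, roughly the same numbers can be reproduced outside Spark with SciPy's orthonormal DCT-II; treat the exact scaling as an assumption to verify against your Spark version.

```python
from scipy.fftpack import dct

x = [0.0, 1.0, -2.0, 3.0]            # first row of the example DataFrame
print(dct(x, type=2, norm='ortho'))  # expected to be close to Spark's featuresDCT column
```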
FreeScienceCommunity/rt-thread
tools/package.py
45
2209
# # File : package.py # This file is part of RT-Thread RTOS # COPYRIGHT (C) 2006 - 2015, RT-Thread Development Team # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along # with this program; if not, write to the Free Software Foundation, Inc., # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Change Logs: # Date Author Notes # 2015-04-10 Bernard First version # # this script is used to build group with package.json instead of SConscript import os from building import * def ExtendPackageVar(package, var): v = [] if not package.has_key(var): return v for item in package[var]: v = v + [item] return v def BuildPackage(package): import json f = file(package) package_json = f.read() # get package.json path cwd = os.path.dirname(package) package = json.loads(package_json) # check package name if not package.has_key('name'): return [] # get depends depend = ExtendPackageVar(package, 'depends') src = [] if package.has_key('source_files'): for src_file in package['source_files']: src_file = os.path.join(cwd, src_file) src += Glob(src_file) CPPPATH = [] if package.has_key('CPPPATH'): for path in package['CPPPATH']: if path.startswith('/') and os.path.isdir(path): CPPPATH = CPPPATH + [path] else: CPPPATH = CPPPATH + [os.path.join(cwd, path)] CPPDEFINES = ExtendPackageVar(package, 'CPPDEFINES') objs = DefineGroup(package['name'], src, depend = depend, CPPPATH = CPPPATH, CPPDEFINES = CPPDEFINES) return objs
gpl-2.0
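`BuildPackage` expects a `package.json` next to the sources and only reads a handful of keys: `name` (required), `depends`, `source_files`, `CPPPATH`, and `CPPDEFINES`. A hypothetical minimal file illustrating that shape (all values made up), written here as a Python dict and dumped as JSON:

```python
import json

# Hypothetical package.json content covering the keys BuildPackage reads.
package = {
    "name": "example_pkg",                 # required; group name passed to DefineGroup
    "depends": ["RT_USING_EXAMPLE_PKG"],   # build only when this option is enabled
    "source_files": ["src/*.c"],           # globbed relative to the package directory
    "CPPPATH": ["inc"],                    # relative paths are joined with the package dir
    "CPPDEFINES": ["EXAMPLE_PKG_ENABLED"],
}

print(json.dumps(package, indent=4))
```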
tomhughes/mapnik
scons/scons-time.py
3
47967
#!/usr/bin/env python # # scons-time - run SCons timings and collect statistics # # A script for running a configuration through SCons with a standard # set of invocations to collect timing and memory statistics and to # capture the results in a consistent set of output files for display # and analysis. # # # Copyright (c) 2001 - 2021 The SCons Foundation # # Permission is hereby granted, free of charge, to any person obtaining # a copy of this software and associated documentation files (the # "Software"), to deal in the Software without restriction, including # without limitation the rights to use, copy, modify, merge, publish, # distribute, sublicense, and/or sell copies of the Software, and to # permit persons to whom the Software is furnished to do so, subject to # the following conditions: # # The above copyright notice and this permission notice shall be included # in all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY # KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE # WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND # NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE # LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION # WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. __revision__ = "bin/scons-time.py 215860fd4f6bea67896c145660a035fad20cc41c 2021-01-19 19:32:22 bdbaddog" import getopt import glob import os import re import shutil import sys import tempfile import time def HACK_for_exec(cmd, *args): """ For some reason, Python won't allow an exec() within a function that also declares an internal function (including lambda functions). This function is a hack that calls exec() in a function with no internal functions. """ if not args: exec(cmd) elif len(args) == 1: exec(cmd, args[0]) else: exec(cmd, args[0], args[1]) class Plotter: def increment_size(self, largest): """ Return the size of each horizontal increment line for a specified maximum value. This returns a value that will provide somewhere between 5 and 9 horizontal lines on the graph, on some set of boundaries that are multiples of 10/100/1000/etc. """ i = largest // 5 if not i: return largest multiplier = 1 while i >= 10: i = i // 10 multiplier = multiplier * 10 return i * multiplier def max_graph_value(self, largest): # Round up to next integer. largest = int(largest) + 1 increment = self.increment_size(largest) return ((largest + increment - 1) // increment) * increment class Line: def __init__(self, points, type, title, label, comment, fmt="%s %s"): self.points = points self.type = type self.title = title self.label = label self.comment = comment self.fmt = fmt def print_label(self, inx, x, y): if self.label: print('set label %s "%s" at %0.1f,%0.1f right' % (inx, self.label, x, y)) def plot_string(self): if self.title: title_string = 'title "%s"' % self.title else: title_string = 'notitle' return "'-' %s with lines lt %s" % (title_string, self.type) def print_points(self, fmt=None): if fmt is None: fmt = self.fmt if self.comment: print('# %s' % self.comment) for x, y in self.points: # If y is None, it usually represents some kind of break # in the line's index number. We might want to represent # this some way rather than just drawing the line straight # between the two points on either side. 
if y is not None: print(fmt % (x, y)) print('e') def get_x_values(self): return [p[0] for p in self.points] def get_y_values(self): return [p[1] for p in self.points] class Gnuplotter(Plotter): def __init__(self, title, key_location): self.lines = [] self.title = title self.key_location = key_location def line(self, points, type, title=None, label=None, comment=None, fmt='%s %s'): if points: line = Line(points, type, title, label, comment, fmt) self.lines.append(line) def plot_string(self, line): return line.plot_string() def vertical_bar(self, x, type, label, comment): if self.get_min_x() <= x <= self.get_max_x(): points = [(x, 0), (x, self.max_graph_value(self.get_max_y()))] self.line(points, type, label, comment) def get_all_x_values(self): result = [] for line in self.lines: result.extend(line.get_x_values()) return [r for r in result if r is not None] def get_all_y_values(self): result = [] for line in self.lines: result.extend(line.get_y_values()) return [r for r in result if r is not None] def get_min_x(self): try: return self.min_x except AttributeError: try: self.min_x = min(self.get_all_x_values()) except ValueError: self.min_x = 0 return self.min_x def get_max_x(self): try: return self.max_x except AttributeError: try: self.max_x = max(self.get_all_x_values()) except ValueError: self.max_x = 0 return self.max_x def get_min_y(self): try: return self.min_y except AttributeError: try: self.min_y = min(self.get_all_y_values()) except ValueError: self.min_y = 0 return self.min_y def get_max_y(self): try: return self.max_y except AttributeError: try: self.max_y = max(self.get_all_y_values()) except ValueError: self.max_y = 0 return self.max_y def draw(self): if not self.lines: return if self.title: print('set title "%s"' % self.title) print('set key %s' % self.key_location) min_y = self.get_min_y() max_y = self.max_graph_value(self.get_max_y()) incr = (max_y - min_y) / 10.0 start = min_y + (max_y / 2.0) + (2.0 * incr) position = [start - (i * incr) for i in range(5)] inx = 1 for line in self.lines: line.print_label(inx, line.points[0][0] - 1, position[(inx - 1) % len(position)]) inx += 1 plot_strings = [self.plot_string(l) for l in self.lines] print('plot ' + ', \\\n '.join(plot_strings)) for line in self.lines: line.print_points() def untar(fname): import tarfile tar = tarfile.open(name=fname, mode='r') for tarinfo in tar: tar.extract(tarinfo) tar.close() def unzip(fname): import zipfile zf = zipfile.ZipFile(fname, 'r') for name in zf.namelist(): dir = os.path.dirname(name) try: os.makedirs(dir) except OSError: pass with open(name, 'wb') as f: f.write(zf.read(name)) def read_tree(dir): for dirpath, dirnames, filenames in os.walk(dir): for fn in filenames: fn = os.path.join(dirpath, fn) if os.path.isfile(fn): with open(fn, 'rb') as f: f.read() def redirect_to_file(command, log): return '%s > %s 2>&1' % (command, log) def tee_to_file(command, log): return '%s 2>&1 | tee %s' % (command, log) class SConsTimer: """ Usage: scons-time SUBCOMMAND [ARGUMENTS] Type "scons-time help SUBCOMMAND" for help on a specific subcommand. 
Available subcommands: func Extract test-run data for a function help Provides help mem Extract --debug=memory data from test runs obj Extract --debug=count data from test runs time Extract --debug=time data from test runs run Runs a test configuration """ name = 'scons-time' name_spaces = ' ' * len(name) def makedict(**kw): return kw default_settings = makedict( chdir=None, config_file=None, initial_commands=[], key_location='bottom left', orig_cwd=os.getcwd(), outdir=None, prefix='', python='"%s"' % sys.executable, redirect=redirect_to_file, scons=None, scons_flags='--debug=count --debug=memory --debug=time --debug=memoizer', scons_lib_dir=None, scons_wrapper=None, startup_targets='--help', subdir=None, subversion_url=None, svn='svn', svn_co_flag='-q', tar='tar', targets='', targets0=None, targets1=None, targets2=None, title=None, unzip='unzip', verbose=False, vertical_bars=[], unpack_map={ '.tar.gz': (untar, '%(tar)s xzf %%s'), '.tgz': (untar, '%(tar)s xzf %%s'), '.tar': (untar, '%(tar)s xf %%s'), '.zip': (unzip, '%(unzip)s %%s'), }, ) run_titles = [ 'Startup', 'Full build', 'Up-to-date build', ] run_commands = [ '%(python)s %(scons_wrapper)s %(scons_flags)s --profile=%(prof0)s %(targets0)s', '%(python)s %(scons_wrapper)s %(scons_flags)s --profile=%(prof1)s %(targets1)s', '%(python)s %(scons_wrapper)s %(scons_flags)s --profile=%(prof2)s %(targets2)s', ] stages = [ 'pre-read', 'post-read', 'pre-build', 'post-build', ] stage_strings = { 'pre-read': 'Memory before reading SConscript files:', 'post-read': 'Memory after reading SConscript files:', 'pre-build': 'Memory before building targets:', 'post-build': 'Memory after building targets:', } memory_string_all = 'Memory ' default_stage = stages[-1] time_strings = { 'total': 'Total build time', 'SConscripts': 'Total SConscript file execution time', 'SCons': 'Total SCons execution time', 'commands': 'Total command execution time', } time_string_all = 'Total .* time' # def __init__(self): self.__dict__.update(self.default_settings) # Functions for displaying and executing commands. def subst(self, x, dictionary): try: return x % dictionary except TypeError: # x isn't a string (it's probably a Python function), # so just return it. return x def subst_variables(self, command, dictionary): """ Substitutes (via the format operator) the values in the specified dictionary into the specified command. The command can be an (action, string) tuple. In all cases, we perform substitution on strings and don't worry if something isn't a string. (It's probably a Python function to be executed.) """ try: command + '' except TypeError: action = command[0] string = command[1] args = command[2:] else: action = command string = action args = (()) action = self.subst(action, dictionary) string = self.subst(string, dictionary) return (action, string, args) def _do_not_display(self, msg, *args): pass def display(self, msg, *args): """ Displays the specified message. Each message is prepended with a standard prefix of our name plus the time. """ if callable(msg): msg = msg(*args) else: msg = msg % args if msg is None: return fmt = '%s[%s]: %s\n' sys.stdout.write(fmt % (self.name, time.strftime('%H:%M:%S'), msg)) def _do_not_execute(self, action, *args): pass def execute(self, action, *args): """ Executes the specified action. The action is called if it's a callable Python function, and otherwise passed to os.system(). 
""" if callable(action): action(*args) else: os.system(action % args) def run_command_list(self, commands, dict): """ Executes a list of commands, substituting values from the specified dictionary. """ commands = [self.subst_variables(c, dict) for c in commands] for action, string, args in commands: self.display(string, *args) sys.stdout.flush() status = self.execute(action, *args) if status: sys.exit(status) def log_display(self, command, log): command = self.subst(command, self.__dict__) if log: command = self.redirect(command, log) return command def log_execute(self, command, log): command = self.subst(command, self.__dict__) p = os.popen(command) output = p.read() p.close() # TODO: convert to subrocess, os.popen is obsolete. This didn't work: # process = subprocess.Popen(command, stdout=subprocess.PIPE, shell=True) # output = process.stdout.read() # process.stdout.close() # process.wait() if self.verbose: sys.stdout.write(output) # TODO: Figure out # Not sure we need to write binary here with open(log, 'w') as f: f.write(str(output)) def archive_splitext(self, path): """ Splits an archive name into a filename base and extension. This is like os.path.splitext() (which it calls) except that it also looks for '.tar.gz' and treats it as an atomic extensions. """ if path.endswith('.tar.gz'): return path[:-7], path[-7:] else: return os.path.splitext(path) def args_to_files(self, args, tail=None): """ Takes a list of arguments, expands any glob patterns, and returns the last "tail" files from the list. """ files = [] for a in args: files.extend(sorted(glob.glob(a))) if tail: files = files[-tail:] return files def ascii_table(self, files, columns, line_function, file_function=lambda x: x, *args, **kw): header_fmt = ' '.join(['%12s'] * len(columns)) line_fmt = header_fmt + ' %s' print(header_fmt % columns) for file in files: t = line_function(file, *args, **kw) if t is None: t = [] diff = len(columns) - len(t) if diff > 0: t += [''] * diff t.append(file_function(file)) print(line_fmt % tuple(t)) def collect_results(self, files, function, *args, **kw): results = {} for file in files: base = os.path.splitext(file)[0] run, index = base.split('-')[-2:] run = int(run) index = int(index) value = function(file, *args, **kw) try: r = results[index] except KeyError: r = [] results[index] = r r.append((run, value)) return results def doc_to_help(self, obj): """ Translates an object's __doc__ string into help text. This strips a consistent number of spaces from each line in the help text, essentially "outdenting" the text to the left-most column. """ doc = obj.__doc__ if doc is None: return '' return self.outdent(doc) def find_next_run_number(self, dir, prefix): """ Returns the next run number in a directory for the specified prefix. Examines the contents the specified directory for files with the specified prefix, extracts the run numbers from each file name, and returns the next run number after the largest it finds. """ x = re.compile(re.escape(prefix) + '-([0-9]+).*') matches = [x.match(e) for e in os.listdir(dir)] matches = [_f for _f in matches if _f] if not matches: return 0 run_numbers = [int(m.group(1)) for m in matches] return int(max(run_numbers)) + 1 def gnuplot_results(self, results, fmt='%s %.3f'): """ Prints out a set of results in Gnuplot format. """ gp = Gnuplotter(self.title, self.key_location) for i in sorted(results.keys()): try: t = self.run_titles[i] except IndexError: t = '??? %s ???' 
% i results[i].sort() gp.line(results[i], i + 1, t, None, t, fmt=fmt) for bar_tuple in self.vertical_bars: try: x, type, label, comment = bar_tuple except ValueError: x, type, label = bar_tuple comment = label gp.vertical_bar(x, type, label, comment) gp.draw() def logfile_name(self, invocation): """ Returns the absolute path of a log file for the specificed invocation number. """ name = self.prefix_run + '-%d.log' % invocation return os.path.join(self.outdir, name) def outdent(self, s): """ Strip as many spaces from each line as are found at the beginning of the first line in the list. """ lines = s.split('\n') if lines[0] == '': lines = lines[1:] spaces = re.match(' *', lines[0]).group(0) def strip_initial_spaces(line, s=spaces): if line.startswith(spaces): line = line[len(spaces):] return line return '\n'.join([strip_initial_spaces(l) for l in lines]) + '\n' def profile_name(self, invocation): """ Returns the absolute path of a profile file for the specified invocation number. """ name = self.prefix_run + '-%d.prof' % invocation return os.path.join(self.outdir, name) def set_env(self, key, value): os.environ[key] = value # def get_debug_times(self, file, time_string=None): """ Fetch times from the --debug=time strings in the specified file. """ if time_string is None: search_string = self.time_string_all else: search_string = time_string with open(file) as f: contents = f.read() if not contents: sys.stderr.write('file %s has no contents!\n' % repr(file)) return None result = re.findall(r'%s: ([\d.]*)' % search_string, contents)[-4:] result = [float(r) for r in result] if time_string is not None: try: result = result[0] except IndexError: sys.stderr.write('file %s has no results!\n' % repr(file)) return None return result def get_function_profile(self, file, function): """ Returns the file, line number, function name, and cumulative time. """ try: import pstats except ImportError as e: sys.stderr.write('%s: func: %s\n' % (self.name, e)) sys.stderr.write('%s This version of Python is missing the profiler.\n' % self.name_spaces) sys.stderr.write('%s Cannot use the "func" subcommand.\n' % self.name_spaces) sys.exit(1) statistics = pstats.Stats(file).stats matches = [e for e in statistics.items() if e[0][2] == function] r = matches[0] return r[0][0], r[0][1], r[0][2], r[1][3] def get_function_time(self, file, function): """ Returns just the cumulative time for the specified function. """ return self.get_function_profile(file, function)[3] def get_memory(self, file, memory_string=None): """ Returns a list of integers of the amount of memory used. The default behavior is to return all the stages. """ if memory_string is None: search_string = self.memory_string_all else: search_string = memory_string with open(file) as f: lines = f.readlines() lines = [l for l in lines if l.startswith(search_string)][-4:] result = [int(l.split()[-1]) for l in lines[-4:]] if len(result) == 1: result = result[0] return result def get_object_counts(self, file, object_name, index=None): """ Returns the counts of the specified object_name. """ object_string = ' ' + object_name + '\n' with open(file) as f: lines = f.readlines() line = [l for l in lines if l.endswith(object_string)][0] result = [int(field) for field in line.split()[:4]] if index is not None: result = result[index] return result command_alias = {} def execute_subcommand(self, argv): """ Executes the do_*() function for the specified subcommand (argv[0]). 
""" if not argv: return cmdName = self.command_alias.get(argv[0], argv[0]) try: func = getattr(self, 'do_' + cmdName) except AttributeError: return self.default(argv) try: return func(argv) except TypeError as e: sys.stderr.write("%s %s: %s\n" % (self.name, cmdName, e)) import traceback traceback.print_exc(file=sys.stderr) sys.stderr.write("Try '%s help %s'\n" % (self.name, cmdName)) def default(self, argv): """ The default behavior for an unknown subcommand. Prints an error message and exits. """ sys.stderr.write('%s: Unknown subcommand "%s".\n' % (self.name, argv[0])) sys.stderr.write('Type "%s help" for usage.\n' % self.name) sys.exit(1) # def do_help(self, argv): """ """ if argv[1:]: for arg in argv[1:]: try: func = getattr(self, 'do_' + arg) except AttributeError: sys.stderr.write('%s: No help for "%s"\n' % (self.name, arg)) else: try: help = getattr(self, 'help_' + arg) except AttributeError: sys.stdout.write(self.doc_to_help(func)) sys.stdout.flush() else: help() else: doc = self.doc_to_help(self.__class__) if doc: sys.stdout.write(doc) sys.stdout.flush() return None # def help_func(self): help = """\ Usage: scons-time func [OPTIONS] FILE [...] -C DIR, --chdir=DIR Change to DIR before looking for files -f FILE, --file=FILE Read configuration from specified FILE --fmt=FORMAT, --format=FORMAT Print data in specified FORMAT --func=NAME, --function=NAME Report time for function NAME -h, --help Print this help and exit -p STRING, --prefix=STRING Use STRING as log file/profile prefix -t NUMBER, --tail=NUMBER Only report the last NUMBER files --title=TITLE Specify the output plot TITLE """ sys.stdout.write(self.outdent(help)) sys.stdout.flush() def do_func(self, argv): """ """ format = 'ascii' function_name = '_main' tail = None short_opts = '?C:f:hp:t:' long_opts = [ 'chdir=', 'file=', 'fmt=', 'format=', 'func=', 'function=', 'help', 'prefix=', 'tail=', 'title=', ] opts, args = getopt.getopt(argv[1:], short_opts, long_opts) for o, a in opts: if o in ('-C', '--chdir'): self.chdir = a elif o in ('-f', '--file'): self.config_file = a elif o in ('--fmt', '--format'): format = a elif o in ('--func', '--function'): function_name = a elif o in ('-?', '-h', '--help'): self.do_help(['help', 'func']) sys.exit(0) elif o in ('--max',): max_time = int(a) elif o in ('-p', '--prefix'): self.prefix = a elif o in ('-t', '--tail'): tail = int(a) elif o in ('--title',): self.title = a if self.config_file: with open(self.config_file, 'r') as f: config = f.read() exec(config, self.__dict__) if self.chdir: os.chdir(self.chdir) if not args: pattern = '%s*.prof' % self.prefix args = self.args_to_files([pattern], tail) if not args: if self.chdir: directory = self.chdir else: directory = os.getcwd() sys.stderr.write('%s: func: No arguments specified.\n' % self.name) sys.stderr.write('%s No %s*.prof files found in "%s".\n' % (self.name_spaces, self.prefix, directory)) sys.stderr.write('%s Type "%s help func" for help.\n' % (self.name_spaces, self.name)) sys.exit(1) else: args = self.args_to_files(args, tail) cwd_ = os.getcwd() + os.sep if format == 'ascii': for file in args: try: f, line, func, time = \ self.get_function_profile(file, function_name) except ValueError as e: sys.stderr.write("%s: func: %s: %s\n" % (self.name, file, e)) else: if f.startswith(cwd_): f = f[len(cwd_):] print("%.3f %s:%d(%s)" % (time, f, line, func)) elif format == 'gnuplot': results = self.collect_results(args, self.get_function_time, function_name) self.gnuplot_results(results) else: sys.stderr.write('%s: func: Unknown format "%s".\n' % 
(self.name, format)) sys.exit(1) # def help_mem(self): help = """\ Usage: scons-time mem [OPTIONS] FILE [...] -C DIR, --chdir=DIR Change to DIR before looking for files -f FILE, --file=FILE Read configuration from specified FILE --fmt=FORMAT, --format=FORMAT Print data in specified FORMAT -h, --help Print this help and exit -p STRING, --prefix=STRING Use STRING as log file/profile prefix --stage=STAGE Plot memory at the specified stage: pre-read, post-read, pre-build, post-build (default: post-build) -t NUMBER, --tail=NUMBER Only report the last NUMBER files --title=TITLE Specify the output plot TITLE """ sys.stdout.write(self.outdent(help)) sys.stdout.flush() def do_mem(self, argv): format = 'ascii' def _logfile_path(x): return x logfile_path = _logfile_path stage = self.default_stage tail = None short_opts = '?C:f:hp:t:' long_opts = [ 'chdir=', 'file=', 'fmt=', 'format=', 'help', 'prefix=', 'stage=', 'tail=', 'title=', ] opts, args = getopt.getopt(argv[1:], short_opts, long_opts) for o, a in opts: if o in ('-C', '--chdir'): self.chdir = a elif o in ('-f', '--file'): self.config_file = a elif o in ('--fmt', '--format'): format = a elif o in ('-?', '-h', '--help'): self.do_help(['help', 'mem']) sys.exit(0) elif o in ('-p', '--prefix'): self.prefix = a elif o in ('--stage',): if a not in self.stages: sys.stderr.write('%s: mem: Unrecognized stage "%s".\n' % (self.name, a)) sys.exit(1) stage = a elif o in ('-t', '--tail'): tail = int(a) elif o in ('--title',): self.title = a if self.config_file: with open(self.config_file, 'r') as f: config = f.read() HACK_for_exec(config, self.__dict__) if self.chdir: os.chdir(self.chdir) def _logfile_path_join(x): return os.path.join(self.chdir, x) logfile_path = _logfile_path_join if not args: pattern = '%s*.log' % self.prefix args = self.args_to_files([pattern], tail) if not args: if self.chdir: directory = self.chdir else: directory = os.getcwd() sys.stderr.write('%s: mem: No arguments specified.\n' % self.name) sys.stderr.write('%s No %s*.log files found in "%s".\n' % (self.name_spaces, self.prefix, directory)) sys.stderr.write('%s Type "%s help mem" for help.\n' % (self.name_spaces, self.name)) sys.exit(1) else: args = self.args_to_files(args, tail) # cwd_ = os.getcwd() + os.sep if format == 'ascii': self.ascii_table(args, tuple(self.stages), self.get_memory, logfile_path) elif format == 'gnuplot': results = self.collect_results(args, self.get_memory, self.stage_strings[stage]) self.gnuplot_results(results) else: sys.stderr.write('%s: mem: Unknown format "%s".\n' % (self.name, format)) sys.exit(1) return 0 # def help_obj(self): help = """\ Usage: scons-time obj [OPTIONS] OBJECT FILE [...] 
-C DIR, --chdir=DIR Change to DIR before looking for files -f FILE, --file=FILE Read configuration from specified FILE --fmt=FORMAT, --format=FORMAT Print data in specified FORMAT -h, --help Print this help and exit -p STRING, --prefix=STRING Use STRING as log file/profile prefix --stage=STAGE Plot memory at the specified stage: pre-read, post-read, pre-build, post-build (default: post-build) -t NUMBER, --tail=NUMBER Only report the last NUMBER files --title=TITLE Specify the output plot TITLE """ sys.stdout.write(self.outdent(help)) sys.stdout.flush() def do_obj(self, argv): format = 'ascii' def _logfile_path(x): return x logfile_path = _logfile_path stage = self.default_stage tail = None short_opts = '?C:f:hp:t:' long_opts = [ 'chdir=', 'file=', 'fmt=', 'format=', 'help', 'prefix=', 'stage=', 'tail=', 'title=', ] opts, args = getopt.getopt(argv[1:], short_opts, long_opts) for o, a in opts: if o in ('-C', '--chdir'): self.chdir = a elif o in ('-f', '--file'): self.config_file = a elif o in ('--fmt', '--format'): format = a elif o in ('-?', '-h', '--help'): self.do_help(['help', 'obj']) sys.exit(0) elif o in ('-p', '--prefix'): self.prefix = a elif o in ('--stage',): if a not in self.stages: sys.stderr.write('%s: obj: Unrecognized stage "%s".\n' % (self.name, a)) sys.stderr.write('%s Type "%s help obj" for help.\n' % (self.name_spaces, self.name)) sys.exit(1) stage = a elif o in ('-t', '--tail'): tail = int(a) elif o in ('--title',): self.title = a if not args: sys.stderr.write('%s: obj: Must specify an object name.\n' % self.name) sys.stderr.write('%s Type "%s help obj" for help.\n' % (self.name_spaces, self.name)) sys.exit(1) object_name = args.pop(0) if self.config_file: with open(self.config_file, 'r') as f: config = f.read() HACK_for_exec(config, self.__dict__) if self.chdir: os.chdir(self.chdir) def _logfile_path_join(x): return os.path.join(self.chdir, x) logfile_path = _logfile_path_join if not args: pattern = '%s*.log' % self.prefix args = self.args_to_files([pattern], tail) if not args: if self.chdir: directory = self.chdir else: directory = os.getcwd() sys.stderr.write('%s: obj: No arguments specified.\n' % self.name) sys.stderr.write('%s No %s*.log files found in "%s".\n' % (self.name_spaces, self.prefix, directory)) sys.stderr.write('%s Type "%s help obj" for help.\n' % (self.name_spaces, self.name)) sys.exit(1) else: args = self.args_to_files(args, tail) cwd_ = os.getcwd() + os.sep if format == 'ascii': self.ascii_table(args, tuple(self.stages), self.get_object_counts, logfile_path, object_name) elif format == 'gnuplot': stage_index = 0 for s in self.stages: if stage == s: break stage_index = stage_index + 1 results = self.collect_results(args, self.get_object_counts, object_name, stage_index) self.gnuplot_results(results) else: sys.stderr.write('%s: obj: Unknown format "%s".\n' % (self.name, format)) sys.exit(1) return 0 # def help_run(self): help = """\ Usage: scons-time run [OPTIONS] [FILE ...] 
--chdir=DIR Name of unpacked directory for chdir -f FILE, --file=FILE Read configuration from specified FILE -h, --help Print this help and exit -n, --no-exec No execute, just print command lines --number=NUMBER Put output in files for run NUMBER --outdir=OUTDIR Put output files in OUTDIR -p STRING, --prefix=STRING Use STRING as log file/profile prefix --python=PYTHON Time using the specified PYTHON -q, --quiet Don't print command lines --scons=SCONS Time using the specified SCONS --svn=URL, --subversion=URL Use SCons from Subversion URL -v, --verbose Display output of commands """ sys.stdout.write(self.outdent(help)) sys.stdout.flush() def do_run(self, argv): """ """ run_number_list = [None] short_opts = '?f:hnp:qs:v' long_opts = [ 'file=', 'help', 'no-exec', 'number=', 'outdir=', 'prefix=', 'python=', 'quiet', 'scons=', 'svn=', 'subdir=', 'subversion=', 'verbose', ] opts, args = getopt.getopt(argv[1:], short_opts, long_opts) for o, a in opts: if o in ('-f', '--file'): self.config_file = a elif o in ('-?', '-h', '--help'): self.do_help(['help', 'run']) sys.exit(0) elif o in ('-n', '--no-exec'): self.execute = self._do_not_execute elif o in ('--number',): run_number_list = self.split_run_numbers(a) elif o in ('--outdir',): self.outdir = a elif o in ('-p', '--prefix'): self.prefix = a elif o in ('--python',): self.python = a elif o in ('-q', '--quiet'): self.display = self._do_not_display elif o in ('-s', '--subdir'): self.subdir = a elif o in ('--scons',): self.scons = a elif o in ('--svn', '--subversion'): self.subversion_url = a elif o in ('-v', '--verbose'): self.redirect = tee_to_file self.verbose = True self.svn_co_flag = '' if not args and not self.config_file: sys.stderr.write('%s: run: No arguments or -f config file specified.\n' % self.name) sys.stderr.write('%s Type "%s help run" for help.\n' % (self.name_spaces, self.name)) sys.exit(1) if self.config_file: with open(self.config_file, 'r') as f: config = f.read() exec(config, self.__dict__) if args: self.archive_list = args archive_file_name = os.path.split(self.archive_list[0])[1] if not self.subdir: self.subdir = self.archive_splitext(archive_file_name)[0] if not self.prefix: self.prefix = self.archive_splitext(archive_file_name)[0] prepare = None if self.subversion_url: prepare = self.prep_subversion_run for run_number in run_number_list: self.individual_run(run_number, self.archive_list, prepare) def split_run_numbers(self, s): result = [] for n in s.split(','): try: x, y = n.split('-') except ValueError: result.append(int(n)) else: result.extend(list(range(int(x), int(y) + 1))) return result def scons_path(self, dir): return os.path.join(dir, 'scripts', 'scons.py') def scons_lib_dir_path(self, dir): """build the path to the engine. this used to join src/engine, but no longer. """ return dir def prep_subversion_run(self, commands, removals): self.svn_tmpdir = tempfile.mkdtemp(prefix=self.name + '-svn-') removals.append((shutil.rmtree, 'rm -rf %%s', self.svn_tmpdir)) self.scons = self.scons_path(self.svn_tmpdir) self.scons_lib_dir = self.scons_lib_dir_path(self.svn_tmpdir) commands.extend([ '%(svn)s co %(svn_co_flag)s -r %(run_number)s %(subversion_url)s %(svn_tmpdir)s', ]) def individual_run(self, run_number, archive_list, prepare=None): """ Performs an individual run of the default SCons invocations. 
""" commands = [] removals = [] if prepare: prepare(commands, removals) save_scons = self.scons save_scons_wrapper = self.scons_wrapper save_scons_lib_dir = self.scons_lib_dir if self.outdir is None: self.outdir = self.orig_cwd elif not os.path.isabs(self.outdir): self.outdir = os.path.join(self.orig_cwd, self.outdir) if self.scons is None: self.scons = self.scons_path(self.orig_cwd) if self.scons_lib_dir is None: self.scons_lib_dir = self.scons_lib_dir_path(self.orig_cwd) if self.scons_wrapper is None: self.scons_wrapper = self.scons if not run_number: run_number = self.find_next_run_number(self.outdir, self.prefix) self.run_number = str(run_number) self.prefix_run = self.prefix + '-%03d' % run_number if self.targets0 is None: self.targets0 = self.startup_targets if self.targets1 is None: self.targets1 = self.targets if self.targets2 is None: self.targets2 = self.targets self.tmpdir = tempfile.mkdtemp(prefix=self.name + '-') commands.extend([ (os.chdir, 'cd %%s', self.tmpdir), ]) for archive in archive_list: if not os.path.isabs(archive): archive = os.path.join(self.orig_cwd, archive) if os.path.isdir(archive): dest = os.path.split(archive)[1] commands.append((shutil.copytree, 'cp -r %%s %%s', archive, dest)) else: suffix = self.archive_splitext(archive)[1] unpack_command = self.unpack_map.get(suffix) if not unpack_command: dest = os.path.split(archive)[1] commands.append((shutil.copyfile, 'cp %%s %%s', archive, dest)) else: commands.append(unpack_command + (archive,)) commands.extend([ (os.chdir, 'cd %%s', self.subdir), ]) commands.extend(self.initial_commands) commands.extend([ (lambda: read_tree('.'), 'find * -type f | xargs cat > /dev/null'), (self.set_env, 'export %%s=%%s', 'SCONS_LIB_DIR', self.scons_lib_dir), '%(python)s %(scons_wrapper)s --version', ]) index = 0 for run_command in self.run_commands: setattr(self, 'prof%d' % index, self.profile_name(index)) c = ( self.log_execute, self.log_display, run_command, self.logfile_name(index), ) commands.append(c) index = index + 1 commands.extend([ (os.chdir, 'cd %%s', self.orig_cwd), ]) if not os.environ.get('PRESERVE'): commands.extend(removals) commands.append((shutil.rmtree, 'rm -rf %%s', self.tmpdir)) self.run_command_list(commands, self.__dict__) self.scons = save_scons self.scons_lib_dir = save_scons_lib_dir self.scons_wrapper = save_scons_wrapper # def help_time(self): help = """\ Usage: scons-time time [OPTIONS] FILE [...] -C DIR, --chdir=DIR Change to DIR before looking for files -f FILE, --file=FILE Read configuration from specified FILE --fmt=FORMAT, --format=FORMAT Print data in specified FORMAT -h, --help Print this help and exit -p STRING, --prefix=STRING Use STRING as log file/profile prefix -t NUMBER, --tail=NUMBER Only report the last NUMBER files --which=TIMER Plot timings for TIMER: total, SConscripts, SCons, commands. 
""" sys.stdout.write(self.outdent(help)) sys.stdout.flush() def do_time(self, argv): format = 'ascii' def _logfile_path(x): return x logfile_path = _logfile_path tail = None which = 'total' short_opts = '?C:f:hp:t:' long_opts = [ 'chdir=', 'file=', 'fmt=', 'format=', 'help', 'prefix=', 'tail=', 'title=', 'which=', ] opts, args = getopt.getopt(argv[1:], short_opts, long_opts) for o, a in opts: if o in ('-C', '--chdir'): self.chdir = a elif o in ('-f', '--file'): self.config_file = a elif o in ('--fmt', '--format'): format = a elif o in ('-?', '-h', '--help'): self.do_help(['help', 'time']) sys.exit(0) elif o in ('-p', '--prefix'): self.prefix = a elif o in ('-t', '--tail'): tail = int(a) elif o in ('--title',): self.title = a elif o in ('--which',): if a not in list(self.time_strings.keys()): sys.stderr.write('%s: time: Unrecognized timer "%s".\n' % (self.name, a)) sys.stderr.write('%s Type "%s help time" for help.\n' % (self.name_spaces, self.name)) sys.exit(1) which = a if self.config_file: with open(self.config_file, 'r') as f: config = f.read() HACK_for_exec(config, self.__dict__) if self.chdir: os.chdir(self.chdir) def _logfile_path_join(x): return os.path.join(self.chdir, x) logfile_path = _logfile_path_join if not args: pattern = '%s*.log' % self.prefix args = self.args_to_files([pattern], tail) if not args: if self.chdir: directory = self.chdir else: directory = os.getcwd() sys.stderr.write('%s: time: No arguments specified.\n' % self.name) sys.stderr.write('%s No %s*.log files found in "%s".\n' % (self.name_spaces, self.prefix, directory)) sys.stderr.write('%s Type "%s help time" for help.\n' % (self.name_spaces, self.name)) sys.exit(1) else: args = self.args_to_files(args, tail) cwd_ = os.getcwd() + os.sep if format == 'ascii': columns = ("Total", "SConscripts", "SCons", "commands") self.ascii_table(args, columns, self.get_debug_times, logfile_path) elif format == 'gnuplot': results = self.collect_results(args, self.get_debug_times, self.time_strings[which]) self.gnuplot_results(results, fmt='%s %.6f') else: sys.stderr.write('%s: time: Unknown format "%s".\n' % (self.name, format)) sys.exit(1) if __name__ == '__main__': opts, args = getopt.getopt(sys.argv[1:], 'h?V', ['help', 'version']) ST = SConsTimer() for o, a in opts: if o in ('-?', '-h', '--help'): ST.do_help(['help']) sys.exit(0) elif o in ('-V', '--version'): sys.stdout.write('scons-time version\n') sys.exit(0) if not args: sys.stderr.write('Type "%s help" for usage.\n' % ST.name) sys.exit(1) ST.execute_subcommand(args) # Local Variables: # tab-width:4 # indent-tabs-mode:nil # End: # vim: set expandtab tabstop=4 shiftwidth=4:
lgpl-2.1
sagarpabba/robotframework-seleniumlibrary
src/SeleniumLibrary/screenshot.py
9
3763
# Copyright 2008-2011 Nokia Siemens Networks Oyj
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import os
import base64

from robot import utils


class Screenshot(object):

    def capture_screenshot(self, filename=None):
        """Takes a screenshot of the entire screen and embeds it into the log.

        If no `filename` is given, the screenshot is saved into file
        `selenium-screenshot-<counter>.png` under the directory where
        the Robot Framework log file is written into. The `filename` is
        also considered relative to the same directory, if it is not
        given in absolute format.

        When running on a locked Windows machine, the resulting screenshots
        will be all black. A workaround is using the `Capture Page Screenshot`
        keyword instead.

        There were some changes to this keyword in the 2.3 release:
        - Possibility to take screenshots also when the Selenium Server is
          running on a remote machine was added.
        - Support for absolute `filename` paths was added.
        - Automatic creation of intermediate directories in the path where
          the screenshot is saved was removed.
          `OperatingSystem.Create Directory` can be used instead.
        """
        data = self._selenium.capture_screenshot_to_string()
        self._save_screenshot(data, filename)

    def capture_page_screenshot(self, filename=None, css='background=#CCFFDD'):
        """Takes a screenshot of the current page and embeds it into the log.

        `filename` argument specifies the name of the file to write the
        screenshot into. It works the same way as with `Capture Screenshot`.

        `css` can be used to modify how the screenshot is taken. By default
        the background color is changed to avoid possible problems with
        background leaking when the page layout is somehow broken.

        Selenium currently supports this keyword out-of-the-box only with
        Firefox browser. To make it work with IE, you can start the Selenium
        Server with `-singleWindow` option and use `*ieproxy` as the browser.
        Additionally, the browser independent `Capture Screenshot` keyword
        can be used instead.

        This keyword was added in SeleniumLibrary 2.3.
        """
        data = self._selenium.capture_entire_page_screenshot_to_string(css)
        self._save_screenshot(data, filename)

    def _save_screenshot(self, data, filename):
        path, link = self._get_screenshot_paths(filename)
        outfile = open(path, 'wb')
        # 'decodestring' is used instead of 'b64decode' to support Jython 2.2
        outfile.write(base64.decodestring(data))
        outfile.close()
        # Image is shown on its own row and thus prev row is closed on purpose
        self._html('</td></tr><tr><td colspan="3"><a href="%s">'
                   '<img src="%s" width="800px"></a>' % (link, link))

    def _get_screenshot_paths(self, filename):
        if not filename:
            filename = self._namegen.next()
        else:
            filename = filename.replace('/', os.sep)
        logdir = self._get_log_dir()
        path = os.path.join(logdir, filename)
        link = utils.get_link_path(path, logdir)
        return path, link
apache-2.0
yugangw-msft/azure-cli
src/azure-cli/azure/cli/command_modules/vm/_completers.py
9
1524
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------

from azure.cli.core.commands.parameters import get_one_of_subscription_locations
from azure.cli.core.decorators import Completer

from azure.cli.command_modules.vm._actions import load_images_from_aliases_doc, get_vm_sizes


@Completer
def get_urn_aliases_completion_list(cmd, prefix, namespace):  # pylint: disable=unused-argument
    images = load_images_from_aliases_doc(cmd.cli_ctx)
    return [i['urnAlias'] for i in images]


@Completer
def get_vm_size_completion_list(cmd, prefix, namespace):  # pylint: disable=unused-argument
    location = namespace.location
    if not location:
        location = get_one_of_subscription_locations(cmd.cli_ctx)
    result = get_vm_sizes(cmd.cli_ctx, location)
    return [r.name for r in result]


@Completer
def get_vm_run_command_completion_list(cmd, prefix, namespace):  # pylint: disable=unused-argument
    from ._client_factory import _compute_client_factory
    try:
        location = namespace.location
    except AttributeError:
        location = get_one_of_subscription_locations(cmd.cli_ctx)
    result = _compute_client_factory(cmd.cli_ctx).virtual_machine_run_commands.list(location)
    return [r.id for r in result]
mit
isaac-s/cloudify-manager
rest-service/manager_rest/flask_utils.py
1
3338
######### # Copyright (c) 2013 GigaSpaces Technologies Ltd. All rights reserved # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # * See the License for the specific language governing permissions and # * limitations under the License. from collections import namedtuple from flask import Flask from flask_migrate import Migrate from flask_security import Security from manager_rest import config from manager_rest.storage import user_datastore, db def setup_flask_app(manager_ip='localhost', driver='', hash_salt=None, secret_key=None): """Setup a functioning flask app, when working outside the rest-service :param manager_ip: The IP of the manager :param driver: SQLA driver for postgres (e.g. pg8000) :param hash_salt: The salt to be used when creating user passwords :param secret_key: Secret key used when hashing flask tokens :return: A Flask app """ app = Flask(__name__) db_uri = _get_postgres_db_uri(manager_ip, driver) app.config['SQLALCHEMY_DATABASE_URI'] = db_uri app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False set_flask_security_config(app, hash_salt, secret_key) Security(app=app, datastore=user_datastore) Migrate(app=app, db=db) db.init_app(app) app.app_context().push() return app def _get_postgres_db_uri(manager_ip, driver): """Get a valid SQLA DB URI """ dialect = 'postgresql+{0}'.format(driver) if driver else 'postgres' conf = get_postgres_conf() return '{dialect}://{username}:{password}@{host}/{db_name}'.format( dialect=dialect, username=conf.username, password=conf.password, host=manager_ip, db_name=conf.db_name ) def get_postgres_conf(): """Return a namedtuple with info used to connect to cloudify's PG DB """ conf = namedtuple('PGConf', 'username password db_name') return conf( username='cloudify', password='cloudify', db_name='cloudify_db' ) def set_flask_security_config(app, hash_salt=None, secret_key=None): """Set all necessary Flask-Security configurations :param app: Flask app object :param hash_salt: The salt to be used when creating user passwords :param secret_key: Secret key used when hashing flask tokens """ hash_salt = hash_salt or config.instance.security_hash_salt secret_key = secret_key or config.instance.security_secret_key # Make sure that it's possible to get users from the datastore # by username and not just by email (the default behavior) app.config['SECURITY_USER_IDENTITY_ATTRIBUTES'] = 'username, email' app.config['SECURITY_PASSWORD_HASH'] = 'pbkdf2_sha256' app.config['SECURITY_TOKEN_MAX_AGE'] = 36000 # 10 hours app.config['SECURITY_PASSWORD_SALT'] = hash_salt app.config['SECURITY_REMEMBER_SALT'] = hash_salt app.config['SECRET_KEY'] = secret_key
apache-2.0
joakim-hove/ert
python/python/ert_gui/tools/export/export_tool.py
4
1955
# Copyright (C) 2014 Statoil ASA, Norway. # # The file 'export_tool.py' is part of ERT - Ensemble based Reservoir Tool. # # ERT is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # ERT is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or # FITNESS FOR A PARTICULAR PURPOSE. # # See the GNU General Public License at <http://www.gnu.org/licenses/gpl.html> # for more details. from weakref import ref from ert_gui.ertwidgets import resourceIcon from ert_gui.ertwidgets.closabledialog import ClosableDialog from ert_gui.ertwidgets.models.ertmodel import getCurrentCaseName from ert_gui.tools import Tool from ert_gui.tools.export import ExportPanel, Exporter, ExportKeywordModel class ExportTool(Tool): def __init__(self): super(ExportTool, self).__init__("Export Data", "tools/export", resourceIcon("ide/table_export")) self.__export_widget = None self.__dialog = None self.__exporter = None self.setEnabled(ExportKeywordModel().hasKeywords()) def trigger(self): if self.__export_widget is None: self.__export_widget = ref(ExportPanel(self.parent())) self.__exporter = Exporter() self.__export_widget().runExport.connect(self.__exporter.runExport) self.__export_widget().setSelectedCase(getCurrentCaseName()) self.__dialog = ref(ClosableDialog("Export", self.__export_widget(), self.parent())) self.__export_widget().updateExportButton.connect(self.__dialog().toggleButton) self.__dialog().addButton("Export", self.export) self.__dialog().show() def export(self): self.__export_widget().export() self.__dialog().accept()
gpl-3.0
aabbox/kbengine
kbe/res/scripts/common/Lib/rlcompleter.py
100
5763
"""Word completion for GNU readline. The completer completes keywords, built-ins and globals in a selectable namespace (which defaults to __main__); when completing NAME.NAME..., it evaluates (!) the expression up to the last dot and completes its attributes. It's very cool to do "import sys" type "sys.", hit the completion key (twice), and see the list of names defined by the sys module! Tip: to use the tab key as the completion key, call readline.parse_and_bind("tab: complete") Notes: - Exceptions raised by the completer function are *ignored* (and generally cause the completion to fail). This is a feature -- since readline sets the tty device in raw (or cbreak) mode, printing a traceback wouldn't work well without some complicated hoopla to save, reset and restore the tty state. - The evaluation of the NAME.NAME... form may cause arbitrary application defined code to be executed if an object with a __getattr__ hook is found. Since it is the responsibility of the application (or the user) to enable this feature, I consider this an acceptable risk. More complicated expressions (e.g. function calls or indexing operations) are *not* evaluated. - When the original stdin is not a tty device, GNU readline is never used, and this module (and the readline module) are silently inactive. """ import atexit import builtins import __main__ __all__ = ["Completer"] class Completer: def __init__(self, namespace = None): """Create a new completer for the command line. Completer([namespace]) -> completer instance. If unspecified, the default namespace where completions are performed is __main__ (technically, __main__.__dict__). Namespaces should be given as dictionaries. Completer instances should be used as the completion mechanism of readline via the set_completer() call: readline.set_completer(Completer(my_namespace).complete) """ if namespace and not isinstance(namespace, dict): raise TypeError('namespace must be a dictionary') # Don't bind to namespace quite yet, but flag whether the user wants a # specific namespace or to use __main__.__dict__. This will allow us # to bind to __main__.__dict__ at completion time, not now. if namespace is None: self.use_main_ns = 1 else: self.use_main_ns = 0 self.namespace = namespace def complete(self, text, state): """Return the next possible completion for 'text'. This is called successively with state == 0, 1, 2, ... until it returns None. The completion should begin with 'text'. """ if self.use_main_ns: self.namespace = __main__.__dict__ if state == 0: if "." in text: self.matches = self.attr_matches(text) else: self.matches = self.global_matches(text) try: return self.matches[state] except IndexError: return None def _callable_postfix(self, val, word): if callable(val): word = word + "(" return word def global_matches(self, text): """Compute matches when text is a simple name. Return a list of all keywords, built-in functions and names currently defined in self.namespace that match. """ import keyword matches = [] n = len(text) for word in keyword.kwlist: if word[:n] == text: matches.append(word) for nspace in [builtins.__dict__, self.namespace]: for word, val in nspace.items(): if word[:n] == text and word != "__builtins__": matches.append(self._callable_postfix(val, word)) return matches def attr_matches(self, text): """Compute matches when text contains a dot. Assuming the text is of the form NAME.NAME....[NAME], and is evaluable in self.namespace, it will be evaluated and its attributes (as revealed by dir()) are used as possible completions. 
(For class instances, class members are also considered.) WARNING: this can still invoke arbitrary C code, if an object with a __getattr__ hook is evaluated. """ import re m = re.match(r"(\w+(\.\w+)*)\.(\w*)", text) if not m: return [] expr, attr = m.group(1, 3) try: thisobject = eval(expr, self.namespace) except Exception: return [] # get the content of the object, except __builtins__ words = dir(thisobject) if "__builtins__" in words: words.remove("__builtins__") if hasattr(thisobject, '__class__'): words.append('__class__') words.extend(get_class_members(thisobject.__class__)) matches = [] n = len(attr) for word in words: if word[:n] == attr and hasattr(thisobject, word): val = getattr(thisobject, word) word = self._callable_postfix(val, "%s.%s" % (expr, word)) matches.append(word) return matches def get_class_members(klass): ret = dir(klass) if hasattr(klass,'__bases__'): for base in klass.__bases__: ret = ret + get_class_members(base) return ret try: import readline except ImportError: pass else: readline.set_completer(Completer().complete) # Release references early at shutdown (the readline module's # contents are quasi-immortal, and the completer function holds a # reference to globals). atexit.register(lambda: readline.set_completer(None))
lgpl-3.0
Yearcoin-dev/yearcoin
qa/rpc-tests/invalidateblock.py
104
3077
#!/usr/bin/env python3 # Copyright (c) 2014-2016 The Bitcoin Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. # # Test InvalidateBlock code # from test_framework.test_framework import BitcoinTestFramework from test_framework.util import * class InvalidateTest(BitcoinTestFramework): def __init__(self): super().__init__() self.setup_clean_chain = True self.num_nodes = 3 def setup_network(self): self.nodes = [] self.is_network_split = False self.nodes.append(start_node(0, self.options.tmpdir, ["-debug"])) self.nodes.append(start_node(1, self.options.tmpdir, ["-debug"])) self.nodes.append(start_node(2, self.options.tmpdir, ["-debug"])) def run_test(self): print("Make sure we repopulate setBlockIndexCandidates after InvalidateBlock:") print("Mine 4 blocks on Node 0") self.nodes[0].generate(4) assert(self.nodes[0].getblockcount() == 4) besthash = self.nodes[0].getbestblockhash() print("Mine competing 6 blocks on Node 1") self.nodes[1].generate(6) assert(self.nodes[1].getblockcount() == 6) print("Connect nodes to force a reorg") connect_nodes_bi(self.nodes,0,1) sync_blocks(self.nodes[0:2]) assert(self.nodes[0].getblockcount() == 6) badhash = self.nodes[1].getblockhash(2) print("Invalidate block 2 on node 0 and verify we reorg to node 0's original chain") self.nodes[0].invalidateblock(badhash) newheight = self.nodes[0].getblockcount() newhash = self.nodes[0].getbestblockhash() if (newheight != 4 or newhash != besthash): raise AssertionError("Wrong tip for node0, hash %s, height %d"%(newhash,newheight)) print("\nMake sure we won't reorg to a lower work chain:") connect_nodes_bi(self.nodes,1,2) print("Sync node 2 to node 1 so both have 6 blocks") sync_blocks(self.nodes[1:3]) assert(self.nodes[2].getblockcount() == 6) print("Invalidate block 5 on node 1 so its tip is now at 4") self.nodes[1].invalidateblock(self.nodes[1].getblockhash(5)) assert(self.nodes[1].getblockcount() == 4) print("Invalidate block 3 on node 2, so its tip is now 2") self.nodes[2].invalidateblock(self.nodes[2].getblockhash(3)) assert(self.nodes[2].getblockcount() == 2) print("..and then mine a block") self.nodes[2].generate(1) print("Verify all nodes are at the right height") time.sleep(5) for i in range(3): print(i,self.nodes[i].getblockcount()) assert(self.nodes[2].getblockcount() == 3) assert(self.nodes[0].getblockcount() == 4) node1height = self.nodes[1].getblockcount() if node1height < 4: raise AssertionError("Node 1 reorged to a lower height: %d"%node1height) if __name__ == '__main__': InvalidateTest().main()
mit
fafaman/django
django/template/utils.py
308
4736
import os import warnings from collections import Counter, OrderedDict from django.apps import apps from django.conf import settings from django.core.exceptions import ImproperlyConfigured from django.utils import lru_cache from django.utils._os import upath from django.utils.deprecation import RemovedInDjango110Warning from django.utils.functional import cached_property from django.utils.module_loading import import_string class InvalidTemplateEngineError(ImproperlyConfigured): pass class EngineHandler(object): def __init__(self, templates=None): """ templates is an optional list of template engine definitions (structured like settings.TEMPLATES). """ self._templates = templates self._engines = {} @cached_property def templates(self): if self._templates is None: self._templates = settings.TEMPLATES if not self._templates: warnings.warn( "You haven't defined a TEMPLATES setting. You must do so " "before upgrading to Django 1.10. Otherwise Django will be " "unable to load templates.", RemovedInDjango110Warning) self._templates = [ { 'BACKEND': 'django.template.backends.django.DjangoTemplates', 'DIRS': settings.TEMPLATE_DIRS, 'OPTIONS': { 'allowed_include_roots': settings.ALLOWED_INCLUDE_ROOTS, 'context_processors': settings.TEMPLATE_CONTEXT_PROCESSORS, 'debug': settings.TEMPLATE_DEBUG, 'loaders': settings.TEMPLATE_LOADERS, 'string_if_invalid': settings.TEMPLATE_STRING_IF_INVALID, }, }, ] templates = OrderedDict() backend_names = [] for tpl in self._templates: tpl = tpl.copy() try: # This will raise an exception if 'BACKEND' doesn't exist or # isn't a string containing at least one dot. default_name = tpl['BACKEND'].rsplit('.', 2)[-2] except Exception: invalid_backend = tpl.get('BACKEND', '<not defined>') raise ImproperlyConfigured( "Invalid BACKEND for a template engine: {}. Check " "your TEMPLATES setting.".format(invalid_backend)) tpl.setdefault('NAME', default_name) tpl.setdefault('DIRS', []) tpl.setdefault('APP_DIRS', False) tpl.setdefault('OPTIONS', {}) templates[tpl['NAME']] = tpl backend_names.append(tpl['NAME']) counts = Counter(backend_names) duplicates = [alias for alias, count in counts.most_common() if count > 1] if duplicates: raise ImproperlyConfigured( "Template engine aliases aren't unique, duplicates: {}. " "Set a unique NAME for each engine in settings.TEMPLATES." .format(", ".join(duplicates))) return templates def __getitem__(self, alias): try: return self._engines[alias] except KeyError: try: params = self.templates[alias] except KeyError: raise InvalidTemplateEngineError( "Could not find config for '{}' " "in settings.TEMPLATES".format(alias)) # If importing or initializing the backend raises an exception, # self._engines[alias] isn't set and this code may get executed # again, so we must preserve the original params. See #24265. params = params.copy() backend = params.pop('BACKEND') engine_cls = import_string(backend) engine = engine_cls(params) self._engines[alias] = engine return engine def __iter__(self): return iter(self.templates) def all(self): return [self[alias] for alias in self] @lru_cache.lru_cache() def get_app_template_dirs(dirname): """ Return an iterable of paths of directories to load app templates from. dirname is the name of the subdirectory containing templates inside installed applications. 
""" template_dirs = [] for app_config in apps.get_app_configs(): if not app_config.path: continue template_dir = os.path.join(app_config.path, dirname) if os.path.isdir(template_dir): template_dirs.append(upath(template_dir)) # Immutable return value because it will be cached and shared by callers. return tuple(template_dirs)
bsd-3-clause
huangciyin/youtube-dl
youtube_dl/extractor/rtbf.py
44
1654
# coding: utf-8
from __future__ import unicode_literals

import re
import json

from .common import InfoExtractor


class RTBFIE(InfoExtractor):
    _VALID_URL = r'https?://www.rtbf.be/video/[^\?]+\?id=(?P<id>\d+)'
    _TEST = {
        'url': 'https://www.rtbf.be/video/detail_les-diables-au-coeur-episode-2?id=1921274',
        'md5': '799f334ddf2c0a582ba80c44655be570',
        'info_dict': {
            'id': '1921274',
            'ext': 'mp4',
            'title': 'Les Diables au coeur (épisode 2)',
            'description': 'Football - Diables Rouges',
            'duration': 3099,
            'timestamp': 1398456336,
            'upload_date': '20140425',
        }
    }

    def _real_extract(self, url):
        mobj = re.match(self._VALID_URL, url)
        video_id = mobj.group('id')

        page = self._download_webpage('https://www.rtbf.be/video/embed?id=%s' % video_id, video_id)

        data = json.loads(self._html_search_regex(
            r'<div class="js-player-embed(?: player-embed)?" data-video="([^"]+)"',
            page, 'data video'))['data']

        video_url = data.get('downloadUrl') or data.get('url')

        if data['provider'].lower() == 'youtube':
            return self.url_result(video_url, 'Youtube')

        return {
            'id': video_id,
            'url': video_url,
            'title': data['title'],
            'description': data.get('description') or data.get('subtitle'),
            'thumbnail': data['thumbnail']['large'],
            'duration': data.get('duration') or data.get('realDuration'),
            'timestamp': data['created'],
            'view_count': data['viewCount'],
        }
unlicense
forevernull/incubator-airflow
airflow/ti_deps/deps/runnable_exec_date_dep.py
42
1941
# -*- coding: utf-8 -*-
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from datetime import datetime

from airflow.ti_deps.deps.base_ti_dep import BaseTIDep
from airflow.utils.db import provide_session


class RunnableExecDateDep(BaseTIDep):
    NAME = "Execution Date"
    IGNOREABLE = True

    @provide_session
    def _get_dep_statuses(self, ti, session, dep_context):
        cur_date = datetime.now()

        if ti.execution_date > cur_date:
            yield self._failing_status(
                reason="Execution date {0} is in the future (the current "
                       "date is {1}).".format(ti.execution_date.isoformat(),
                                              cur_date.isoformat()))

        if ti.task.end_date and ti.execution_date > ti.task.end_date:
            yield self._failing_status(
                reason="The execution date is {0} but this is after the task's end date "
                       "{1}.".format(
                           ti.execution_date.isoformat(),
                           ti.task.end_date.isoformat()))

        if (ti.task.dag and
                ti.task.dag.end_date and
                ti.execution_date > ti.task.dag.end_date):
            yield self._failing_status(
                reason="The execution date is {0} but this is after the task's DAG's "
                       "end date {1}.".format(
                           ti.execution_date.isoformat(),
                           ti.task.dag.end_date.isoformat()))
apache-2.0
Tecnativa/website
website_event_register_free_with_sale/controllers/website_event.py
28
2630
# -*- coding: utf-8 -*- ############################################################################## # # Copyright (c) 2015 Serv. Tecnol. Avanzados (http://www.serviciosbaeza.com) # Pedro M. Baeza <pedro.baeza@serviciosbaeza.com> # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## from openerp import http from openerp.http import request from openerp.addons.website_event_sale.controllers.main import website_event class WebsiteEvent(website_event): @http.route(['/event/cart/update'], type='http', auth="public", methods=['POST'], website=True) def cart_update(self, **post): has_paid_tickets = False post['free_tickets'] = 0 for key, value in post.items(): qty = int(value or "0") ticket_words = key.split("-") ticket_id = (ticket_words[0] == 'ticket' and int(ticket_words[1]) or None) if not qty or not ticket_id: continue ticket = request.env['event.event.ticket'].sudo().browse(ticket_id) if not ticket.price: # Accumulate possible multiple free tickets post['free_tickets'] = ( str(int(post['free_tickets']) + qty)) else: has_paid_tickets = True # Add to shopping cart the rest of the items order = request.website.sale_get_order(force_create=1) order.with_context(event_ticket_id=ticket.id)._cart_update( product_id=ticket.product_id.id, add_qty=qty) if not post['free_tickets'] and not has_paid_tickets: return request.redirect("/event/%s" % post['event_id']) request.session.update({ 'free_tickets': post['free_tickets'], 'event_id': post['event_id'], }) return request.redirect("/shop/checkout")
agpl-3.0
NoahFlowa/glowing-spoon
venv/lib/python2.7/site-packages/pip/_vendor/__init__.py
329
4670
""" pip._vendor is for vendoring dependencies of pip to prevent needing pip to depend on something external. Files inside of pip._vendor should be considered immutable and should only be updated to versions from upstream. """ from __future__ import absolute_import import glob import os.path import sys # Downstream redistributors which have debundled our dependencies should also # patch this value to be true. This will trigger the additional patching # to cause things like "six" to be available as pip. DEBUNDLED = False # By default, look in this directory for a bunch of .whl files which we will # add to the beginning of sys.path before attempting to import anything. This # is done to support downstream re-distributors like Debian and Fedora who # wish to create their own Wheels for our dependencies to aid in debundling. WHEEL_DIR = os.path.abspath(os.path.dirname(__file__)) # Define a small helper function to alias our vendored modules to the real ones # if the vendored ones do not exist. This idea of this was taken from # https://github.com/kennethreitz/requests/pull/2567. def vendored(modulename): vendored_name = "{0}.{1}".format(__name__, modulename) try: __import__(vendored_name, globals(), locals(), level=0) except ImportError: try: __import__(modulename, globals(), locals(), level=0) except ImportError: # We can just silently allow import failures to pass here. If we # got to this point it means that ``import pip._vendor.whatever`` # failed and so did ``import whatever``. Since we're importing this # upfront in an attempt to alias imports, not erroring here will # just mean we get a regular import error whenever pip *actually* # tries to import one of these modules to use it, which actually # gives us a better error message than we would have otherwise # gotten. pass else: sys.modules[vendored_name] = sys.modules[modulename] base, head = vendored_name.rsplit(".", 1) setattr(sys.modules[base], head, sys.modules[modulename]) # If we're operating in a debundled setup, then we want to go ahead and trigger # the aliasing of our vendored libraries as well as looking for wheels to add # to our sys.path. This will cause all of this code to be a no-op typically # however downstream redistributors can enable it in a consistent way across # all platforms. if DEBUNDLED: # Actually look inside of WHEEL_DIR to find .whl files and add them to the # front of our sys.path. sys.path[:] = glob.glob(os.path.join(WHEEL_DIR, "*.whl")) + sys.path # Actually alias all of our vendored dependencies. 
vendored("cachecontrol") vendored("colorama") vendored("distlib") vendored("distro") vendored("html5lib") vendored("lockfile") vendored("six") vendored("six.moves") vendored("six.moves.urllib") vendored("packaging") vendored("packaging.version") vendored("packaging.specifiers") vendored("pkg_resources") vendored("progress") vendored("retrying") vendored("requests") vendored("requests.packages") vendored("requests.packages.urllib3") vendored("requests.packages.urllib3._collections") vendored("requests.packages.urllib3.connection") vendored("requests.packages.urllib3.connectionpool") vendored("requests.packages.urllib3.contrib") vendored("requests.packages.urllib3.contrib.ntlmpool") vendored("requests.packages.urllib3.contrib.pyopenssl") vendored("requests.packages.urllib3.exceptions") vendored("requests.packages.urllib3.fields") vendored("requests.packages.urllib3.filepost") vendored("requests.packages.urllib3.packages") vendored("requests.packages.urllib3.packages.ordered_dict") vendored("requests.packages.urllib3.packages.six") vendored("requests.packages.urllib3.packages.ssl_match_hostname") vendored("requests.packages.urllib3.packages.ssl_match_hostname." "_implementation") vendored("requests.packages.urllib3.poolmanager") vendored("requests.packages.urllib3.request") vendored("requests.packages.urllib3.response") vendored("requests.packages.urllib3.util") vendored("requests.packages.urllib3.util.connection") vendored("requests.packages.urllib3.util.request") vendored("requests.packages.urllib3.util.response") vendored("requests.packages.urllib3.util.retry") vendored("requests.packages.urllib3.util.ssl_") vendored("requests.packages.urllib3.util.timeout") vendored("requests.packages.urllib3.util.url")
apache-2.0
sameetb-cuelogic/edx-platform-test
common/djangoapps/course_about/data.py
16
1855
"""Data Aggregation Layer for the Course About API. This is responsible for combining data from the following resources: * CourseDescriptor * CourseAboutDescriptor """ import logging from opaque_keys import InvalidKeyError from opaque_keys.edx.keys import CourseKey from course_about.serializers import serialize_content from course_about.errors import CourseNotFoundError from xmodule.modulestore.django import modulestore from xmodule.modulestore.exceptions import ItemNotFoundError log = logging.getLogger(__name__) ABOUT_ATTRIBUTES = [ 'effort' ] def get_course_about_details(course_id): # pylint: disable=unused-argument """ Return course information for a given course id. Args: course_id(str) : The course id to retrieve course information for. Returns: Serializable dictionary of the Course About Information. Raises: CourseNotFoundError """ try: course_key = CourseKey.from_string(course_id) course_descriptor = modulestore().get_course(course_key) if course_descriptor is None: raise CourseNotFoundError("course not found") except InvalidKeyError as err: raise CourseNotFoundError(err.message) about_descriptor = { attribute: _fetch_course_detail(course_key, attribute) for attribute in ABOUT_ATTRIBUTES } course_info = serialize_content(course_descriptor=course_descriptor, about_descriptor=about_descriptor) return course_info def _fetch_course_detail(course_key, attribute): """ Fetch the course about attribute for the given course's attribute from persistence and return its value. """ usage_key = course_key.make_usage_key('about', attribute) try: value = modulestore().get_item(usage_key).data except ItemNotFoundError: value = None return value
agpl-3.0
sbidoul/odoo
addons/project/__openerp__.py
259
2562
# -*- coding: utf-8 -*- ############################################################################## # # OpenERP, Open Source Management Solution # Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>). # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## { 'name': 'Project Management', 'version': '1.1', 'author': 'OpenERP SA', 'website': 'https://www.odoo.com/page/project-management', 'category': 'Project Management', 'sequence': 8, 'summary': 'Projects, Tasks', 'depends': [ 'base_setup', 'product', 'analytic', 'board', 'mail', 'resource', 'web_kanban' ], 'description': """ Track multi-level projects, tasks, work done on tasks ===================================================== This application allows an operational project management system to organize your activities into tasks and plan the work you need to get the tasks completed. Gantt diagrams will give you a graphical representation of your project plans, as well as resources availability and workload. Dashboard / Reports for Project Management will include: -------------------------------------------------------- * My Tasks * Open Tasks * Tasks Analysis * Cumulative Flow """, 'data': [ 'security/project_security.xml', 'wizard/project_task_delegate_view.xml', 'security/ir.model.access.csv', 'project_data.xml', 'project_view.xml', 'res_partner_view.xml', 'report/project_report_view.xml', 'report/project_cumulative.xml', 'res_config_view.xml', 'views/project.xml', ], 'demo': ['project_demo.xml'], 'test': [ ], 'installable': True, 'auto_install': False, 'application': True, } # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
agpl-3.0
skalk/linux
Documentation/networking/cxacru-cf.py
14668
1626
#!/usr/bin/env python
# Copyright 2009 Simon Arlott
#
# This program is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by the Free
# Software Foundation; either version 2 of the License, or (at your option)
# any later version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
# more details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 59
# Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
# Usage: cxacru-cf.py < cxacru-cf.bin
# Output: values string suitable for the sysfs adsl_config attribute
#
# Warning: cxacru-cf.bin with MD5 hash cdbac2689969d5ed5d4850f117702110
# contains mis-aligned values which will stop the modem from being able
# to make a connection. If the first and last two bytes are removed then
# the values become valid, but the modulation will be forced to ANSI
# T1.413 only which may not be appropriate.
#
# The original binary format is a packed list of le32 values.

import sys
import struct

i = 0
while True:
    buf = sys.stdin.read(4)

    if len(buf) == 0:
        break
    elif len(buf) != 4:
        sys.stdout.write("\n")
        sys.stderr.write("Error: read {0} not 4 bytes\n".format(len(buf)))
        sys.exit(1)

    if i > 0:
        sys.stdout.write(" ")
    sys.stdout.write("{0:x}={1}".format(i, struct.unpack("<I", buf)[0]))
    i += 1

sys.stdout.write("\n")
gpl-2.0
rh-s/heat
heat_integrationtests/functional/test_hooks.py
3
13215
# Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import logging import yaml from heat_integrationtests.common import test LOG = logging.getLogger(__name__) class HooksTest(test.HeatIntegrationTest): def setUp(self): super(HooksTest, self).setUp() self.client = self.orchestration_client self.template = {'heat_template_version': '2014-10-16', 'resources': { 'foo_step1': {'type': 'OS::Heat::RandomString'}, 'foo_step2': {'type': 'OS::Heat::RandomString', 'depends_on': 'foo_step1'}, 'foo_step3': {'type': 'OS::Heat::RandomString', 'depends_on': 'foo_step2'}}} def test_hook_pre_create(self): env = {'resource_registry': {'resources': {'foo_step2': {'hooks': 'pre-create'}}}} # Note we don't wait for CREATE_COMPLETE, because we need to # signal to clear the hook before create will complete stack_identifier = self.stack_create( template=self.template, environment=env, expected_status='CREATE_IN_PROGRESS') self._wait_for_resource_status( stack_identifier, 'foo_step1', 'CREATE_COMPLETE') self._wait_for_resource_status( stack_identifier, 'foo_step2', 'INIT_COMPLETE') ev = self.wait_for_event_with_reason( stack_identifier, reason='CREATE paused until Hook pre-create is cleared', rsrc_name='foo_step2') self.assertEqual('INIT_COMPLETE', ev[0].resource_status) self.client.resources.signal(stack_identifier, 'foo_step2', data={'unset_hook': 'pre-create'}) ev = self.wait_for_event_with_reason( stack_identifier, reason='Hook pre-create is cleared', rsrc_name='foo_step2') self.assertEqual('INIT_COMPLETE', ev[0].resource_status) self._wait_for_resource_status( stack_identifier, 'foo_step2', 'CREATE_COMPLETE') self._wait_for_stack_status(stack_identifier, 'CREATE_COMPLETE') def test_hook_pre_update_nochange(self): env = {'resource_registry': {'resources': {'foo_step2': {'hooks': 'pre-update'}}}} stack_identifier = self.stack_create( template=self.template, environment=env) res_before = self.client.resources.get(stack_identifier, 'foo_step2') # Note we don't wait for UPDATE_COMPLETE, because we need to # signal to clear the hook before update will complete self.update_stack( stack_identifier, template=self.template, environment=env, expected_status='UPDATE_IN_PROGRESS') # Note when a hook is specified, the resource status doesn't change # when we hit the hook, so we look for the event, then assert the # state is unchanged. 
self._wait_for_resource_status( stack_identifier, 'foo_step2', 'CREATE_COMPLETE') ev = self.wait_for_event_with_reason( stack_identifier, reason='UPDATE paused until Hook pre-update is cleared', rsrc_name='foo_step2') self.assertEqual('CREATE_COMPLETE', ev[0].resource_status) self.client.resources.signal(stack_identifier, 'foo_step2', data={'unset_hook': 'pre-update'}) ev = self.wait_for_event_with_reason( stack_identifier, reason='Hook pre-update is cleared', rsrc_name='foo_step2') self.assertEqual('CREATE_COMPLETE', ev[0].resource_status) self._wait_for_resource_status( stack_identifier, 'foo_step2', 'CREATE_COMPLETE') self._wait_for_stack_status(stack_identifier, 'UPDATE_COMPLETE') res_after = self.client.resources.get(stack_identifier, 'foo_step2') self.assertEqual(res_before.physical_resource_id, res_after.physical_resource_id) def test_hook_pre_update_replace(self): env = {'resource_registry': {'resources': {'foo_step2': {'hooks': 'pre-update'}}}} stack_identifier = self.stack_create( template=self.template, environment=env) res_before = self.client.resources.get(stack_identifier, 'foo_step2') # Note we don't wait for UPDATE_COMPLETE, because we need to # signal to clear the hook before update will complete self.template['resources']['foo_step2']['properties'] = {'length': 10} self.update_stack( stack_identifier, template=self.template, environment=env, expected_status='UPDATE_IN_PROGRESS') # Note when a hook is specified, the resource status doesn't change # when we hit the hook, so we look for the event, then assert the # state is unchanged. self._wait_for_resource_status( stack_identifier, 'foo_step2', 'CREATE_COMPLETE') ev = self.wait_for_event_with_reason( stack_identifier, reason='UPDATE paused until Hook pre-update is cleared', rsrc_name='foo_step2') self.assertEqual('CREATE_COMPLETE', ev[0].resource_status) self.client.resources.signal(stack_identifier, 'foo_step2', data={'unset_hook': 'pre-update'}) ev = self.wait_for_event_with_reason( stack_identifier, reason='Hook pre-update is cleared', rsrc_name='foo_step2') self.assertEqual('CREATE_COMPLETE', ev[0].resource_status) self._wait_for_resource_status( stack_identifier, 'foo_step2', 'CREATE_COMPLETE') self._wait_for_stack_status(stack_identifier, 'UPDATE_COMPLETE') res_after = self.client.resources.get(stack_identifier, 'foo_step2') self.assertNotEqual(res_before.physical_resource_id, res_after.physical_resource_id) def test_hook_pre_update_in_place(self): env = {'resource_registry': {'resources': {'rg': {'hooks': 'pre-update'}}}} template = {'heat_template_version': '2014-10-16', 'resources': { 'rg': { 'type': 'OS::Heat::ResourceGroup', 'properties': { 'count': 1, 'resource_def': { 'type': 'OS::Heat::RandomString'}}}}} # Note we don't wait for CREATE_COMPLETE, because we need to # signal to clear the hook before create will complete stack_identifier = self.stack_create( template=template, environment=env) res_before = self.client.resources.get(stack_identifier, 'rg') template['resources']['rg']['properties']['count'] = 2 self.update_stack( stack_identifier, template=template, environment=env, expected_status='UPDATE_IN_PROGRESS') # Note when a hook is specified, the resource status doesn't change # when we hit the hook, so we look for the event, then assert the # state is unchanged. 
self._wait_for_resource_status( stack_identifier, 'rg', 'CREATE_COMPLETE') ev = self.wait_for_event_with_reason( stack_identifier, reason='UPDATE paused until Hook pre-update is cleared', rsrc_name='rg') self.assertEqual('CREATE_COMPLETE', ev[0].resource_status) self.client.resources.signal(stack_identifier, 'rg', data={'unset_hook': 'pre-update'}) ev = self.wait_for_event_with_reason( stack_identifier, reason='Hook pre-update is cleared', rsrc_name='rg') self.assertEqual('CREATE_COMPLETE', ev[0].resource_status) self._wait_for_stack_status(stack_identifier, 'UPDATE_COMPLETE') res_after = self.client.resources.get(stack_identifier, 'rg') self.assertEqual(res_before.physical_resource_id, res_after.physical_resource_id) def test_hook_pre_create_nested(self): files = {'nested.yaml': yaml.dump(self.template)} env = {'resource_registry': {'resources': {'nested': {'foo_step2': {'hooks': 'pre-create'}}}}} template = {'heat_template_version': '2014-10-16', 'resources': { 'nested': {'type': 'nested.yaml'}}} # Note we don't wait for CREATE_COMPLETE, because we need to # signal to clear the hook before create will complete stack_identifier = self.stack_create( template=template, environment=env, files=files, expected_status='CREATE_IN_PROGRESS') self._wait_for_resource_status(stack_identifier, 'nested', 'CREATE_IN_PROGRESS') nested_identifier = self.assert_resource_is_a_stack( stack_identifier, 'nested', wait=True) self._wait_for_resource_status( nested_identifier, 'foo_step1', 'CREATE_COMPLETE') self._wait_for_resource_status( nested_identifier, 'foo_step2', 'INIT_COMPLETE') ev = self.wait_for_event_with_reason( nested_identifier, reason='CREATE paused until Hook pre-create is cleared', rsrc_name='foo_step2') self.assertEqual('INIT_COMPLETE', ev[0].resource_status) self.client.resources.signal(nested_identifier, 'foo_step2', data={'unset_hook': 'pre-create'}) ev = self.wait_for_event_with_reason( nested_identifier, reason='Hook pre-create is cleared', rsrc_name='foo_step2') self.assertEqual('INIT_COMPLETE', ev[0].resource_status) self._wait_for_resource_status( nested_identifier, 'foo_step2', 'CREATE_COMPLETE') self._wait_for_stack_status(stack_identifier, 'CREATE_COMPLETE') def test_hook_pre_create_wildcard(self): env = {'resource_registry': {'resources': {'foo_*': {'hooks': 'pre-create'}}}} # Note we don't wait for CREATE_COMPLETE, because we need to # signal to clear the hook before create will complete stack_identifier = self.stack_create( template=self.template, environment=env, expected_status='CREATE_IN_PROGRESS') self._wait_for_resource_status( stack_identifier, 'foo_step1', 'INIT_COMPLETE') self.wait_for_event_with_reason( stack_identifier, reason='CREATE paused until Hook pre-create is cleared', rsrc_name='foo_step1') self.client.resources.signal(stack_identifier, 'foo_step1', data={'unset_hook': 'pre-create'}) self.wait_for_event_with_reason( stack_identifier, reason='Hook pre-create is cleared', rsrc_name='foo_step1') self._wait_for_resource_status( stack_identifier, 'foo_step2', 'INIT_COMPLETE') self.wait_for_event_with_reason( stack_identifier, reason='CREATE paused until Hook pre-create is cleared', rsrc_name='foo_step2') self.client.resources.signal(stack_identifier, 'foo_step2', data={'unset_hook': 'pre-create'}) self.wait_for_event_with_reason( stack_identifier, reason='Hook pre-create is cleared', rsrc_name='foo_step2') self._wait_for_resource_status( stack_identifier, 'foo_step3', 'INIT_COMPLETE') self.wait_for_event_with_reason( stack_identifier, reason='CREATE paused until Hook 
pre-create is cleared', rsrc_name='foo_step3') self.client.resources.signal(stack_identifier, 'foo_step3', data={'unset_hook': 'pre-create'}) self.wait_for_event_with_reason( stack_identifier, reason='Hook pre-create is cleared', rsrc_name='foo_step3') self._wait_for_stack_status(stack_identifier, 'CREATE_COMPLETE')
apache-2.0
dcraft2/dcraft1127-myclone
gui/dialog.py
14
5395
#!/usr/bin/python # -*- coding: utf-8 -*- "Common dialogs (function wrappers for wxPython dialogs)" __author__ = "Mariano Reingart (reingart@gmail.com)" __copyright__ = "Copyright (C) 2013- Mariano Reingart" import wx from wx.lib import dialogs from .font import Font def alert(message, title="", parent=None, scrolled=False, icon="exclamation"): "Show a simple pop-up modal dialog" if not scrolled: icons = {'exclamation': wx.ICON_EXCLAMATION, 'error': wx.ICON_ERROR, 'question': wx.ICON_QUESTION, 'info': wx.ICON_INFORMATION} style = wx.OK | icons[icon] result = dialogs.messageDialog(parent, message, title, style) else: result = dialogs.scrolledMessageDialog(parent, message, title) def prompt(message="", title="", default="", multiline=False, password=None, parent=None): "Modal dialog asking for an input, returns string or None if cancelled" if password: style = wx.TE_PASSWORD | wx.OK | wx.CANCEL result = dialogs.textEntryDialog(parent, message, title, default, style) elif multiline: style = wx.TE_MULTILINE | wx.OK | wx.CANCEL result = dialogs.textEntryDialog(parent, message, title, default, style) # workaround for Mac OS X result.text = '\n'.join(result.text.splitlines()) else: result = dialogs.textEntryDialog(parent, message, title, default) if result.accepted: return result.text def confirm(message="", title="", default=False, ok=False, cancel=False, parent=None): "Ask for confirmation (yes/no or ok and cancel), returns True or False" style = wx.CENTRE if ok: style |= wx.OK else: style |= wx.YES | wx.NO if default: style |= wx.YES_DEFAULT else: style |= wx.NO_DEFAULT if cancel: style |= wx.CANCEL result = dialogs.messageDialog(parent, message, title, style) if cancel and result.returned == wx.ID_CANCEL: return None return result.accepted # True or False def select_font(message="", title="", font=None, parent=None): "Show a dialog to select a font" if font is not None: wx_font = font._get_wx_font() # use as default else: wx_font = None font = Font() # create an empty font result = dialogs.fontDialog(parent, font=wx_font) if result.accepted: font_data = result.fontData result.color = result.fontData.GetColour().Get() wx_font = result.fontData.GetChosenFont() font.set_wx_font(wx_font) wx_font = None return font def select_color(message="", title="", color=None, parent=None): "Show a dialog to pick a color" result = dialogs.colorDialog(parent, color=color) return result.accepted and result.color def open_file(title="Open", directory='', filename='', wildcard='All Files (*.*)|*.*', multiple=False, parent=None): "Show a dialog to select files to open, return path(s) if accepted" style = wx.OPEN if multiple: style |= wx.MULTIPLE result = dialogs.fileDialog(parent, title, directory, filename, wildcard, style) if result.paths and not multiple: return result.paths[0] else: return result.paths def save_file(title="Save", directory='', filename='', wildcard='All Files (*.*)|*.*', overwrite=False, parent=None): "Show a dialog to select file to save, return path(s) if accepted" style = wx.SAVE if not overwrite: style |= wx.OVERWRITE_PROMPT result = dialogs.fileDialog(parent, title, directory, filename, wildcard, style) return result.paths def choose_directory(message='Choose a directory', path="", parent=None): "Show a dialog to choose a directory" result = dialogs.directoryDialog(parent, message, path) return result.path def single_choice(options=[], message='', title='', parent=None): result = dialogs.singleChoiceDialog(parent, message, title, options) return result.selection def 
multiple_choice(options=[], message='', title='', parent=None): result = dialogs.multipleChoiceDialog(parent, message, title, options) return result.selection def find(default='', whole_words=0, case_sensitive=0, parent=None): "Shows a find text dialog" result = dialogs.findDialog(parent, default, whole_words, case_sensitive) return {'text': result.searchText, 'whole_words': result.wholeWordsOnly, 'case_sensitive': result.caseSensitive} if __name__ == "__main__": app = wx.App(redirect=False) alert("hola!", "Alert!", icon="error") text = prompt("Input your name:", "Prompt...", "mariano") print text ok = confirm("do you agree?", "Confirm?", default=True, cancel=True) print ok font = select_font("Select a font!") print font color = select_color("Pick a color") print color print open_file() print save_file(overwrite=True) print choose_directory() print single_choice(["1", 'b', '$']) print multiple_choice(["1", 'b', '$']) print find("hola")
lgpl-3.0
ecdpalma/napscheduler
napscheduler/scheduler.py
1
16056
""" This module is the main part of the library. It houses the Scheduler class and related exceptions. """ from threading import Thread, Event, Lock from datetime import datetime, timedelta from logging import getLogger import os import sys from napscheduler.util import * from napscheduler.triggers import SimpleTrigger, IntervalTrigger, CronTrigger from napscheduler.job import Job, MaxInstancesReachedError from napscheduler.events import * from napscheduler.threadpool import ThreadPool from napscheduler.observable import Observable logger = getLogger(__name__) class SchedulerAlreadyRunningError(Exception): """ Raised when attempting to start or configure the scheduler when it's already running. """ def __str__(self): return 'Scheduler is already running' class Scheduler(Observable): """ This class is responsible for scheduling jobs and triggering their execution. """ _stopped = False _thread = None def __init__(self, gconfig={}, **options): Observable.__init__(self) self._wakeup = Event() self._jobs = [] self._jobs_lock = Lock() self._pending_jobs = [] self.configure(gconfig, **options) def configure(self, gconfig={}, **options): """ Reconfigures the scheduler with the given options. Can only be done when the scheduler isn't running. """ if self.running: raise SchedulerAlreadyRunningError # Set general options config = combine_opts(gconfig, 'napscheduler.', options) self.misfire_grace_time = int(config.pop('misfire_grace_time', 1)) self.coalesce = asbool(config.pop('coalesce', True)) self.daemonic = asbool(config.pop('daemonic', True)) # Configure the thread pool if 'threadpool' in config: self._threadpool = maybe_ref(config['threadpool']) else: threadpool_opts = combine_opts(config, 'threadpool.') self._threadpool = ThreadPool(**threadpool_opts) def start(self): """ Starts the scheduler in a new thread. """ if self.running: raise SchedulerAlreadyRunningError # Schedule all pending jobs for job in self._pending_jobs: self._real_add_job(job, False) del self._pending_jobs[:] self._stopped = False self._thread = Thread(target=self._main_loop, name='NAPScheduler') self._thread.setDaemon(self.daemonic) self._thread.start() def shutdown(self, wait=True, shutdown_threadpool=True): """ Shuts down the scheduler and terminates the thread. Does not interrupt any currently running jobs. :param wait: ``True`` to wait until all currently executing jobs have finished (if ``shutdown_threadpool`` is also ``True``) :param shutdown_threadpool: ``True`` to shut down the thread pool """ if not self.running: return self._stopped = True self._wakeup.set() # Shut down the thread pool if shutdown_threadpool: self._threadpool.shutdown(wait) # Wait until the scheduler thread terminates self._thread.join() @property def running(self): return not self._stopped and self._thread and self._thread.isAlive() def _real_add_job(self, job, wakeup): job.compute_next_run_time(datetime.now()) if not job.next_run_time: raise ValueError('Not adding job since it would never be run') self._jobs_lock.acquire() self._jobs.append(job) self._jobs_lock.release() # Notify listeners that a new job has been added event = SchedulerEvent(EVENT_SCHEDULER_JOB_ADDED, job) self.notify_listeners(event) logger.info('Added job "%s" to scheduler', job) # Notify the scheduler about the new job if wakeup: self._wakeup.set() def add_job(self, trigger, func, args, kwargs, **options): """ Adds the given job to the job list and notifies the scheduler thread. 
:param trigger: alias of the job store to store the job in :param func: callable to run at the given time :param args: list of positional arguments to call func with :param kwargs: dict of keyword arguments to call func with :rtype: :class:`~napscheduler.job.Job` """ job = Job(trigger, func, args or [], kwargs or {}, options.pop('misfire_grace_time', self.misfire_grace_time), options.pop('coalesce', self.coalesce), **options) if not self.running: self._pending_jobs.append(job) logger.info('Adding job tentatively -- it will be properly ' 'scheduled when the scheduler starts') else: self._real_add_job(job, True) return job def _remove_job(self, job): self._jobs.remove(job) # Notify listeners that a job has been removed event = SchedulerEvent(EVENT_SCHEDULER_JOB_REMOVED, job) self.notify_listeners(event) logger.info('Removed job "%s"', job) def add_date_job(self, func, date, args=None, kwargs=None, **options): """ Schedules a job to be completed on a specific date and time. :param func: callable to run at the given time :param date: the date/time to run the job at :param name: name of the job :param misfire_grace_time: seconds after the designated run time that the job is still allowed to be run :type date: :class:`datetime.date` :rtype: :class:`~napscheduler.job.Job` """ trigger = SimpleTrigger(date) return self.add_job(trigger, func, args, kwargs, **options) def add_interval_job(self, func, weeks=0, days=0, hours=0, minutes=0, seconds=0, start_date=None, args=None, kwargs=None, **options): """ Schedules a job to be completed on specified intervals. :param func: callable to run :param weeks: number of weeks to wait :param days: number of days to wait :param hours: number of hours to wait :param minutes: number of minutes to wait :param seconds: number of seconds to wait :param start_date: when to first execute the job and start the counter (default is after the given interval) :param args: list of positional arguments to call func with :param kwargs: dict of keyword arguments to call func with :param name: name of the job :param misfire_grace_time: seconds after the designated run time that the job is still allowed to be run :rtype: :class:`~napscheduler.job.Job` """ interval = timedelta(weeks=weeks, days=days, hours=hours, minutes=minutes, seconds=seconds) trigger = IntervalTrigger(interval, start_date) return self.add_job(trigger, func, args, kwargs, **options) def add_cron_job(self, func, year=None, month=None, day=None, week=None, day_of_week=None, hour=None, minute=None, second=None, start_date=None, args=None, kwargs=None, **options): """ Schedules a job to be completed on times that match the given expressions. 
:param func: callable to run :param year: year to run on :param month: month to run on :param day: day of month to run on :param week: week of the year to run on :param day_of_week: weekday to run on (0 = Monday) :param hour: hour to run on :param second: second to run on :param args: list of positional arguments to call func with :param kwargs: dict of keyword arguments to call func with :param name: name of the job :param misfire_grace_time: seconds after the designated run time that the job is still allowed to be run :return: the scheduled job :rtype: :class:`~napscheduler.job.Job` """ trigger = CronTrigger(year=year, month=month, day=day, week=week, day_of_week=day_of_week, hour=hour, minute=minute, second=second, start_date=start_date) return self.add_job(trigger, func, args, kwargs, **options) def cron_schedule(self, **options): """ Decorator version of :meth:`add_cron_job`. This decorator does not wrap its host function. Unscheduling decorated functions is possible by passing the ``job`` attribute of the scheduled function to :meth:`unschedule_job`. """ def inner(func): func.job = self.add_cron_job(func, **options) return func return inner def interval_schedule(self, **options): """ Decorator version of :meth:`add_interval_job`. This decorator does not wrap its host function. Unscheduling decorated functions is possible by passing the ``job`` attribute of the scheduled function to :meth:`unschedule_job`. """ def inner(func): func.job = self.add_interval_job(func, **options) return func return inner def get_jobs(self): """ Returns a list of all scheduled jobs. :return: list of :class:`~napscheduler.job.Job` objects """ return self._jobs def unschedule_job(self, job): """ Removes a job, preventing it from being run any more. """ self._jobs_lock.acquire() try: if job in list(self._jobs): self._remove_job(job) return finally: self._jobs_lock.release() raise KeyError('Job "%s" is not scheduled in any job store' % job) def unschedule_func(self, func): """ Removes all jobs that would execute the given function. """ found = False self._jobs_lock.acquire() try: for job in list(self._jobs): if job.func == func: self._remove_job(job) found = True finally: self._jobs_lock.release() if not found: raise KeyError('The given function is not scheduled in this ' 'scheduler') def print_jobs(self, out=None): """ Prints out a textual listing of all jobs currently scheduled on this scheduler. :param out: a file-like object to print to (defaults to **sys.stdout** if nothing is given) """ out = out or sys.stdout job_strs = [] self._jobs_lock.acquire() try: if self._jobs: for job in self._jobs: job_strs.append('%s' % job) else: job_strs.append('No scheduled jobs') finally: self._jobs_lock.release() out.write(os.linesep.join(job_strs) + os.linesep) def _run_job(self, job, run_times): """ Acts as a harness that runs the actual job code in a thread. 
""" for run_time in run_times: # See if the job missed its run time window, and handle possible # misfires accordingly difference = datetime.now() - run_time grace_time = timedelta(seconds=job.misfire_grace_time) if difference > grace_time: # Notify listeners about a missed run event = JobEvent(EVENT_JOB_MISSED, job, run_time) self.notify_listeners(event) job.notify_listeners(event) logger.warning('Run time of job "%s" was missed by %s', job, difference) else: try: job.add_instance() except MaxInstancesReachedError: event = JobEvent(EVENT_JOB_MISSED, job, run_time) self.notify_listeners(event) job.notify_listeners(event) logger.warning('Execution of job "%s" skipped: ' 'maximum number of running instances ' 'reached (%d)', job, job.max_instances) break logger.info('Running job "%s" (scheduled at %s)', job, run_time) try: retval = job.func(*job.args, **job.kwargs) except: # Notify listeners about the exception exc, tb = sys.exc_info()[1:] event = JobEvent(EVENT_JOB_ERROR, job, run_time, exception=exc, traceback=tb) self.notify_listeners(event) job.notify_listeners(event) logger.exception('Job "%s" raised an exception', job) else: # Notify listeners about successful execution event = JobEvent(EVENT_JOB_EXECUTED, job, run_time, retval=retval) self.notify_listeners(event) job.notify_listeners(event) logger.info('Job "%s" executed successfully', job) job.remove_instance() # If coalescing is enabled, don't attempt any further runs if job.coalesce: break def _process_jobs(self, now): """ Iterates through jobs, starts pending jobs and figures out the next wakeup time. """ next_wakeup_time = None self._jobs_lock.acquire() try: for job in tuple([job for job in self._jobs if job.active is True]): run_times = job.get_run_times(now) if run_times: self._threadpool.submit(self._run_job, job, run_times) # Increase the job's run count if job.coalesce: job.runs += 1 else: job.runs += len(run_times) # Don't keep finished jobs around if not job.compute_next_run_time(now + timedelta(microseconds=1)): self._remove_job(job) if not next_wakeup_time: next_wakeup_time = job.next_run_time elif job.next_run_time: next_wakeup_time = min(next_wakeup_time, job.next_run_time) return next_wakeup_time finally: self._jobs_lock.release() def _main_loop(self): """Executes jobs on schedule.""" logger.info('Scheduler started') self.notify_listeners(SchedulerEvent(EVENT_SCHEDULER_START)) self._wakeup.clear() while not self._stopped: logger.debug('Looking for jobs to run') now = datetime.now() next_wakeup_time = self._process_jobs(now) # Sleep until the next job is scheduled to be run, # a new job is added or the scheduler is stopped if next_wakeup_time is not None: wait_seconds = time_difference(next_wakeup_time, now) logger.debug('Next wakeup is due at %s (in %f seconds)', next_wakeup_time, wait_seconds) self._wakeup.wait(wait_seconds) else: logger.debug('No jobs; waiting until a job is added') self._wakeup.wait() self._wakeup.clear() logger.info('Scheduler has been shut down') self.notify_listeners(SchedulerEvent(EVENT_SCHEDULER_SHUTDOWN))
mit
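A minimal usage sketch for the Scheduler class in the file above. It is illustrative only: it assumes the module is importable as napscheduler.scheduler and uses just the calls defined above (add_interval_job, the cron_schedule decorator, start, shutdown).

# Sketch only -- the import path is an assumption, not taken from this record.
import time
from napscheduler.scheduler import Scheduler

sched = Scheduler(misfire_grace_time=5)

def tick():
    print('tick')

# Jobs added before start() are kept pending and are scheduled once the
# scheduler thread starts.
sched.add_interval_job(tick, seconds=2)

@sched.cron_schedule(minute='*')
def every_minute():
    print('cron tick')

sched.start()
time.sleep(6)
sched.shutdown()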
bukalov/phantomjs
src/qt/qtwebkit/Tools/Scripts/webkitpy/tool/bot/queueengine.py
120
6867
# Copyright (c) 2009 Google Inc. All rights reserved. # Copyright (c) 2009 Apple Inc. All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are # met: # # * Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # * Redistributions in binary form must reproduce the above # copyright notice, this list of conditions and the following disclaimer # in the documentation and/or other materials provided with the # distribution. # * Neither the name of Google Inc. nor the names of its # contributors may be used to endorse or promote products derived from # this software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. import logging import sys import traceback from datetime import datetime, timedelta from webkitpy.common.system.executive import ScriptError from webkitpy.common.system.outputtee import OutputTee _log = logging.getLogger(__name__) # FIXME: This will be caught by "except Exception:" blocks, we should consider # making this inherit from SystemExit instead (or BaseException, except that's not recommended). class TerminateQueue(Exception): pass class QueueEngineDelegate: def queue_log_path(self): raise NotImplementedError, "subclasses must implement" def work_item_log_path(self, work_item): raise NotImplementedError, "subclasses must implement" def begin_work_queue(self): raise NotImplementedError, "subclasses must implement" def should_continue_work_queue(self): raise NotImplementedError, "subclasses must implement" def next_work_item(self): raise NotImplementedError, "subclasses must implement" def process_work_item(self, work_item): raise NotImplementedError, "subclasses must implement" def handle_unexpected_error(self, work_item, message): raise NotImplementedError, "subclasses must implement" class QueueEngine: def __init__(self, name, delegate, wakeup_event, seconds_to_sleep=120): self._name = name self._delegate = delegate self._wakeup_event = wakeup_event self._output_tee = OutputTee() self._seconds_to_sleep = seconds_to_sleep log_date_format = "%Y-%m-%d %H:%M:%S" handled_error_code = 2 # Child processes exit with a special code to the parent queue process can detect the error was handled. @classmethod def exit_after_handled_error(cls, error): _log.error(error) sys.exit(cls.handled_error_code) def run(self): self._begin_logging() self._delegate.begin_work_queue() while (self._delegate.should_continue_work_queue()): try: self._ensure_work_log_closed() work_item = self._delegate.next_work_item() if not work_item: self._sleep("No work item.") continue # FIXME: Work logs should not depend on bug_id specificaly. # This looks fixed, no? 
self._open_work_log(work_item) try: if not self._delegate.process_work_item(work_item): _log.warning("Unable to process work item.") continue except ScriptError, e: # Use a special exit code to indicate that the error was already # handled in the child process and we should just keep looping. if e.exit_code == self.handled_error_code: continue message = "Unexpected failure when processing patch! Please file a bug against webkit-patch.\n%s" % e.message_with_output() self._delegate.handle_unexpected_error(work_item, message) except TerminateQueue, e: self._stopping("TerminateQueue exception received.") return 0 except KeyboardInterrupt, e: self._stopping("User terminated queue.") return 1 except Exception, e: traceback.print_exc() # Don't try tell the status bot, in case telling it causes an exception. self._sleep("Exception while preparing queue") self._stopping("Delegate terminated queue.") return 0 def _stopping(self, message): _log.info("\n%s" % message) self._delegate.stop_work_queue(message) # Be careful to shut down our OutputTee or the unit tests will be unhappy. self._ensure_work_log_closed() self._output_tee.remove_log(self._queue_log) def _begin_logging(self): self._queue_log = self._output_tee.add_log(self._delegate.queue_log_path()) self._work_log = None def _open_work_log(self, work_item): work_item_log_path = self._delegate.work_item_log_path(work_item) if not work_item_log_path: return self._work_log = self._output_tee.add_log(work_item_log_path) def _ensure_work_log_closed(self): # If we still have a bug log open, close it. if self._work_log: self._output_tee.remove_log(self._work_log) self._work_log = None def _now(self): """Overriden by the unit tests to allow testing _sleep_message""" return datetime.now() def _sleep_message(self, message): wake_time = self._now() + timedelta(seconds=self._seconds_to_sleep) if self._seconds_to_sleep < 3 * 60: sleep_duration_text = str(self._seconds_to_sleep) + ' seconds' else: sleep_duration_text = str(round(self._seconds_to_sleep / 60)) + ' minutes' return "%s Sleeping until %s (%s)." % (message, wake_time.strftime(self.log_date_format), sleep_duration_text) def _sleep(self, message): _log.info(self._sleep_message(message)) self._wakeup_event.wait(self._seconds_to_sleep) self._wakeup_event.clear()
bsd-3-clause
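A minimal sketch of driving the QueueEngine above with a delegate. The delegate class, work items, and log path are made up for illustration; stop_work_queue is included because _stopping() calls it on the delegate even though the abstract QueueEngineDelegate above does not declare it.

# Sketch only -- assumes a webkitpy checkout on the path and a writable /tmp.
import threading
from webkitpy.tool.bot.queueengine import QueueEngine, QueueEngineDelegate

class PrintingDelegate(QueueEngineDelegate):
    def __init__(self, items):
        self._items = list(items)

    def queue_log_path(self):
        return "/tmp/example-queue.log"

    def work_item_log_path(self, work_item):
        return None  # skip per-item logs

    def begin_work_queue(self):
        pass

    def should_continue_work_queue(self):
        return bool(self._items)

    def next_work_item(self):
        return self._items.pop(0)

    def process_work_item(self, work_item):
        print "processing %s" % work_item
        return True

    def handle_unexpected_error(self, work_item, message):
        print message

    def stop_work_queue(self, message):
        pass  # called by QueueEngine._stopping()

wakeup = threading.Event()
QueueEngine("example-queue", PrintingDelegate([1, 2, 3]), wakeup,
            seconds_to_sleep=1).run()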
Zimbra-Community/python-zimbra
tests/test_request_json.py
3
10019
""" Request tests """ import json from unittest import TestCase from pythonzimbra.request_json import RequestJson from pythonzimbra.exceptions.request import RequestHeaderContextException class TestRequestJson(TestCase): """ Request tests """ request = None """ The request to be tested against """ def cleanUp(self): """ Clean up after one step to leave a dedicated result for the other test cases. """ self.setUp() def setUp(self): self.request = RequestJson() def test_empty_request(self): """ Create an empty request and check the created xml """ expected_result = { "Body": {}, "Header": { "context": { "_jsns": "urn:zimbra", "format": { "type": "js" } } } } self.assertEqual( expected_result, json.loads(self.request.get_request()) ) def test_set_context_params_failtype(self): """ Add context parameters to the request and expect the method to send an exception """ self.assertRaises( RequestHeaderContextException, self.request.set_context_params, { 'invalidParam': { 'invalidAttribute': 'invalidValue' } } ) def test_set_context_params(self): """ Add all currently accepted params and check the result """ self.request.set_context_params( { 'authToken': { '_content': '1234567890abcdef' }, 'authTokenControl': { 'voidOnExpired': '1' }, 'session': { 'id': '1234567890abcdef', 'seq': '1234567890', 'type': 'admin' }, 'account': { 'by': 'name', '_content': 'user@zimbra.com' }, 'change': { 'token': '1234567890abcdef', 'type': 'new' }, 'targetServer': { '_content': 'mailboxserver.zimbra.com' }, 'userAgent': { 'name': 'Mozilla', 'version': '1.0' }, 'via': { '_content': 'proxyserver.zimbra.com' } } ) expected_result = { "Body": {}, "Header": { "context": { "authToken": { "_content": "1234567890abcdef" }, "account": { "by": "name", "_content": "user@zimbra.com" }, "via": { "_content": "proxyserver.zimbra.com" }, "targetServer": { "_content": "mailboxserver.zimbra.com" }, "format": { "type": "js" }, "_jsns": "urn:zimbra", "session": { "type": "admin", "id": "1234567890abcdef", "seq": "1234567890" }, "authTokenControl": { "voidOnExpired": "1" }, "userAgent": { "version": "1.0", "name": "Mozilla" }, "change": { "token": "1234567890abcdef", "type": "new" } } } } self.assertEqual( expected_result, json.loads(self.request.get_request()) ) # Clean up after this test self.cleanUp() def test_enable_batch_default(self): """ Test enabling batch requests """ # Check with default parameter self.request.enable_batch() expected_result = { "Body": { "BatchRequest": { "onerror": "continue", "_jsns": "urn:zimbra" } }, "Header": { "context": { "_jsns": "urn:zimbra", "format": { "type": "js" } } } } self.assertEqual( expected_result, json.loads(self.request.get_request()) ) # Clean up self.cleanUp() def test_enable_batch_stop(self): """ Test enabling batch requests with additional parameter """ self.request.enable_batch('stop') expected_result = { "Body": { "BatchRequest": { "onerror": "stop", "_jsns": "urn:zimbra" } }, "Header": { "context": { "_jsns": "urn:zimbra", "format": { "type": "js" } } } } self.assertEqual( expected_result, json.loads(self.request.get_request()) ) # Clean up self.cleanUp() def test_batch_add_request(self): """ Test adding multiple request to a batch request """ self.request.enable_batch() request_id = self.request.add_request( 'GetInfoRequest', { 'sections': 'mbox,prefs' }, "urn_zimbra" ) self.assertIsInstance( request_id, int, msg="Returned request_id for request 1 is not of type int, " "but of type %s" % ( type(request_id) ) ) self.assertEqual( 1, request_id, msg="Returned request_id for request 1 is 
not 1, but %s" % ( str(request_id) ) ) expected_result = { "Body": { "BatchRequest": { "onerror": "continue", "_jsns": "urn:zimbra", "GetInfoRequest": { "_jsns": "urn_zimbra", "sections": "mbox,prefs", "requestId": 1 } } }, "Header": { "context": { "_jsns": "urn:zimbra", "format": { "type": "js" } } } } self.assertEqual( expected_result, json.loads(self.request.get_request()) ) request_id = self.request.add_request( 'GetInfoRequest', { 'sections': 'zimlets' }, "urn:zimbra" ) self.assertIsInstance( request_id, int, msg="Returned request_id for request 2 is not of type int, " "but of type %s" % ( type(request_id) ) ) self.assertEqual( 2, request_id, msg="Returned request_id for request 2 is not 2, but %s" % ( str(request_id) ) ) expected_result = { "Body": { "BatchRequest": { "onerror": "continue", "_jsns": "urn:zimbra", "GetInfoRequest": [ { "_jsns": "urn_zimbra", "sections": "mbox,prefs", "requestId": 1 }, { "_jsns": "urn:zimbra", "sections": "zimlets", "requestId": 2 } ] } }, "Header": { "context": { "_jsns": "urn:zimbra", "format": { "type": "js" } } } } self.assertEqual( expected_result, json.loads(self.request.get_request()) ) # Clean up self.setUp() def test_add_request(self): """ Test adding a request """ request_id = self.request.add_request( 'GetInfoRequest', { 'sections': 'mbox,prefs' }, 'urn:zimbra' ) self.assertIsNone( request_id, msg="Returned request_id for request 1 is not none, " "but %s" % ( str(request_id) ) ) expected_result = { "Body": { "GetInfoRequest": { "_jsns": "urn:zimbra", "sections": "mbox,prefs" } }, "Header": { "context": { "_jsns": "urn:zimbra", "format": { "type": "js" } } } } self.assertEqual( expected_result, json.loads(self.request.get_request()) ) # Clean up self.setUp() def tearDown(self): self.request = None
bsd-2-clause
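The tests above exercise RequestJson through assertions; below is a sketch of ordinary use of the same API, with placeholder token and section values.

# Sketch only -- token and sections are placeholders.
import json
from pythonzimbra.request_json import RequestJson

request = RequestJson()
request.set_context_params({
    'authToken': {'_content': '1234567890abcdef'},
})
request.add_request(
    'GetInfoRequest',
    {'sections': 'mbox,prefs'},
    'urn:zimbra'
)
# Pretty-print the JSON envelope that would be sent to the server.
print(json.dumps(json.loads(request.get_request()), indent=2))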
liulion/mayavi
tvtk/tests/test_indenter.py
2
9233
"""Tests for indenter.py.""" # Author: Prabhu Ramachandran # License: BSD style # Copyright (c) 2004, Enthought, Inc. import unittest import cStringIO from tvtk import indenter class TestIndent(unittest.TestCase): def test_basic(self): """Simple tests for indenter.""" id = indenter.Indent() self.assertEqual(str(id), '') id.incr() self.assertEqual(str(id), ' ') id.incr() self.assertEqual(str(id), ' ') id.decr() self.assertEqual(str(id), ' ') id.decr() self.assertEqual(str(id), '') id.incr(); id.incr() id.reset() self.assertEqual(str(id), '') def test_format(self): """Tests if formatting works ok.""" id = indenter.Indent() id.incr() # test one liner with trailing newlines txt = """class foo:\n\n \n \n""" t1 = id.format(txt) self.assertEqual(t1, ' class foo:\n') # test one liner with no trailing newline. txt = """class foo:""" t1 = id.format(txt) self.assertEqual(t1, ' class foo:\n') # test multi-line text. txt = """print "hi!" if name == 'hi': print "hi, hi!" """ res = """ print "hi!"\n if name == 'hi':\n print "hi, hi!"\n""" self.assertEqual(id.format(txt), res) txt = """ class Foo: def __init__(self): pass def _get_a(self): return self._a""" res = """ class Foo: def __init__(self): pass def _get_a(self): return self._a""" + '\n' self.assertEqual(id.format(txt), res) class TestVTKDocMassager(unittest.TestCase): def test_doc_massage(self): """Test massage method.""" doc = "This is a test. All VTK classes and vtk classes\n"\ "are named like this: vtkActor, vtkLODProperty,\n"\ "vtkXMLDataReader, vtk3DSImporter etc. The methods \n"\ "of a VTK object are like GetData, GetOutput, \n"\ "SetRepresentationToWireframe. Ivars are named like\n"\ "SpecularColor, Write3DPropsAsRasterImage etc." ret = "This is a test. All VTK classes and vtk classes\n"\ "are named like this: Actor, LODProperty,\n"\ "XMLDataReader, ThreeDSImporter etc. The methods \n"\ "of a VTK object are like get_data, get_output, \n"\ "set_representation_to_wireframe. Ivars are named like\n"\ "specular_color, write3d_props_as_raster_image etc." 
dm = indenter.VTKDocMassager() self.assertEqual(dm.massage(doc), ret) def test_rename_class(self): """Test if VTK classes are renamed correctly.""" dm = indenter.VTKDocMassager() t = 'vtkFooBar vtkXMLDataReader vtk3DSReader vtk2000Bug' r = dm._rename_class(t) correct = 'FooBar XMLDataReader ThreeDSReader Two000Bug' self.assertEqual(r, correct) def test_remove_sig(self): """Test if function signature is removed correctly.""" dm = indenter.VTKDocMassager() t = 'V.GetOutput(int) -> vtkStructuredPoints\n'\ 'C++: vtkStructuredPoints *GetOutput (int idx);\n'\ 'V.GetOutput() -> vtkStructuredPoints\n'\ 'C++: vtkStructuredPoints *GetOutput ();\n\n'\ ' Set/Get the output of this reader.\n' r = dm._remove_sig(t) correct = ' Set/Get the output of this reader.\n' self.assertEqual(r, correct) t = 'V.GetOutput(int) -> vtkStructuredPoints\n'\ 'C++: vtkStructuredPoints *GetOutput (int idx);\n'\ 'V.GetOutput() -> vtkStructuredPoints\n'\ 'C++: vtkStructuredPoints *GetOutput ();\n\n' r = dm._remove_sig(t) correct = '' self.assertEqual(r, correct) def test_class_doc(self): """Test if class docs are generated correctly.""" dm = indenter.VTKDocMassager() indent = indenter.Indent() out = cStringIO.StringIO() doc = "vtkLODProperty, vtkXMLDataReader, vtk3DSImporter\n"\ "SetRepresentationToWireframe, Write3DPropsAsRasterImage" dm.write_class_doc(doc, out, indent) out.seek(0) ret = out.read() correct = ''' """ LODProperty, XMLDataReader, ThreeDSImporter set_representation_to_wireframe, write3d_props_as_raster_image """\n''' #print ret #print correct self.assertEqual(ret, correct) # Test empty doc out = cStringIO.StringIO() doc = "" dm.write_class_doc(doc, out, indent) out.seek(0) ret = out.read() self.assertEqual(ret, ' """\n \n """\n') def test_trait_doc(self): """Test if trait docs are generated correctly.""" dm = indenter.VTKDocMassager() indent = indenter.Indent() out = cStringIO.StringIO() doc = 'V.GetOutput(int) -> vtkStructuredPoints\n'\ 'C++: vtkStructuredPoints *GetOutput (int idx);\n'\ 'V.GetOutput() -> vtkStructuredPoints\n'\ 'C++: vtkStructuredPoints *GetOutput ();\n\n'\ 'vtkLODProperty, vtkXMLDataReader, vtk3DSImporter\n'\ 'SetRepresentationToWireframe, Write3DPropsAsRasterImage' dm.write_trait_doc(doc, out, indent) out.seek(0) ret = out.read() correct = ''' """ LODProperty, XMLDataReader, ThreeDSImporter set_representation_to_wireframe, write3d_props_as_raster_image """\n''' #print ret #print correct self.assertEqual(ret, correct) # Test empty doc. 
out = cStringIO.StringIO() doc = 'V.GetOutput(int) -> vtkStructuredPoints\n'\ 'C++: vtkStructuredPoints *GetOutput (int idx);\n'\ 'V.GetOutput() -> vtkStructuredPoints\n'\ 'C++: vtkStructuredPoints *GetOutput ();\n\n' dm.write_trait_doc(doc, out, indent) out.seek(0) ret = out.read() self.assertEqual(ret, ' """\n \n """\n') def test_method_doc(self): """Test if method docs are generated correctly.""" dm = indenter.VTKDocMassager() indent = indenter.Indent() out = cStringIO.StringIO() doc = 'V.GetOutput(int) -> vtkStructuredPoints\n'\ 'C++: vtkStructuredPoints *GetOutput (int idx);\n'\ 'V.GetOutput() -> vtkStructuredPoints\n'\ 'C++: vtkStructuredPoints *GetOutput ();\n\n'\ 'vtkLODProperty, vtkXMLDataReader, vtk3DSImporter\n'\ 'SetRepresentationToWireframe, Write3DPropsAsRasterImage' dm.write_method_doc(doc, out, indent) out.seek(0) ret = out.read() correct = ''' """ V.get_output(int) -> StructuredPoints V.get_output() -> StructuredPoints LODProperty, XMLDataReader, ThreeDSImporter set_representation_to_wireframe, write3d_props_as_raster_image """\n''' #print ret #print correct self.assertEqual(ret, correct) # Test empty doc. out = cStringIO.StringIO() doc = 'V.GetOutput(int) -> vtkStructuredPoints\n'\ 'C++: vtkStructuredPoints *GetOutput (int idx);\n'\ 'V.GetOutput() -> vtkStructuredPoints\n'\ 'C++: vtkStructuredPoints *GetOutput ();\n\n' dm.write_method_doc(doc, out, indent) out.seek(0) ret = out.read() correct = ''' """ V.get_output(int) -> StructuredPoints V.get_output() -> StructuredPoints """\n''' #print ret #print correct self.assertEqual(ret, correct) def test_get_method_doc(self): """Test if get_method_doc works correctly.""" dm = indenter.VTKDocMassager() doc = 'V.GetOutput(int) -> vtkStructuredPoints\n'\ 'C++: vtkStructuredPoints *GetOutput (int idx);\n'\ 'V.GetOutput() -> vtkStructuredPoints\n'\ 'C++: vtkStructuredPoints *GetOutput ();\n\n'\ 'vtkLODProperty, vtkXMLDataReader, vtk3DSImporter\n'\ 'SetRepresentationToWireframe, Write3DPropsAsRasterImage' ret = dm.get_method_doc(doc) correct = 'V.get_output(int) -> StructuredPoints\n'\ 'V.get_output() -> StructuredPoints\n\n'\ 'LODProperty, XMLDataReader, ThreeDSImporter\n'\ 'set_representation_to_wireframe, '\ 'write3d_props_as_raster_image' #print ret #print correct self.assertEqual(ret, correct) # Test empty doc (only signature exists). doc = 'V.GetOutput(int) -> vtkStructuredPoints\n'\ 'C++: vtkStructuredPoints *GetOutput (int idx);\n'\ 'V.GetOutput() -> vtkStructuredPoints\n'\ 'C++: vtkStructuredPoints *GetOutput ();\n\n' ret = dm.get_method_doc(doc) correct = 'V.get_output(int) -> StructuredPoints\n'\ 'V.get_output() -> StructuredPoints\n' self.assertEqual(ret, correct) if __name__ == "__main__": unittest.main()
bsd-3-clause
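A sketch of calling the two helpers covered by these tests directly; the expected outputs in the comments are taken from the assertions above.

# Sketch only -- mirrors the behaviour pinned down by the tests above.
from tvtk import indenter

# Indent tracks the current indentation level and re-indents code text.
indent = indenter.Indent()
indent.incr()
print(indent.format('class Foo:'))   # '    class Foo:\n'

# VTKDocMassager rewrites VTK-style names in docstrings.
dm = indenter.VTKDocMassager()
print(dm.massage('vtkActor uses SetRepresentationToWireframe.'))
# expected, per the tests: 'Actor uses set_representation_to_wireframe.'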
adit-chandra/tensorflow
tensorflow/lite/tutorials/mnist_tflite.py
5
2870
# Copyright 2018 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Script to evaluate accuracy of TFLite flatbuffer model on mnist dataset.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function import numpy as np import tensorflow as tf # pylint: disable=g-bad-import-order from tensorflow.lite.tutorials import dataset flags = tf.app.flags flags.DEFINE_string('data_dir', '/tmp/data_dir', 'Directory where data is stored.') flags.DEFINE_string('model_file', '', 'The path to the TFLite flatbuffer model file.') flags = flags.FLAGS def test_image_generator(): # Generates an iterator over images with tf.compat.v1.Session() as sess: input_data = tf.compat.v1.data.make_one_shot_iterator(dataset.test( flags.data_dir)).get_next() try: while True: yield sess.run(input_data) except tf.errors.OutOfRangeError: pass def run_eval(interpreter, input_image): """Performs evaluation for input image over specified model. Args: interpreter: TFLite interpreter initialized with model to execute. input_image: Image input to the model. Returns: output: output tensor of model being executed. """ # Get input and output tensors. input_details = interpreter.get_input_details() output_details = interpreter.get_output_details() # Test model on the input images. input_image = np.reshape(input_image, input_details[0]['shape']) interpreter.set_tensor(input_details[0]['index'], input_image) interpreter.invoke() output_data = interpreter.get_tensor(output_details[0]['index']) output = np.squeeze(output_data) return output def main(_): interpreter = tf.lite.Interpreter(model_path=flags.model_file) interpreter.allocate_tensors() num_correct, total = 0, 0 for input_data in test_image_generator(): output = run_eval(interpreter, input_data[0]) total += 1 if output == input_data[1]: num_correct += 1 if total % 500 == 0: print('Accuracy after %i images: %f' % (total, float(num_correct) / float(total))) if __name__ == '__main__': tf.logging.set_verbosity(tf.logging.INFO) tf.compat.v1.app.run(main)
apache-2.0
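The run_eval() function above follows the usual TFLite interpreter sequence (allocate, set_tensor, invoke, get_tensor). Below is a standalone sketch of that sequence for one dummy input; the model path is a placeholder.

# Sketch only -- model path is a placeholder, input is a dummy tensor.
import numpy as np
import tensorflow as tf

interpreter = tf.lite.Interpreter(model_path='/tmp/mnist.tflite')
interpreter.allocate_tensors()

input_details = interpreter.get_input_details()
output_details = interpreter.get_output_details()

# Feed a single zero-filled array shaped like the model's input tensor.
dummy = np.zeros(input_details[0]['shape'], dtype=input_details[0]['dtype'])
interpreter.set_tensor(input_details[0]['index'], dummy)
interpreter.invoke()
print(interpreter.get_tensor(output_details[0]['index']))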
Sir-Henry-Curtis/Ironworks
lib/jinja2/testsuite/regression.py
90
7583
# -*- coding: utf-8 -*- """ jinja2.testsuite.regression ~~~~~~~~~~~~~~~~~~~~~~~~~~~ Tests corner cases and bugs. :copyright: (c) 2010 by the Jinja Team. :license: BSD, see LICENSE for more details. """ import unittest from jinja2.testsuite import JinjaTestCase from jinja2 import Template, Environment, DictLoader, TemplateSyntaxError, \ TemplateNotFound, PrefixLoader env = Environment() class CornerTestCase(JinjaTestCase): def test_assigned_scoping(self): t = env.from_string(''' {%- for item in (1, 2, 3, 4) -%} [{{ item }}] {%- endfor %} {{- item -}} ''') assert t.render(item=42) == '[1][2][3][4]42' t = env.from_string(''' {%- for item in (1, 2, 3, 4) -%} [{{ item }}] {%- endfor %} {%- set item = 42 %} {{- item -}} ''') assert t.render() == '[1][2][3][4]42' t = env.from_string(''' {%- set item = 42 %} {%- for item in (1, 2, 3, 4) -%} [{{ item }}] {%- endfor %} {{- item -}} ''') assert t.render() == '[1][2][3][4]42' def test_closure_scoping(self): t = env.from_string(''' {%- set wrapper = "<FOO>" %} {%- for item in (1, 2, 3, 4) %} {%- macro wrapper() %}[{{ item }}]{% endmacro %} {{- wrapper() }} {%- endfor %} {{- wrapper -}} ''') assert t.render() == '[1][2][3][4]<FOO>' t = env.from_string(''' {%- for item in (1, 2, 3, 4) %} {%- macro wrapper() %}[{{ item }}]{% endmacro %} {{- wrapper() }} {%- endfor %} {%- set wrapper = "<FOO>" %} {{- wrapper -}} ''') assert t.render() == '[1][2][3][4]<FOO>' t = env.from_string(''' {%- for item in (1, 2, 3, 4) %} {%- macro wrapper() %}[{{ item }}]{% endmacro %} {{- wrapper() }} {%- endfor %} {{- wrapper -}} ''') assert t.render(wrapper=23) == '[1][2][3][4]23' class BugTestCase(JinjaTestCase): def test_keyword_folding(self): env = Environment() env.filters['testing'] = lambda value, some: value + some assert env.from_string("{{ 'test'|testing(some='stuff') }}") \ .render() == 'teststuff' def test_extends_output_bugs(self): env = Environment(loader=DictLoader({ 'parent.html': '(({% block title %}{% endblock %}))' })) t = env.from_string('{% if expr %}{% extends "parent.html" %}{% endif %}' '[[{% block title %}title{% endblock %}]]' '{% for item in [1, 2, 3] %}({{ item }}){% endfor %}') assert t.render(expr=False) == '[[title]](1)(2)(3)' assert t.render(expr=True) == '((title))' def test_urlize_filter_escaping(self): tmpl = env.from_string('{{ "http://www.example.org/<foo"|urlize }}') assert tmpl.render() == '<a href="http://www.example.org/&lt;foo">http://www.example.org/&lt;foo</a>' def test_loop_call_loop(self): tmpl = env.from_string(''' {% macro test() %} {{ caller() }} {% endmacro %} {% for num1 in range(5) %} {% call test() %} {% for num2 in range(10) %} {{ loop.index }} {% endfor %} {% endcall %} {% endfor %} ''') assert tmpl.render().split() == map(unicode, range(1, 11)) * 5 def test_weird_inline_comment(self): env = Environment(line_statement_prefix='%') self.assert_raises(TemplateSyntaxError, env.from_string, '% for item in seq {# missing #}\n...% endfor') def test_old_macro_loop_scoping_bug(self): tmpl = env.from_string('{% for i in (1, 2) %}{{ i }}{% endfor %}' '{% macro i() %}3{% endmacro %}{{ i() }}') assert tmpl.render() == '123' def test_partial_conditional_assignments(self): tmpl = env.from_string('{% if b %}{% set a = 42 %}{% endif %}{{ a }}') assert tmpl.render(a=23) == '23' assert tmpl.render(b=True) == '42' def test_stacked_locals_scoping_bug(self): env = Environment(line_statement_prefix='#') t = env.from_string('''\ # for j in [1, 2]: # set x = 1 # for i in [1, 2]: # print x # if i % 2 == 0: # set x = x + 1 # endif # endfor # endfor # 
if a # print 'A' # elif b # print 'B' # elif c == d # print 'C' # else # print 'D' # endif ''') assert t.render(a=0, b=False, c=42, d=42.0) == '1111C' def test_stacked_locals_scoping_bug_twoframe(self): t = Template(''' {% set x = 1 %} {% for item in foo %} {% if item == 1 %} {% set x = 2 %} {% endif %} {% endfor %} {{ x }} ''') rv = t.render(foo=[1]).strip() assert rv == u'1' def test_call_with_args(self): t = Template("""{% macro dump_users(users) -%} <ul> {%- for user in users -%} <li><p>{{ user.username|e }}</p>{{ caller(user) }}</li> {%- endfor -%} </ul> {%- endmacro -%} {% call(user) dump_users(list_of_user) -%} <dl> <dl>Realname</dl> <dd>{{ user.realname|e }}</dd> <dl>Description</dl> <dd>{{ user.description }}</dd> </dl> {% endcall %}""") assert [x.strip() for x in t.render(list_of_user=[{ 'username':'apo', 'realname':'something else', 'description':'test' }]).splitlines()] == [ u'<ul><li><p>apo</p><dl>', u'<dl>Realname</dl>', u'<dd>something else</dd>', u'<dl>Description</dl>', u'<dd>test</dd>', u'</dl>', u'</li></ul>' ] def test_empty_if_condition_fails(self): self.assert_raises(TemplateSyntaxError, Template, '{% if %}....{% endif %}') self.assert_raises(TemplateSyntaxError, Template, '{% if foo %}...{% elif %}...{% endif %}') self.assert_raises(TemplateSyntaxError, Template, '{% for x in %}..{% endfor %}') def test_recursive_loop_bug(self): tpl1 = Template(""" {% for p in foo recursive%} {{p.bar}} {% for f in p.fields recursive%} {{f.baz}} {{p.bar}} {% if f.rec %} {{ loop(f.sub) }} {% endif %} {% endfor %} {% endfor %} """) tpl2 = Template(""" {% for p in foo%} {{p.bar}} {% for f in p.fields recursive%} {{f.baz}} {{p.bar}} {% if f.rec %} {{ loop(f.sub) }} {% endif %} {% endfor %} {% endfor %} """) def test_correct_prefix_loader_name(self): env = Environment(loader=PrefixLoader({ 'foo': DictLoader({}) })) try: env.get_template('foo/bar.html') except TemplateNotFound, e: assert e.name == 'foo/bar.html' else: assert False, 'expected error here' def suite(): suite = unittest.TestSuite() suite.addTest(unittest.makeSuite(CornerTestCase)) suite.addTest(unittest.makeSuite(BugTestCase)) return suite
mit
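These regression tests all render a small template and assert on the output; the sketch below adds a check in the same style, relying only on documented Jinja2 behaviour (loop.index starts at 1).

# Sketch only -- a plain-assert check in the style of BugTestCase above.
from jinja2 import Environment

env = Environment()

def check_loop_index():
    tmpl = env.from_string(
        '{% for item in seq %}{{ loop.index }}:{{ item }} {% endfor %}')
    assert tmpl.render(seq=['a', 'b']).strip() == '1:a 2:b'

check_loop_index()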
vertigo235/Sick-Beard-XEM
cherrypy/_cptools.py
35
19648
"""CherryPy tools. A "tool" is any helper, adapted to CP. Tools are usually designed to be used in a variety of ways (although some may only offer one if they choose): Library calls: All tools are callables that can be used wherever needed. The arguments are straightforward and should be detailed within the docstring. Function decorators: All tools, when called, may be used as decorators which configure individual CherryPy page handlers (methods on the CherryPy tree). That is, "@tools.anytool()" should "turn on" the tool via the decorated function's _cp_config attribute. CherryPy config: If a tool exposes a "_setup" callable, it will be called once per Request (if the feature is "turned on" via config). Tools may be implemented as any object with a namespace. The builtins are generally either modules or instances of the tools.Tool class. """ import cherrypy import warnings def _getargs(func): """Return the names of all static arguments to the given function.""" # Use this instead of importing inspect for less mem overhead. import types if isinstance(func, types.MethodType): func = func.im_func co = func.func_code return co.co_varnames[:co.co_argcount] _attr_error = ("CherryPy Tools cannot be turned on directly. Instead, turn them " "on via config, or use them as decorators on your page handlers.") class Tool(object): """A registered function for use with CherryPy request-processing hooks. help(tool.callable) should give you more information about this Tool. """ namespace = "tools" def __init__(self, point, callable, name=None, priority=50): self._point = point self.callable = callable self._name = name self._priority = priority self.__doc__ = self.callable.__doc__ self._setargs() def _get_on(self): raise AttributeError(_attr_error) def _set_on(self, value): raise AttributeError(_attr_error) on = property(_get_on, _set_on) def _setargs(self): """Copy func parameter names to obj attributes.""" try: for arg in _getargs(self.callable): setattr(self, arg, None) except (TypeError, AttributeError): if hasattr(self.callable, "__call__"): for arg in _getargs(self.callable.__call__): setattr(self, arg, None) # IronPython 1.0 raises NotImplementedError because # inspect.getargspec tries to access Python bytecode # in co_code attribute. except NotImplementedError: pass # IronPython 1B1 may raise IndexError in some cases, # but if we trap it here it doesn't prevent CP from working. except IndexError: pass def _merged_args(self, d=None): """Return a dict of configuration entries for this Tool.""" if d: conf = d.copy() else: conf = {} tm = cherrypy.serving.request.toolmaps[self.namespace] if self._name in tm: conf.update(tm[self._name]) if "on" in conf: del conf["on"] return conf def __call__(self, *args, **kwargs): """Compile-time decorator (turn on the tool in config). For example: @tools.proxy() def whats_my_base(self): return cherrypy.request.base whats_my_base.exposed = True """ if args: raise TypeError("The %r Tool does not accept positional " "arguments; you must use keyword arguments." % self._name) def tool_decorator(f): if not hasattr(f, "_cp_config"): f._cp_config = {} subspace = self.namespace + "." + self._name + "." f._cp_config[subspace + "on"] = True for k, v in kwargs.items(): f._cp_config[subspace + k] = v return f return tool_decorator def _setup(self): """Hook this tool into cherrypy.request. The standard CherryPy request object will automatically call this method when the tool is "turned on" in config. 
""" conf = self._merged_args() p = conf.pop("priority", None) if p is None: p = getattr(self.callable, "priority", self._priority) cherrypy.serving.request.hooks.attach(self._point, self.callable, priority=p, **conf) class HandlerTool(Tool): """Tool which is called 'before main', that may skip normal handlers. If the tool successfully handles the request (by setting response.body), if should return True. This will cause CherryPy to skip any 'normal' page handler. If the tool did not handle the request, it should return False to tell CherryPy to continue on and call the normal page handler. If the tool is declared AS a page handler (see the 'handler' method), returning False will raise NotFound. """ def __init__(self, callable, name=None): Tool.__init__(self, 'before_handler', callable, name) def handler(self, *args, **kwargs): """Use this tool as a CherryPy page handler. For example: class Root: nav = tools.staticdir.handler(section="/nav", dir="nav", root=absDir) """ def handle_func(*a, **kw): handled = self.callable(*args, **self._merged_args(kwargs)) if not handled: raise cherrypy.NotFound() return cherrypy.serving.response.body handle_func.exposed = True return handle_func def _wrapper(self, **kwargs): if self.callable(**kwargs): cherrypy.serving.request.handler = None def _setup(self): """Hook this tool into cherrypy.request. The standard CherryPy request object will automatically call this method when the tool is "turned on" in config. """ conf = self._merged_args() p = conf.pop("priority", None) if p is None: p = getattr(self.callable, "priority", self._priority) cherrypy.serving.request.hooks.attach(self._point, self._wrapper, priority=p, **conf) class HandlerWrapperTool(Tool): """Tool which wraps request.handler in a provided wrapper function. The 'newhandler' arg must be a handler wrapper function that takes a 'next_handler' argument, plus *args and **kwargs. Like all page handler functions, it must return an iterable for use as cherrypy.response.body. For example, to allow your 'inner' page handlers to return dicts which then get interpolated into a template: def interpolator(next_handler, *args, **kwargs): filename = cherrypy.request.config.get('template') cherrypy.response.template = env.get_template(filename) response_dict = next_handler(*args, **kwargs) return cherrypy.response.template.render(**response_dict) cherrypy.tools.jinja = HandlerWrapperTool(interpolator) """ def __init__(self, newhandler, point='before_handler', name=None, priority=50): self.newhandler = newhandler self._point = point self._name = name self._priority = priority def callable(self, debug=False): innerfunc = cherrypy.serving.request.handler def wrap(*args, **kwargs): return self.newhandler(innerfunc, *args, **kwargs) cherrypy.serving.request.handler = wrap class ErrorTool(Tool): """Tool which is used to replace the default request.error_response.""" def __init__(self, callable, name=None): Tool.__init__(self, None, callable, name) def _wrapper(self): self.callable(**self._merged_args()) def _setup(self): """Hook this tool into cherrypy.request. The standard CherryPy request object will automatically call this method when the tool is "turned on" in config. 
""" cherrypy.serving.request.error_response = self._wrapper # Builtin tools # from cherrypy.lib import cptools, encoding, auth, static, jsontools from cherrypy.lib import sessions as _sessions, xmlrpc as _xmlrpc from cherrypy.lib import caching as _caching from cherrypy.lib import auth_basic, auth_digest class SessionTool(Tool): """Session Tool for CherryPy. sessions.locking: When 'implicit' (the default), the session will be locked for you, just before running the page handler. When 'early', the session will be locked before reading the request body. This is off by default for safety reasons; for example, a large upload would block the session, denying an AJAX progress meter (see http://www.cherrypy.org/ticket/630). When 'explicit' (or any other value), you need to call cherrypy.session.acquire_lock() yourself before using session data. """ def __init__(self): # _sessions.init must be bound after headers are read Tool.__init__(self, 'before_request_body', _sessions.init) def _lock_session(self): cherrypy.serving.session.acquire_lock() def _setup(self): """Hook this tool into cherrypy.request. The standard CherryPy request object will automatically call this method when the tool is "turned on" in config. """ hooks = cherrypy.serving.request.hooks conf = self._merged_args() p = conf.pop("priority", None) if p is None: p = getattr(self.callable, "priority", self._priority) hooks.attach(self._point, self.callable, priority=p, **conf) locking = conf.pop('locking', 'implicit') if locking == 'implicit': hooks.attach('before_handler', self._lock_session) elif locking == 'early': # Lock before the request body (but after _sessions.init runs!) hooks.attach('before_request_body', self._lock_session, priority=60) else: # Don't lock pass hooks.attach('before_finalize', _sessions.save) hooks.attach('on_end_request', _sessions.close) def regenerate(self): """Drop the current session and make a new one (with a new id).""" sess = cherrypy.serving.session sess.regenerate() # Grab cookie-relevant tool args conf = dict([(k, v) for k, v in self._merged_args().items() if k in ('path', 'path_header', 'name', 'timeout', 'domain', 'secure')]) _sessions.set_response_cookie(**conf) class XMLRPCController(object): """A Controller (page handler collection) for XML-RPC. To use it, have your controllers subclass this base class (it will turn on the tool for you). You can also supply the following optional config entries: tools.xmlrpc.encoding: 'utf-8' tools.xmlrpc.allow_none: 0 XML-RPC is a rather discontinuous layer over HTTP; dispatching to the appropriate handler must first be performed according to the URL, and then a second dispatch step must take place according to the RPC method specified in the request body. It also allows a superfluous "/RPC2" prefix in the URL, supplies its own handler args in the body, and requires a 200 OK "Fault" response instead of 404 when the desired method is not found. Therefore, XML-RPC cannot be implemented for CherryPy via a Tool alone. This Controller acts as the dispatch target for the first half (based on the URL); it then reads the RPC method from the request body and does its own second dispatch step based on that method. It also reads body params, and returns a Fault on error. The XMLRPCDispatcher strips any /RPC2 prefix; if you aren't using /RPC2 in your URL's, you can safely skip turning on the XMLRPCDispatcher. 
Otherwise, you need to use declare it in config: request.dispatch: cherrypy.dispatch.XMLRPCDispatcher() """ # Note we're hard-coding this into the 'tools' namespace. We could do # a huge amount of work to make it relocatable, but the only reason why # would be if someone actually disabled the default_toolbox. Meh. _cp_config = {'tools.xmlrpc.on': True} def default(self, *vpath, **params): rpcparams, rpcmethod = _xmlrpc.process_body() subhandler = self for attr in str(rpcmethod).split('.'): subhandler = getattr(subhandler, attr, None) if subhandler and getattr(subhandler, "exposed", False): body = subhandler(*(vpath + rpcparams), **params) else: # http://www.cherrypy.org/ticket/533 # if a method is not found, an xmlrpclib.Fault should be returned # raising an exception here will do that; see # cherrypy.lib.xmlrpc.on_error raise Exception('method "%s" is not supported' % attr) conf = cherrypy.serving.request.toolmaps['tools'].get("xmlrpc", {}) _xmlrpc.respond(body, conf.get('encoding', 'utf-8'), conf.get('allow_none', 0)) return cherrypy.serving.response.body default.exposed = True class SessionAuthTool(HandlerTool): def _setargs(self): for name in dir(cptools.SessionAuth): if not name.startswith("__"): setattr(self, name, None) class CachingTool(Tool): """Caching Tool for CherryPy.""" def _wrapper(self, **kwargs): request = cherrypy.serving.request if _caching.get(**kwargs): request.handler = None else: if request.cacheable: # Note the devious technique here of adding hooks on the fly request.hooks.attach('before_finalize', _caching.tee_output, priority=90) _wrapper.priority = 20 def _setup(self): """Hook caching into cherrypy.request.""" conf = self._merged_args() p = conf.pop("priority", None) cherrypy.serving.request.hooks.attach('before_handler', self._wrapper, priority=p, **conf) class Toolbox(object): """A collection of Tools. This object also functions as a config namespace handler for itself. Custom toolboxes should be added to each Application's toolboxes dict. """ def __init__(self, namespace): self.namespace = namespace def __setattr__(self, name, value): # If the Tool._name is None, supply it from the attribute name. if isinstance(value, Tool): if value._name is None: value._name = name value.namespace = self.namespace object.__setattr__(self, name, value) def __enter__(self): """Populate request.toolmaps from tools specified in config.""" cherrypy.serving.request.toolmaps[self.namespace] = map = {} def populate(k, v): toolname, arg = k.split(".", 1) bucket = map.setdefault(toolname, {}) bucket[arg] = v return populate def __exit__(self, exc_type, exc_val, exc_tb): """Run tool._setup() for each tool in our toolmap.""" map = cherrypy.serving.request.toolmaps.get(self.namespace) if map: for name, settings in map.items(): if settings.get("on", False): tool = getattr(self, name) tool._setup() class DeprecatedTool(Tool): _name = None warnmsg = "This Tool is deprecated." 
def __init__(self, point, warnmsg=None): self.point = point if warnmsg is not None: self.warnmsg = warnmsg def __call__(self, *args, **kwargs): warnings.warn(self.warnmsg) def tool_decorator(f): return f return tool_decorator def _setup(self): warnings.warn(self.warnmsg) default_toolbox = _d = Toolbox("tools") _d.session_auth = SessionAuthTool(cptools.session_auth) _d.proxy = Tool('before_request_body', cptools.proxy, priority=30) _d.response_headers = Tool('on_start_resource', cptools.response_headers) _d.log_tracebacks = Tool('before_error_response', cptools.log_traceback) _d.log_headers = Tool('before_error_response', cptools.log_request_headers) _d.log_hooks = Tool('on_end_request', cptools.log_hooks, priority=100) _d.err_redirect = ErrorTool(cptools.redirect) _d.etags = Tool('before_finalize', cptools.validate_etags, priority=75) _d.decode = Tool('before_request_body', encoding.decode) # the order of encoding, gzip, caching is important _d.encode = Tool('before_handler', encoding.ResponseEncoder, priority=70) _d.gzip = Tool('before_finalize', encoding.gzip, priority=80) _d.staticdir = HandlerTool(static.staticdir) _d.staticfile = HandlerTool(static.staticfile) _d.sessions = SessionTool() _d.xmlrpc = ErrorTool(_xmlrpc.on_error) _d.caching = CachingTool('before_handler', _caching.get, 'caching') _d.expires = Tool('before_finalize', _caching.expires) _d.tidy = DeprecatedTool('before_finalize', "The tidy tool has been removed from the standard distribution of CherryPy. " "The most recent version can be found at http://tools.cherrypy.org/browser.") _d.nsgmls = DeprecatedTool('before_finalize', "The nsgmls tool has been removed from the standard distribution of CherryPy. " "The most recent version can be found at http://tools.cherrypy.org/browser.") _d.ignore_headers = Tool('before_request_body', cptools.ignore_headers) _d.referer = Tool('before_request_body', cptools.referer) _d.basic_auth = Tool('on_start_resource', auth.basic_auth) _d.digest_auth = Tool('on_start_resource', auth.digest_auth) _d.trailing_slash = Tool('before_handler', cptools.trailing_slash, priority=60) _d.flatten = Tool('before_finalize', cptools.flatten) _d.accept = Tool('on_start_resource', cptools.accept) _d.redirect = Tool('on_start_resource', cptools.redirect) _d.autovary = Tool('on_start_resource', cptools.autovary, priority=0) _d.json_in = Tool('before_request_body', jsontools.json_in, priority=30) _d.json_out = Tool('before_handler', jsontools.json_out, priority=30) _d.auth_basic = Tool('before_handler', auth_basic.basic_auth, priority=1) _d.auth_digest = Tool('before_handler', auth_digest.digest_auth, priority=1) del _d, cptools, encoding, auth, static
gpl-3.0
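A sketch of how a custom Tool built on the classes above is typically registered on the default toolbox and turned on per handler. The header name and handler are illustrative only.

# Sketch only -- header name and page handler are made up.
import cherrypy

def add_powered_by(value='CherryPy'):
    # Runs as a 'before_finalize' hook and stamps a response header.
    cherrypy.response.headers['X-Powered-By'] = value

# Attaching the Tool to the default toolbox makes it configurable as
# 'tools.powered_by.*' and usable as a decorator, as described above.
cherrypy.tools.powered_by = cherrypy.Tool('before_finalize', add_powered_by)

class Root(object):
    @cherrypy.tools.powered_by(value='example')
    def index(self):
        return 'hello'
    index.exposed = True

# cherrypy.quickstart(Root())  # start a server using the handler above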
SetBased/py-etlt
test/helper/AllenTest.py
1
2762
import unittest from etlt.helper.Allen import Allen class AllenTest(unittest.TestCase): """ Test cases for class Allen algebra. """ # ------------------------------------------------------------------------------------------------------------------ def _test1(self, expected, x, y): relation1 = Allen.relation(x[0], x[1], y[0], y[1]) self.assertEqual(expected, relation1) relation2 = Allen.relation(y[0], y[1], x[0], x[1]) self.assertEqual(relation1, -1 * relation2) relation3 = Allen.relation(x[1], x[0], y[0], y[1]) self.assertIsNone(relation3) relation4 = Allen.relation(x[0], x[1], y[1], y[0]) self.assertIsNone(relation4) relation5 = Allen.relation(x[1], x[0], y[1], y[0]) self.assertIsNone(relation5) relation6 = Allen.relation(y[1], y[0], x[0], x[1]) self.assertIsNone(relation6) relation7 = Allen.relation(y[0], y[1], x[1], x[0]) self.assertIsNone(relation7) relation8 = Allen.relation(y[1], y[0], x[1], x[0]) self.assertIsNone(relation8) # ------------------------------------------------------------------------------------------------------------------ def test_x_takes_place_before_y(self): self._test1(Allen.X_BEFORE_Y, (1, 3), (5, 7)) # ------------------------------------------------------------------------------------------------------------------ def test_x_meets_y(self): self._test1(Allen.X_MEETS_Y, (1, 2), (3, 5)) # ------------------------------------------------------------------------------------------------------------------ def test_x_overlaps_with_y(self): self._test1(Allen.X_OVERLAPS_WITH_Y, (1, 4), (3, 5)) self._test1(Allen.X_OVERLAPS_WITH_Y, (1, 3), (3, 5)) # ------------------------------------------------------------------------------------------------------------------ def test_x_starts_y(self): self._test1(Allen.X_STARTS_Y, (1, 2), (1, 5)) # ------------------------------------------------------------------------------------------------------------------ def test_x_during_y(self): self._test1(Allen.X_DURING_Y, (2, 3), (1, 5)) # ------------------------------------------------------------------------------------------------------------------ def test_x_finish_y(self): self._test1(Allen.X_FINISHES_Y, (3, 5), (1, 5)) # ------------------------------------------------------------------------------------------------------------------ def test_x_equal_y(self): self._test1(Allen.X_EQUAL_Y, (1, 5), (1, 5)) # ----------------------------------------------------------------------------------------------------------------------
mit
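A sketch of calling Allen.relation directly with the same intervals the tests above use; the expected relations, and the sign flip on swapped arguments, come straight from those assertions.

# Sketch only -- intervals and expected relations taken from the tests above.
from etlt.helper.Allen import Allen

# (1, 3) lies entirely before (5, 7).
assert Allen.relation(1, 3, 5, 7) == Allen.X_BEFORE_Y

# (2, 3) falls strictly inside (1, 5).
assert Allen.relation(2, 3, 1, 5) == Allen.X_DURING_Y

# Reversing the argument order flips the sign of the relation.
assert Allen.relation(5, 7, 1, 3) == -Allen.X_BEFORE_Y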
nikesh-mahalka/nova
nova/api/openstack/compute/schemas/fixed_ips.py
79
1027
# Copyright 2015 Intel Corporation # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from nova.api.validation import parameter_types reserve = { 'type': 'object', 'properties': { 'reserve': parameter_types.none, }, 'required': ['reserve'], 'additionalProperties': False, } unreserve = { 'type': 'object', 'properties': { 'unreserve': parameter_types.none, }, 'required': ['unreserve'], 'additionalProperties': False, }
apache-2.0
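A sketch of checking a request body against the reserve schema above with the plain jsonschema library. The inline {'type': 'null'} is only a stand-in for parameter_types.none, which is not shown in this record, and nova itself applies these schemas through its own request validation machinery rather than direct jsonschema calls.

# Sketch only -- parameter_types.none is approximated by {'type': 'null'}.
import jsonschema

reserve = {
    'type': 'object',
    'properties': {
        'reserve': {'type': 'null'},
    },
    'required': ['reserve'],
    'additionalProperties': False,
}

jsonschema.validate({'reserve': None}, reserve)        # accepted

try:
    jsonschema.validate({'unreserve': None}, reserve)  # wrong key, rejected
except jsonschema.ValidationError as exc:
    print(exc.message)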
msebire/intellij-community
python/helpers/pydev/pydevd_attach_to_process/winappdbg/win32/context_i386.py
102
16108
#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2009-2014, Mario Vilas # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # * Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # * Redistributions in binary form must reproduce the above copyright # notice,this list of conditions and the following disclaimer in the # documentation and/or other materials provided with the distribution. # * Neither the name of the copyright holder nor the names of its # contributors may be used to endorse or promote products derived from # this software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. """ CONTEXT structure for i386. """ __revision__ = "$Id$" from winappdbg.win32.defines import * from winappdbg.win32.version import ARCH_I386 #============================================================================== # This is used later on to calculate the list of exported symbols. _all = None _all = set(vars().keys()) #============================================================================== #--- CONTEXT structures and constants ----------------------------------------- # The following values specify the type of access in the first parameter # of the exception record when the exception code specifies an access # violation. 
EXCEPTION_READ_FAULT = 0 # exception caused by a read EXCEPTION_WRITE_FAULT = 1 # exception caused by a write EXCEPTION_EXECUTE_FAULT = 8 # exception caused by an instruction fetch CONTEXT_i386 = 0x00010000 # this assumes that i386 and CONTEXT_i486 = 0x00010000 # i486 have identical context records CONTEXT_CONTROL = (CONTEXT_i386 | long(0x00000001)) # SS:SP, CS:IP, FLAGS, BP CONTEXT_INTEGER = (CONTEXT_i386 | long(0x00000002)) # AX, BX, CX, DX, SI, DI CONTEXT_SEGMENTS = (CONTEXT_i386 | long(0x00000004)) # DS, ES, FS, GS CONTEXT_FLOATING_POINT = (CONTEXT_i386 | long(0x00000008)) # 387 state CONTEXT_DEBUG_REGISTERS = (CONTEXT_i386 | long(0x00000010)) # DB 0-3,6,7 CONTEXT_EXTENDED_REGISTERS = (CONTEXT_i386 | long(0x00000020)) # cpu specific extensions CONTEXT_FULL = (CONTEXT_CONTROL | CONTEXT_INTEGER | CONTEXT_SEGMENTS) CONTEXT_ALL = (CONTEXT_CONTROL | CONTEXT_INTEGER | CONTEXT_SEGMENTS | \ CONTEXT_FLOATING_POINT | CONTEXT_DEBUG_REGISTERS | \ CONTEXT_EXTENDED_REGISTERS) SIZE_OF_80387_REGISTERS = 80 MAXIMUM_SUPPORTED_EXTENSION = 512 # typedef struct _FLOATING_SAVE_AREA { # DWORD ControlWord; # DWORD StatusWord; # DWORD TagWord; # DWORD ErrorOffset; # DWORD ErrorSelector; # DWORD DataOffset; # DWORD DataSelector; # BYTE RegisterArea[SIZE_OF_80387_REGISTERS]; # DWORD Cr0NpxState; # } FLOATING_SAVE_AREA; class FLOATING_SAVE_AREA(Structure): _pack_ = 1 _fields_ = [ ('ControlWord', DWORD), ('StatusWord', DWORD), ('TagWord', DWORD), ('ErrorOffset', DWORD), ('ErrorSelector', DWORD), ('DataOffset', DWORD), ('DataSelector', DWORD), ('RegisterArea', BYTE * SIZE_OF_80387_REGISTERS), ('Cr0NpxState', DWORD), ] _integer_members = ('ControlWord', 'StatusWord', 'TagWord', 'ErrorOffset', 'ErrorSelector', 'DataOffset', 'DataSelector', 'Cr0NpxState') @classmethod def from_dict(cls, fsa): 'Instance a new structure from a Python dictionary.' fsa = dict(fsa) s = cls() for key in cls._integer_members: setattr(s, key, fsa.get(key)) ra = fsa.get('RegisterArea', None) if ra is not None: for index in compat.xrange(0, SIZE_OF_80387_REGISTERS): s.RegisterArea[index] = ra[index] return s def to_dict(self): 'Convert a structure into a Python dictionary.' fsa = dict() for key in self._integer_members: fsa[key] = getattr(self, key) ra = [ self.RegisterArea[index] for index in compat.xrange(0, SIZE_OF_80387_REGISTERS) ] ra = tuple(ra) fsa['RegisterArea'] = ra return fsa PFLOATING_SAVE_AREA = POINTER(FLOATING_SAVE_AREA) LPFLOATING_SAVE_AREA = PFLOATING_SAVE_AREA # typedef struct _CONTEXT { # DWORD ContextFlags; # DWORD Dr0; # DWORD Dr1; # DWORD Dr2; # DWORD Dr3; # DWORD Dr6; # DWORD Dr7; # FLOATING_SAVE_AREA FloatSave; # DWORD SegGs; # DWORD SegFs; # DWORD SegEs; # DWORD SegDs; # DWORD Edi; # DWORD Esi; # DWORD Ebx; # DWORD Edx; # DWORD Ecx; # DWORD Eax; # DWORD Ebp; # DWORD Eip; # DWORD SegCs; # DWORD EFlags; # DWORD Esp; # DWORD SegSs; # BYTE ExtendedRegisters[MAXIMUM_SUPPORTED_EXTENSION]; # } CONTEXT; class CONTEXT(Structure): arch = ARCH_I386 _pack_ = 1 # Context Frame # # This frame has a several purposes: 1) it is used as an argument to # NtContinue, 2) is is used to constuct a call frame for APC delivery, # and 3) it is used in the user level thread creation routines. # # The layout of the record conforms to a standard call frame. _fields_ = [ # The flags values within this flag control the contents of # a CONTEXT record. 
# # If the context record is used as an input parameter, then # for each portion of the context record controlled by a flag # whose value is set, it is assumed that that portion of the # context record contains valid context. If the context record # is being used to modify a threads context, then only that # portion of the threads context will be modified. # # If the context record is used as an IN OUT parameter to capture # the context of a thread, then only those portions of the thread's # context corresponding to set flags will be returned. # # The context record is never used as an OUT only parameter. ('ContextFlags', DWORD), # This section is specified/returned if CONTEXT_DEBUG_REGISTERS is # set in ContextFlags. Note that CONTEXT_DEBUG_REGISTERS is NOT # included in CONTEXT_FULL. ('Dr0', DWORD), ('Dr1', DWORD), ('Dr2', DWORD), ('Dr3', DWORD), ('Dr6', DWORD), ('Dr7', DWORD), # This section is specified/returned if the # ContextFlags word contains the flag CONTEXT_FLOATING_POINT. ('FloatSave', FLOATING_SAVE_AREA), # This section is specified/returned if the # ContextFlags word contains the flag CONTEXT_SEGMENTS. ('SegGs', DWORD), ('SegFs', DWORD), ('SegEs', DWORD), ('SegDs', DWORD), # This section is specified/returned if the # ContextFlags word contains the flag CONTEXT_INTEGER. ('Edi', DWORD), ('Esi', DWORD), ('Ebx', DWORD), ('Edx', DWORD), ('Ecx', DWORD), ('Eax', DWORD), # This section is specified/returned if the # ContextFlags word contains the flag CONTEXT_CONTROL. ('Ebp', DWORD), ('Eip', DWORD), ('SegCs', DWORD), # MUST BE SANITIZED ('EFlags', DWORD), # MUST BE SANITIZED ('Esp', DWORD), ('SegSs', DWORD), # This section is specified/returned if the ContextFlags word # contains the flag CONTEXT_EXTENDED_REGISTERS. # The format and contexts are processor specific. ('ExtendedRegisters', BYTE * MAXIMUM_SUPPORTED_EXTENSION), ] _ctx_debug = ('Dr0', 'Dr1', 'Dr2', 'Dr3', 'Dr6', 'Dr7') _ctx_segs = ('SegGs', 'SegFs', 'SegEs', 'SegDs', ) _ctx_int = ('Edi', 'Esi', 'Ebx', 'Edx', 'Ecx', 'Eax') _ctx_ctrl = ('Ebp', 'Eip', 'SegCs', 'EFlags', 'Esp', 'SegSs') @classmethod def from_dict(cls, ctx): 'Instance a new structure from a Python dictionary.' ctx = Context(ctx) s = cls() ContextFlags = ctx['ContextFlags'] setattr(s, 'ContextFlags', ContextFlags) if (ContextFlags & CONTEXT_DEBUG_REGISTERS) == CONTEXT_DEBUG_REGISTERS: for key in s._ctx_debug: setattr(s, key, ctx[key]) if (ContextFlags & CONTEXT_FLOATING_POINT) == CONTEXT_FLOATING_POINT: fsa = ctx['FloatSave'] s.FloatSave = FLOATING_SAVE_AREA.from_dict(fsa) if (ContextFlags & CONTEXT_SEGMENTS) == CONTEXT_SEGMENTS: for key in s._ctx_segs: setattr(s, key, ctx[key]) if (ContextFlags & CONTEXT_INTEGER) == CONTEXT_INTEGER: for key in s._ctx_int: setattr(s, key, ctx[key]) if (ContextFlags & CONTEXT_CONTROL) == CONTEXT_CONTROL: for key in s._ctx_ctrl: setattr(s, key, ctx[key]) if (ContextFlags & CONTEXT_EXTENDED_REGISTERS) == CONTEXT_EXTENDED_REGISTERS: er = ctx['ExtendedRegisters'] for index in compat.xrange(0, MAXIMUM_SUPPORTED_EXTENSION): s.ExtendedRegisters[index] = er[index] return s def to_dict(self): 'Convert a structure into a Python native type.' 
ctx = Context() ContextFlags = self.ContextFlags ctx['ContextFlags'] = ContextFlags if (ContextFlags & CONTEXT_DEBUG_REGISTERS) == CONTEXT_DEBUG_REGISTERS: for key in self._ctx_debug: ctx[key] = getattr(self, key) if (ContextFlags & CONTEXT_FLOATING_POINT) == CONTEXT_FLOATING_POINT: ctx['FloatSave'] = self.FloatSave.to_dict() if (ContextFlags & CONTEXT_SEGMENTS) == CONTEXT_SEGMENTS: for key in self._ctx_segs: ctx[key] = getattr(self, key) if (ContextFlags & CONTEXT_INTEGER) == CONTEXT_INTEGER: for key in self._ctx_int: ctx[key] = getattr(self, key) if (ContextFlags & CONTEXT_CONTROL) == CONTEXT_CONTROL: for key in self._ctx_ctrl: ctx[key] = getattr(self, key) if (ContextFlags & CONTEXT_EXTENDED_REGISTERS) == CONTEXT_EXTENDED_REGISTERS: er = [ self.ExtendedRegisters[index] for index in compat.xrange(0, MAXIMUM_SUPPORTED_EXTENSION) ] er = tuple(er) ctx['ExtendedRegisters'] = er return ctx PCONTEXT = POINTER(CONTEXT) LPCONTEXT = PCONTEXT class Context(dict): """ Register context dictionary for the i386 architecture. """ arch = CONTEXT.arch def __get_pc(self): return self['Eip'] def __set_pc(self, value): self['Eip'] = value pc = property(__get_pc, __set_pc) def __get_sp(self): return self['Esp'] def __set_sp(self, value): self['Esp'] = value sp = property(__get_sp, __set_sp) def __get_fp(self): return self['Ebp'] def __set_fp(self, value): self['Ebp'] = value fp = property(__get_fp, __set_fp) #--- LDT_ENTRY structure ------------------------------------------------------ # typedef struct _LDT_ENTRY { # WORD LimitLow; # WORD BaseLow; # union { # struct { # BYTE BaseMid; # BYTE Flags1; # BYTE Flags2; # BYTE BaseHi; # } Bytes; # struct { # DWORD BaseMid :8; # DWORD Type :5; # DWORD Dpl :2; # DWORD Pres :1; # DWORD LimitHi :4; # DWORD Sys :1; # DWORD Reserved_0 :1; # DWORD Default_Big :1; # DWORD Granularity :1; # DWORD BaseHi :8; # } Bits; # } HighWord; # } LDT_ENTRY, # *PLDT_ENTRY; class _LDT_ENTRY_BYTES_(Structure): _pack_ = 1 _fields_ = [ ('BaseMid', BYTE), ('Flags1', BYTE), ('Flags2', BYTE), ('BaseHi', BYTE), ] class _LDT_ENTRY_BITS_(Structure): _pack_ = 1 _fields_ = [ ('BaseMid', DWORD, 8), ('Type', DWORD, 5), ('Dpl', DWORD, 2), ('Pres', DWORD, 1), ('LimitHi', DWORD, 4), ('Sys', DWORD, 1), ('Reserved_0', DWORD, 1), ('Default_Big', DWORD, 1), ('Granularity', DWORD, 1), ('BaseHi', DWORD, 8), ] class _LDT_ENTRY_HIGHWORD_(Union): _pack_ = 1 _fields_ = [ ('Bytes', _LDT_ENTRY_BYTES_), ('Bits', _LDT_ENTRY_BITS_), ] class LDT_ENTRY(Structure): _pack_ = 1 _fields_ = [ ('LimitLow', WORD), ('BaseLow', WORD), ('HighWord', _LDT_ENTRY_HIGHWORD_), ] PLDT_ENTRY = POINTER(LDT_ENTRY) LPLDT_ENTRY = PLDT_ENTRY ############################################################################### # BOOL WINAPI GetThreadSelectorEntry( # __in HANDLE hThread, # __in DWORD dwSelector, # __out LPLDT_ENTRY lpSelectorEntry # ); def GetThreadSelectorEntry(hThread, dwSelector): _GetThreadSelectorEntry = windll.kernel32.GetThreadSelectorEntry _GetThreadSelectorEntry.argtypes = [HANDLE, DWORD, LPLDT_ENTRY] _GetThreadSelectorEntry.restype = bool _GetThreadSelectorEntry.errcheck = RaiseIfZero ldt = LDT_ENTRY() _GetThreadSelectorEntry(hThread, dwSelector, byref(ldt)) return ldt # BOOL WINAPI GetThreadContext( # __in HANDLE hThread, # __inout LPCONTEXT lpContext # ); def GetThreadContext(hThread, ContextFlags = None, raw = False): _GetThreadContext = windll.kernel32.GetThreadContext _GetThreadContext.argtypes = [HANDLE, LPCONTEXT] _GetThreadContext.restype = bool _GetThreadContext.errcheck = RaiseIfZero if ContextFlags is None: 
ContextFlags = CONTEXT_ALL | CONTEXT_i386 Context = CONTEXT() Context.ContextFlags = ContextFlags _GetThreadContext(hThread, byref(Context)) if raw: return Context return Context.to_dict() # BOOL WINAPI SetThreadContext( # __in HANDLE hThread, # __in const CONTEXT* lpContext # ); def SetThreadContext(hThread, lpContext): _SetThreadContext = windll.kernel32.SetThreadContext _SetThreadContext.argtypes = [HANDLE, LPCONTEXT] _SetThreadContext.restype = bool _SetThreadContext.errcheck = RaiseIfZero if isinstance(lpContext, dict): lpContext = CONTEXT.from_dict(lpContext) _SetThreadContext(hThread, byref(lpContext)) #============================================================================== # This calculates the list of exported symbols. _all = set(vars().keys()).difference(_all) __all__ = [_x for _x in _all if not _x.startswith('_')] __all__.sort() #==============================================================================
apache-2.0
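A minimal usage sketch for the i386 CONTEXT wrappers in the record above (illustrative only, not part of the original file; hThread is assumed to be an already-opened thread handle with the required access rights):

# Illustrative sketch -- hThread is assumed to come from elsewhere (e.g. OpenThread).
ctx = GetThreadContext(hThread, ContextFlags=CONTEXT_CONTROL)
print("EIP=%08x ESP=%08x" % (ctx['Eip'], ctx['Esp']))   # ctx is a Context dict
ctx['EFlags'] |= 0x100          # set the x86 trap flag to single-step the thread
SetThreadContext(hThread, ctx)  # accepts the plain dict and converts it back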
h4ck3rm1k3/pywikibot-core
scripts/maintenance/make_i18n_dict.py
3
5239
#!/usr/bin/python # -*- coding: utf-8 -*- """ Generate a i18n file from a given script. usage: run IDLE at topmost level >>> import pwb >>> from scripts.maintenance.make_i18n_dict import i18nBot >>> bot = i18nBot('<scriptname>', '<msg dict>') >>> bot.run() If you have more than one message dictionary, give all these names to the bot: >>> bot = i18nBot('<scriptname>', '<msg dict1>', '<msg dict2>', '<msg dict3>') If you want to rename the message index use keyword arguments. This may be mixed with preleading positonal argumens: >>> bot = i18nBot('<scriptname>', '<msg dict1>', the_other_msg='<msg dict2>') If you have the messages as instance constants you may call the bot as follows: >>> bot = i18nBot('<scriptname>.<class instance>', '<msg dict1>', '<msg dict2>') It's also possible to make json files too by using to_json method after instantiating the bot. It also calls bot.run() to create the dictionaries. >>> bot.to_json() """ # # (C) xqt, 2013-2015 # (C) Pywikibot team, 2013-2015 # # Distributed under the terms of the MIT license. # from __future__ import absolute_import, unicode_literals __version__ = '$Id$' # import codecs import json import os from pywikibot import config class i18nBot(object): """I18n bot.""" def __init__(self, script, *args, **kwargs): """Constructor.""" modules = script.split('.') self.scriptname = modules[0] self.script = __import__('scripts.' + self.scriptname) for m in modules: self.script = getattr(self.script, m) self.messages = {} # setup the message dict for msg in args: if hasattr(self.script, msg): self.messages[msg] = msg else: print('message {0} not found'.format(msg)) for new, old in kwargs.items(): self.messages[old] = new.replace('_', '-') self.dict = {} def print_all(self): """Pretty print the dict as a file content to screen.""" if not self.dict: print('No messages found, read them first.\n' 'Use "run" or "to_json" methods') return keys = list(self.dict.keys()) keys.remove('qqq') keys.sort() keys.insert(0, 'qqq') if 'en' in keys: keys.remove('en') keys.insert(0, 'en') print("# -*- coding: utf-8 -*-") print("msg = {") for code in keys: print(" '%s': {" % code) for msg in sorted(self.messages.values()): label = "%s-%s" % (self.scriptname, msg) if label in self.dict[code]: print(" '%s': u'%s'," % (label, self.dict[code][label])) print(" },") print("};") def read(self, oldmsg, newmsg=None): """Read a single message from source script.""" msg = getattr(self.script, oldmsg) keys = list(msg.keys()) keys.append('qqq') if newmsg is None: newmsg = oldmsg for code in keys: label = "%s-%s" % (self.scriptname, newmsg) if code == 'qqq': if code not in self.dict: self.dict[code] = {} self.dict[code][label] = ( u'Edit summary for message %s of %s report' % (newmsg, self.scriptname)) elif code != 'commons': if code not in self.dict: self.dict[code] = {} self.dict[code][label] = msg[code] if 'en' not in keys: print('WARNING: "en" key missing for message %s' % newmsg) def run(self, quiet=False): """ Run the bot, read the messages from source and print the dict. @param quiet: print the result if False @type quiet: bool """ for item in self.messages.items(): self.read(*item) if not quiet: self.print_all() def to_json(self, quiet=True): """ Run the bot and create json files. 
@param quiet: Print the result if False @type quiet: bool """ IDENT = 4 if not self.dict: self.run(quiet) json_dir = os.path.join( config.base_dir, 'scripts/i18n', self.scriptname) if not os.path.exists(json_dir): os.makedirs(json_dir) for lang in self.dict: file_name = os.path.join(json_dir, '%s.json' % lang) if os.path.isfile(file_name): with codecs.open(file_name, 'r', 'utf-8') as json_file: new_dict = json.loads(json_file.read()) else: new_dict = {} new_dict['@metadata'] = new_dict.get('@metadata', {'authors': []}) with codecs.open(file_name, 'w', 'utf-8') as json_file: new_dict.update(self.dict[lang]) s = json.dumps(new_dict, ensure_ascii=False, sort_keys=True, indent=IDENT, separators=(',', ': ')) s = s.replace(' ' * IDENT, '\t') json_file.write(s) if __name__ == '__main__': print(__doc__)
mit
whereismyjetpack/ansible
lib/ansible/compat/selectors/_selectors2.py
124
24265
# This file is from the selectors2.py package. It backports the PSF Licensed # selectors module from the Python-3.5 stdlib to older versions of Python. # The author, Seth Michael Larson, dual licenses his modifications under the # PSF License and MIT License: # https://github.com/SethMichaelLarson/selectors2#license # # Seth's copy of the MIT license is reproduced below # # MIT License # # Copyright (c) 2016 Seth Michael Larson # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in all # copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. # Backport of selectors.py from Python 3.5+ to support Python < 3.4 # Also has the behavior specified in PEP 475 which is to retry syscalls # in the case of an EINTR error. This module is required because selectors34 # does not follow this behavior and instead returns that no dile descriptor # events have occurred rather than retry the syscall. The decision to drop # support for select.devpoll is made to maintain 100% test coverage. import errno import math import select import socket import sys import time from collections import namedtuple, Mapping try: monotonic = time.monotonic except (AttributeError, ImportError): # Python 3.3< monotonic = time.time __author__ = 'Seth Michael Larson' __email__ = 'sethmichaellarson@protonmail.com' __version__ = '1.1.0' __license__ = 'MIT' __all__ = [ 'EVENT_READ', 'EVENT_WRITE', 'SelectorError', 'SelectorKey', 'DefaultSelector' ] EVENT_READ = (1 << 0) EVENT_WRITE = (1 << 1) HAS_SELECT = True # Variable that shows whether the platform has a selector. _SYSCALL_SENTINEL = object() # Sentinel in case a system call returns None. class SelectorError(Exception): def __init__(self, errcode): super(SelectorError, self).__init__() self.errno = errcode def __repr__(self): return "<SelectorError errno={0}>".format(self.errno) def __str__(self): return self.__repr__() def _fileobj_to_fd(fileobj): """ Return a file descriptor from a file object. If given an integer will simply return that integer back. """ if isinstance(fileobj, int): fd = fileobj else: try: fd = int(fileobj.fileno()) except (AttributeError, TypeError, ValueError): raise ValueError("Invalid file object: {0!r}".format(fileobj)) if fd < 0: raise ValueError("Invalid file descriptor: {0}".format(fd)) return fd # Python 3.5 uses a more direct route to wrap system calls to increase speed. if sys.version_info >= (3, 5): def _syscall_wrapper(func, _, *args, **kwargs): """ This is the short-circuit version of the below logic because in Python 3.5+ all selectors restart system calls. 
""" try: return func(*args, **kwargs) except (OSError, IOError, select.error) as e: errcode = None if hasattr(e, "errno"): errcode = e.errno elif hasattr(e, "args"): errcode = e.args[0] raise SelectorError(errcode) else: def _syscall_wrapper(func, recalc_timeout, *args, **kwargs): """ Wrapper function for syscalls that could fail due to EINTR. All functions should be retried if there is time left in the timeout in accordance with PEP 475. """ timeout = kwargs.get("timeout", None) if timeout is None: expires = None recalc_timeout = False else: timeout = float(timeout) if timeout < 0.0: # Timeout less than 0 treated as no timeout. expires = None else: expires = monotonic() + timeout args = list(args) if recalc_timeout and "timeout" not in kwargs: raise ValueError( "Timeout must be in args or kwargs to be recalculated") result = _SYSCALL_SENTINEL while result is _SYSCALL_SENTINEL: try: result = func(*args, **kwargs) # OSError is thrown by select.select # IOError is thrown by select.epoll.poll # select.error is thrown by select.poll.poll # Aren't we thankful for Python 3.x rework for exceptions? except (OSError, IOError, select.error) as e: # select.error wasn't a subclass of OSError in the past. errcode = None if hasattr(e, "errno"): errcode = e.errno elif hasattr(e, "args"): errcode = e.args[0] # Also test for the Windows equivalent of EINTR. is_interrupt = (errcode == errno.EINTR or (hasattr(errno, "WSAEINTR") and errcode == errno.WSAEINTR)) if is_interrupt: if expires is not None: current_time = monotonic() if current_time > expires: raise OSError(errno=errno.ETIMEDOUT) if recalc_timeout: if "timeout" in kwargs: kwargs["timeout"] = expires - current_time continue if errcode: raise SelectorError(errcode) else: raise return result SelectorKey = namedtuple('SelectorKey', ['fileobj', 'fd', 'events', 'data']) class _SelectorMapping(Mapping): """ Mapping of file objects to selector keys """ def __init__(self, selector): self._selector = selector def __len__(self): return len(self._selector._fd_to_key) def __getitem__(self, fileobj): try: fd = self._selector._fileobj_lookup(fileobj) return self._selector._fd_to_key[fd] except KeyError: raise KeyError("{0!r} is not registered.".format(fileobj)) def __iter__(self): return iter(self._selector._fd_to_key) class BaseSelector(object): """ Abstract Selector class A selector supports registering file objects to be monitored for specific I/O events. A file object is a file descriptor or any object with a `fileno()` method. An arbitrary object can be attached to the file object which can be used for example to store context info, a callback, etc. A selector can use various implementations (select(), poll(), epoll(), and kqueue()) depending on the platform. The 'DefaultSelector' class uses the most efficient implementation for the current platform. """ def __init__(self): # Maps file descriptors to keys. self._fd_to_key = {} # Read-only mapping returned by get_map() self._map = _SelectorMapping(self) def _fileobj_lookup(self, fileobj): """ Return a file descriptor from a file object. This wraps _fileobj_to_fd() to do an exhaustive search in case the object is invalid but we still have it in our map. Used by unregister() so we can unregister an object that was previously registered even if it is closed. It is also used by _SelectorMapping """ try: return _fileobj_to_fd(fileobj) except ValueError: # Search through all our mapped keys. for key in self._fd_to_key.values(): if key.fileobj is fileobj: return key.fd # Raise ValueError after all. 
raise def register(self, fileobj, events, data=None): """ Register a file object for a set of events to monitor. """ if (not events) or (events & ~(EVENT_READ | EVENT_WRITE)): raise ValueError("Invalid events: {0!r}".format(events)) key = SelectorKey(fileobj, self._fileobj_lookup(fileobj), events, data) if key.fd in self._fd_to_key: raise KeyError("{0!r} (FD {1}) is already registered" .format(fileobj, key.fd)) self._fd_to_key[key.fd] = key return key def unregister(self, fileobj): """ Unregister a file object from being monitored. """ try: key = self._fd_to_key.pop(self._fileobj_lookup(fileobj)) except KeyError: raise KeyError("{0!r} is not registered".format(fileobj)) # Getting the fileno of a closed socket on Windows errors with EBADF. except socket.error as err: if err.errno != errno.EBADF: raise else: for key in self._fd_to_key.values(): if key.fileobj is fileobj: self._fd_to_key.pop(key.fd) break else: raise KeyError("{0!r} is not registered".format(fileobj)) return key def modify(self, fileobj, events, data=None): """ Change a registered file object monitored events and data. """ # NOTE: Some subclasses optimize this operation even further. try: key = self._fd_to_key[self._fileobj_lookup(fileobj)] except KeyError: raise KeyError("{0!r} is not registered".format(fileobj)) if events != key.events: self.unregister(fileobj) key = self.register(fileobj, events, data) elif data != key.data: # Use a shortcut to update the data. key = key._replace(data=data) self._fd_to_key[key.fd] = key return key def select(self, timeout=None): """ Perform the actual selection until some monitored file objects are ready or the timeout expires. """ raise NotImplementedError() def close(self): """ Close the selector. This must be called to ensure that all underlying resources are freed. """ self._fd_to_key.clear() self._map = None def get_key(self, fileobj): """ Return the key associated with a registered file object. """ mapping = self.get_map() if mapping is None: raise RuntimeError("Selector is closed") try: return mapping[fileobj] except KeyError: raise KeyError("{0!r} is not registered".format(fileobj)) def get_map(self): """ Return a mapping of file objects to selector keys """ return self._map def _key_from_fd(self, fd): """ Return the key associated to a given file descriptor Return None if it is not found. """ try: return self._fd_to_key[fd] except KeyError: return None def __enter__(self): return self def __exit__(self, *args): self.close() # Almost all platforms have select.select() if hasattr(select, "select"): class SelectSelector(BaseSelector): """ Select-based selector. """ def __init__(self): super(SelectSelector, self).__init__() self._readers = set() self._writers = set() def register(self, fileobj, events, data=None): key = super(SelectSelector, self).register(fileobj, events, data) if events & EVENT_READ: self._readers.add(key.fd) if events & EVENT_WRITE: self._writers.add(key.fd) return key def unregister(self, fileobj): key = super(SelectSelector, self).unregister(fileobj) self._readers.discard(key.fd) self._writers.discard(key.fd) return key def _select(self, r, w, timeout=None): """ Wrapper for select.select because timeout is a positional arg """ return select.select(r, w, [], timeout) def select(self, timeout=None): # Selecting on empty lists on Windows errors out. 
if not len(self._readers) and not len(self._writers): return [] timeout = None if timeout is None else max(timeout, 0.0) ready = [] r, w, _ = _syscall_wrapper(self._select, True, self._readers, self._writers, timeout) r = set(r) w = set(w) for fd in r | w: events = 0 if fd in r: events |= EVENT_READ if fd in w: events |= EVENT_WRITE key = self._key_from_fd(fd) if key: ready.append((key, events & key.events)) return ready __all__.append('SelectSelector') if hasattr(select, "poll"): class PollSelector(BaseSelector): """ Poll-based selector """ def __init__(self): super(PollSelector, self).__init__() self._poll = select.poll() def register(self, fileobj, events, data=None): key = super(PollSelector, self).register(fileobj, events, data) event_mask = 0 if events & EVENT_READ: event_mask |= select.POLLIN if events & EVENT_WRITE: event_mask |= select.POLLOUT self._poll.register(key.fd, event_mask) return key def unregister(self, fileobj): key = super(PollSelector, self).unregister(fileobj) self._poll.unregister(key.fd) return key def _wrap_poll(self, timeout=None): """ Wrapper function for select.poll.poll() so that _syscall_wrapper can work with only seconds. """ if timeout is not None: if timeout <= 0: timeout = 0 else: # select.poll.poll() has a resolution of 1 millisecond, # round away from zero to wait *at least* timeout seconds. timeout = math.ceil(timeout * 1e3) result = self._poll.poll(timeout) return result def select(self, timeout=None): ready = [] fd_events = _syscall_wrapper(self._wrap_poll, True, timeout=timeout) for fd, event_mask in fd_events: events = 0 if event_mask & ~select.POLLIN: events |= EVENT_WRITE if event_mask & ~select.POLLOUT: events |= EVENT_READ key = self._key_from_fd(fd) if key: ready.append((key, events & key.events)) return ready __all__.append('PollSelector') if hasattr(select, "epoll"): class EpollSelector(BaseSelector): """ Epoll-based selector """ def __init__(self): super(EpollSelector, self).__init__() self._epoll = select.epoll() def fileno(self): return self._epoll.fileno() def register(self, fileobj, events, data=None): key = super(EpollSelector, self).register(fileobj, events, data) events_mask = 0 if events & EVENT_READ: events_mask |= select.EPOLLIN if events & EVENT_WRITE: events_mask |= select.EPOLLOUT _syscall_wrapper(self._epoll.register, False, key.fd, events_mask) return key def unregister(self, fileobj): key = super(EpollSelector, self).unregister(fileobj) try: _syscall_wrapper(self._epoll.unregister, False, key.fd) except SelectorError: # This can occur when the fd was closed since registry. pass return key def select(self, timeout=None): if timeout is not None: if timeout <= 0: timeout = 0.0 else: # select.epoll.poll() has a resolution of 1 millisecond # but luckily takes seconds so we don't need a wrapper # like PollSelector. Just for better rounding. timeout = math.ceil(timeout * 1e3) * 1e-3 timeout = float(timeout) else: timeout = -1.0 # epoll.poll() must have a float. # We always want at least 1 to ensure that select can be called # with no file descriptors registered. Otherwise will fail. 
max_events = max(len(self._fd_to_key), 1) ready = [] fd_events = _syscall_wrapper(self._epoll.poll, True, timeout=timeout, maxevents=max_events) for fd, event_mask in fd_events: events = 0 if event_mask & ~select.EPOLLIN: events |= EVENT_WRITE if event_mask & ~select.EPOLLOUT: events |= EVENT_READ key = self._key_from_fd(fd) if key: ready.append((key, events & key.events)) return ready def close(self): self._epoll.close() super(EpollSelector, self).close() __all__.append('EpollSelector') if hasattr(select, "devpoll"): class DevpollSelector(BaseSelector): """Solaris /dev/poll selector.""" def __init__(self): super(DevpollSelector, self).__init__() self._devpoll = select.devpoll() def fileno(self): return self._devpoll.fileno() def register(self, fileobj, events, data=None): key = super(DevpollSelector, self).register(fileobj, events, data) poll_events = 0 if events & EVENT_READ: poll_events |= select.POLLIN if events & EVENT_WRITE: poll_events |= select.POLLOUT self._devpoll.register(key.fd, poll_events) return key def unregister(self, fileobj): key = super(DevpollSelector, self).unregister(fileobj) self._devpoll.unregister(key.fd) return key def _wrap_poll(self, timeout=None): """ Wrapper function for select.poll.poll() so that _syscall_wrapper can work with only seconds. """ if timeout is not None: if timeout <= 0: timeout = 0 else: # select.devpoll.poll() has a resolution of 1 millisecond, # round away from zero to wait *at least* timeout seconds. timeout = math.ceil(timeout * 1e3) result = self._devpoll.poll(timeout) return result def select(self, timeout=None): ready = [] fd_events = _syscall_wrapper(self._wrap_poll, True, timeout=timeout) for fd, event_mask in fd_events: events = 0 if event_mask & ~select.POLLIN: events |= EVENT_WRITE if event_mask & ~select.POLLOUT: events |= EVENT_READ key = self._key_from_fd(fd) if key: ready.append((key, events & key.events)) return ready def close(self): self._devpoll.close() super(DevpollSelector, self).close() __all__.append('DevpollSelector') if hasattr(select, "kqueue"): class KqueueSelector(BaseSelector): """ Kqueue / Kevent-based selector """ def __init__(self): super(KqueueSelector, self).__init__() self._kqueue = select.kqueue() def fileno(self): return self._kqueue.fileno() def register(self, fileobj, events, data=None): key = super(KqueueSelector, self).register(fileobj, events, data) if events & EVENT_READ: kevent = select.kevent(key.fd, select.KQ_FILTER_READ, select.KQ_EV_ADD) _syscall_wrapper(self._kqueue.control, False, [kevent], 0, 0) if events & EVENT_WRITE: kevent = select.kevent(key.fd, select.KQ_FILTER_WRITE, select.KQ_EV_ADD) _syscall_wrapper(self._kqueue.control, False, [kevent], 0, 0) return key def unregister(self, fileobj): key = super(KqueueSelector, self).unregister(fileobj) if key.events & EVENT_READ: kevent = select.kevent(key.fd, select.KQ_FILTER_READ, select.KQ_EV_DELETE) try: _syscall_wrapper(self._kqueue.control, False, [kevent], 0, 0) except SelectorError: pass if key.events & EVENT_WRITE: kevent = select.kevent(key.fd, select.KQ_FILTER_WRITE, select.KQ_EV_DELETE) try: _syscall_wrapper(self._kqueue.control, False, [kevent], 0, 0) except SelectorError: pass return key def select(self, timeout=None): if timeout is not None: timeout = max(timeout, 0) max_events = len(self._fd_to_key) * 2 ready_fds = {} kevent_list = _syscall_wrapper(self._kqueue.control, True, None, max_events, timeout) for kevent in kevent_list: fd = kevent.ident event_mask = kevent.filter events = 0 if event_mask == select.KQ_FILTER_READ: events |= 
EVENT_READ if event_mask == select.KQ_FILTER_WRITE: events |= EVENT_WRITE key = self._key_from_fd(fd) if key: if key.fd not in ready_fds: ready_fds[key.fd] = (key, events & key.events) else: old_events = ready_fds[key.fd][1] ready_fds[key.fd] = (key, (events | old_events) & key.events) return list(ready_fds.values()) def close(self): self._kqueue.close() super(KqueueSelector, self).close() __all__.append('KqueueSelector') # Choose the best implementation, roughly: # kqueue == epoll == devpoll > poll > select. # select() also can't accept a FD > FD_SETSIZE (usually around 1024) if 'KqueueSelector' in globals(): # Platform-specific: Mac OS and BSD DefaultSelector = KqueueSelector elif 'DevpollSelector' in globals(): DefaultSelector = DevpollSelector elif 'EpollSelector' in globals(): # Platform-specific: Linux DefaultSelector = EpollSelector elif 'PollSelector' in globals(): # Platform-specific: Linux DefaultSelector = PollSelector elif 'SelectSelector' in globals(): # Platform-specific: Windows DefaultSelector = SelectSelector else: # Platform-specific: AppEngine def no_selector(_): raise ValueError("Platform does not have a selector") DefaultSelector = no_selector HAS_SELECT = False
gpl-3.0
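An illustrative sketch of the selector API defined in the record above (not part of the original module; sock is assumed to be a connected, non-blocking socket created elsewhere):

# Illustrative sketch -- 'sock' is assumed to exist and be non-blocking.
selector = DefaultSelector()
selector.register(sock, EVENT_READ | EVENT_WRITE, data='my-connection')

for key, events in selector.select(timeout=1.0):
    if events & EVENT_READ:
        print(key.fileobj.recv(4096))    # key.data == 'my-connection'
    if events & EVENT_WRITE:
        key.fileobj.send(b'ping')

selector.unregister(sock)
selector.close()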
USGSDenverPychron/pychron
pychron/rpc/rpcable.py
1
1606
# =============================================================================== # Copyright 2012 Jake Ross # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # =============================================================================== # ============= enthought library imports ======================= # ============= standard library imports ======================== # ============= local library imports ========================== from pychron.config_loadable import ConfigLoadable class RPCable(ConfigLoadable): rpc_server = None def load_rpc_server(self, port): self.info('starting rpc server port={}'.format(port)) from pychron.rpc.server import RPCServer self.rpc_server = RPCServer(manager=self, port=port) self.rpc_server.bootstrap() def _load_hook(self, config): if config.has_section('RPC'): rpc_port = self.config_get(config, 'RPC', 'port', cast='int') if rpc_port: self.load_rpc_server(rpc_port) # ============= EOF =============================================
apache-2.0
Johnzero/erp
openerp/addons/base/res/report/__init__.py
79
1201
# -*- coding: utf-8 -*- ############################################################################## # # OpenERP, Open Source Management Solution # Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>). # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## #from report import report_sxw #report_sxw.report_sxw('report.partner.list', 'res.partner', 'addons/base/res/partner/report/partner_list.rml') # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
agpl-3.0
vdenPython/wwwgaseng
wwwgaseng/settings.py
1
3071
""" Django settings for wwwgaseng project. Generated by 'django-admin startproject' using Django 1.8.3. For more information on this file, see https://docs.djangoproject.com/en/1.8/topics/settings/ For the full list of settings and their values, see https://docs.djangoproject.com/en/1.8/ref/settings/ """ # Build paths inside the project like this: os.path.join(BASE_DIR, ...) import os BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) # Quick-start development settings - unsuitable for production # See https://docs.djangoproject.com/en/1.8/howto/deployment/checklist/ # SECURITY WARNING: keep the secret key used in production secret! SECRET_KEY = '^#f2&b!=kivg))3%%622c*wizr!p!mo7p+z6ablz&vs)7b6_xt' # SECURITY WARNING: don't run with debug turned on in production! DEBUG = True ALLOWED_HOSTS = [] # Application definition INSTALLED_APPS = ( 'django.contrib.admin', 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sessions', 'django.contrib.messages', 'django.contrib.staticfiles', 'ge', # Базавое приложени 'pto', # Проиложение для отдела ПТО 'oks', # Приложение для отдела ОКС ) MIDDLEWARE_CLASSES = ( 'django.contrib.sessions.middleware.SessionMiddleware', 'django.middleware.common.CommonMiddleware', 'django.middleware.csrf.CsrfViewMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', 'django.contrib.auth.middleware.SessionAuthenticationMiddleware', 'django.contrib.messages.middleware.MessageMiddleware', 'django.middleware.clickjacking.XFrameOptionsMiddleware', 'django.middleware.security.SecurityMiddleware', ) ROOT_URLCONF = 'wwwgaseng.urls' TEMPLATES = [ { 'BACKEND': 'django.template.backends.django.DjangoTemplates', 'DIRS': [], 'APP_DIRS': True, 'OPTIONS': { 'context_processors': [ 'django.template.context_processors.debug', 'django.template.context_processors.request', 'django.contrib.auth.context_processors.auth', 'django.contrib.messages.context_processors.messages', ], }, }, ] WSGI_APPLICATION = 'wwwgaseng.wsgi.application' # Database # https://docs.djangoproject.com/en/1.8/ref/settings/#databases DATABASES = { 'default': { 'ENGINE': 'django.db.backends.postgresql_psycopg2', 'NAME': 'gaseng', 'USER': 'wwwadmin', 'PASSWORD': 'HMgaz004', 'HOST': '192.168.0.34', 'PORT': '5432', } } # Internationalization # https://docs.djangoproject.com/en/1.8/topics/i18n/ LANGUAGE_CODE = 'ru-RU' TIME_ZONE = 'Asia/Yekaterinburg' USE_I18N = True USE_L10N = True USE_TZ = True # Static files (CSS, JavaScript, Images) # https://docs.djangoproject.com/en/1.8/howto/static-files/ STATIC_URL = '/static/' MEDIA_ROOT = os.path.join(os.path.abspath(os.path.dirname(__file__)), 'media') MEDIA_URL = '/media/' ADMIN_MEDIA_PREFIX = '/media/admin/'
apache-2.0
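An illustrative sketch of loading the settings module from the record above (not part of the original project; assumes Django 1.8 is installed and the wwwgaseng package is importable):

import os
import django
from django.conf import settings

os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'wwwgaseng.settings')
django.setup()                                  # standalone configuration, Django 1.7+
print(settings.TIME_ZONE)                       # 'Asia/Yekaterinburg'
print(settings.DATABASES['default']['ENGINE'])  # 'django.db.backends.postgresql_psycopg2'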
modicum/wyoming
source/conf.py
1
11524
# -*- coding: utf-8 -*- # # wyoming documentation build configuration file, created by # sphinx-quickstart on Mon Jan 25 11:12:20 2016. # # This file is execfile()d with the current directory set to its # containing dir. # # Note that not all possible configuration values are present in this # autogenerated file. # # All configuration values have a default; values that are commented out # serve to show the default. import sys import os # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. #sys.path.insert(0, os.path.abspath('.')) # -- General configuration ------------------------------------------------ # If your documentation needs a minimal Sphinx version, state it here. #needs_sphinx = '1.0' # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom # ones. extensions = [ 'sphinx.ext.autodoc', 'sphinx.ext.doctest', 'sphinx.ext.intersphinx', 'sphinx.ext.todo', 'sphinx.ext.coverage', 'sphinx.ext.mathjax', 'sphinx.ext.ifconfig', 'sphinx.ext.viewcode', ] # Add any paths that contain templates here, relative to this directory. templates_path = ['_templates'] # The suffix(es) of source filenames. # You can specify multiple suffix as a list of string: # source_suffix = ['.rst', '.md'] source_suffix = '.rst' # The encoding of source files. #source_encoding = 'utf-8-sig' # The master toctree document. master_doc = 'index' # General information about the project. project = u'wyoming' copyright = u'2016, Modicum' author = u'Modicum' # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the # built documents. # # The short X.Y version. version = u'0.0' # The full version, including alpha/beta/rc tags. release = u'0.0.0' # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. # # This is also used if you do content translation via gettext catalogs. # Usually you set "language" from the command line for these cases. language = 'en' # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: #today = '' # Else, today_fmt is used as the format for a strftime call. #today_fmt = '%B %d, %Y' # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. exclude_patterns = [] # The reST default role (used for this markup: `text`) to use for all # documents. #default_role = None # If true, '()' will be appended to :func: etc. cross-reference text. #add_function_parentheses = True # If true, the current module name will be prepended to all description # unit titles (such as .. function::). #add_module_names = True # If true, sectionauthor and moduleauthor directives will be shown in the # output. They are ignored by default. #show_authors = False # The name of the Pygments (syntax highlighting) style to use. pygments_style = 'sphinx' # A list of ignored prefixes for module index sorting. #modindex_common_prefix = [] # If true, keep warnings as "system message" paragraphs in the built documents. #keep_warnings = False # If true, `todo` and `todoList` produce output, else they produce nothing. 
todo_include_todos = True # -- Options for HTML output ---------------------------------------------- # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. html_theme = 'default' # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. #html_theme_options = {} # Add any paths that contain custom themes here, relative to this directory. #html_theme_path = [] # The name for this set of Sphinx documents. If None, it defaults to # "<project> v<release> documentation". #html_title = None # A shorter title for the navigation bar. Default is the same as html_title. #html_short_title = None # The name of an image file (relative to this directory) to place at the top # of the sidebar. #html_logo = None # The name of an image file (within the static path) to use as favicon of the # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 # pixels large. #html_favicon = None # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". html_static_path = ['_static'] # Add any extra paths that contain custom files (such as robots.txt or # .htaccess) here, relative to this directory. These files are copied # directly to the root of the documentation. #html_extra_path = [] # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, # using the given strftime format. #html_last_updated_fmt = '%b %d, %Y' # If true, SmartyPants will be used to convert quotes and dashes to # typographically correct entities. #html_use_smartypants = True # Custom sidebar templates, maps document names to template names. #html_sidebars = {} # Additional templates that should be rendered to pages, maps page names to # template names. #html_additional_pages = {} # If false, no module index is generated. #html_domain_indices = True # If false, no index is generated. #html_use_index = True # If true, the index is split into individual pages for each letter. #html_split_index = False # If true, links to the reST sources are added to the pages. #html_show_sourcelink = True # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. #html_show_sphinx = True # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. #html_show_copyright = True # If true, an OpenSearch description file will be output, and all pages will # contain a <link> tag referring to it. The value of this option must be the # base URL from which the finished HTML is served. #html_use_opensearch = '' # This is the file name suffix for HTML files (e.g. ".xhtml"). #html_file_suffix = None # Language to be used for generating the HTML full-text search index. # Sphinx supports the following languages: # 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' # 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' #html_search_language = 'en' # A dictionary with options for the search language support, empty by default. # Now only 'ja' uses this config value #html_search_options = {'type': 'default'} # The name of a javascript file (relative to the configuration directory) that # implements a search results scorer. If empty, the default will be used. #html_search_scorer = 'scorer.js' # Output file base name for HTML help builder. 
htmlhelp_basename = 'wyomingdoc' # -- Options for LaTeX output --------------------------------------------- latex_elements = { # The paper size ('letterpaper' or 'a4paper'). #'papersize': 'letterpaper', # The font size ('10pt', '11pt' or '12pt'). #'pointsize': '10pt', # Additional stuff for the LaTeX preamble. #'preamble': '', # Latex figure (float) alignment #'figure_align': 'htbp', } # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, # author, documentclass [howto, manual, or own class]). latex_documents = [ (master_doc, 'wyoming.tex', u'wyoming Documentation', u'Modicum', 'manual'), ] # The name of an image file (relative to this directory) to place at the top of # the title page. #latex_logo = None # For "manual" documents, if this is true, then toplevel headings are parts, # not chapters. #latex_use_parts = False # If true, show page references after internal links. #latex_show_pagerefs = False # If true, show URL addresses after external links. #latex_show_urls = False # Documents to append as an appendix to all manuals. #latex_appendices = [] # If false, no module index is generated. #latex_domain_indices = True # -- Options for manual page output --------------------------------------- # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). man_pages = [ (master_doc, 'wyoming', u'wyoming Documentation', [author], 1) ] # If true, show URL addresses after external links. #man_show_urls = False # -- Options for Texinfo output ------------------------------------------- # Grouping the document tree into Texinfo files. List of tuples # (source start file, target name, title, author, # dir menu entry, description, category) texinfo_documents = [ (master_doc, 'wyoming', u'wyoming Documentation', author, 'wyoming', 'One line description of project.', 'Miscellaneous'), ] # Documents to append as an appendix to all manuals. #texinfo_appendices = [] # If false, no module index is generated. #texinfo_domain_indices = True # How to display URL addresses: 'footnote', 'no', or 'inline'. #texinfo_show_urls = 'footnote' # If true, do not generate a @detailmenu in the "Top" node's menu. #texinfo_no_detailmenu = False # -- Options for Epub output ---------------------------------------------- # Bibliographic Dublin Core info. epub_title = project epub_author = author epub_publisher = author epub_copyright = copyright # The basename for the epub file. It defaults to the project name. #epub_basename = project # The HTML theme for the epub output. Since the default themes are not # optimized for small screen space, using the same theme for HTML and epub # output is usually not wise. This defaults to 'epub', a theme designed to save # visual space. #epub_theme = 'epub' # The language of the text. It defaults to the language option # or 'en' if the language is not set. #epub_language = '' # The scheme of the identifier. Typical schemes are ISBN or URL. #epub_scheme = '' # The unique identifier of the text. This can be a ISBN number # or the project homepage. #epub_identifier = '' # A unique identification for the text. #epub_uid = '' # A tuple containing the cover image and cover page html template filenames. #epub_cover = () # A sequence of (type, uri, title) tuples for the guide element of content.opf. #epub_guide = () # HTML files that should be inserted before the pages created by sphinx. # The format is a list of tuples containing the path and title. 
#epub_pre_files = [] # HTML files that should be inserted after the pages created by sphinx. # The format is a list of tuples containing the path and title. #epub_post_files = [] # A list of files that should not be packed into the epub file. epub_exclude_files = ['search.html'] # The depth of the table of contents in toc.ncx. #epub_tocdepth = 3 # Allow duplicate toc entries. #epub_tocdup = True # Choose between 'default' and 'includehidden'. #epub_tocscope = 'default' # Fix unsupported image types using the Pillow. #epub_fix_images = False # Scale large images. #epub_max_image_width = 0 # How to display URL addresses: 'footnote', 'no', or 'inline'. #epub_show_urls = 'inline' # If false, no index is generated. #epub_use_index = True # Example configuration for intersphinx: refer to the Python standard library. intersphinx_mapping = {'python': ('https://docs.python.org/3.4', None)}
mit
SlimRemix/android_external_chromium_org
tools/usb_gadget/server.py
91
3886
# Copyright 2014 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """WSGI application to manage a USB gadget. """ import datetime import hashlib import re import subprocess import sys import time import urllib2 from tornado import httpserver from tornado import ioloop from tornado import web import default_gadget VERSION_PATTERN = re.compile(r'.*usb_gadget-([a-z0-9]{32})\.zip') address = None chip = None claimed_by = None default = default_gadget.DefaultGadget() gadget = None hardware = None interface = None port = None def SwitchGadget(new_gadget): if chip.IsConfigured(): chip.Destroy() global gadget gadget = new_gadget gadget.AddStringDescriptor(3, address) chip.Create(gadget) class VersionHandler(web.RequestHandler): def get(self): version = 'unpackaged' for path in sys.path: match = VERSION_PATTERN.match(path) if match: version = match.group(1) break self.write(version) class UpdateHandler(web.RequestHandler): def post(self): fileinfo = self.request.files['file'][0] match = VERSION_PATTERN.match(fileinfo['filename']) if match is None: self.write('Filename must contain MD5 hash.') self.set_status(400) return content = fileinfo['body'] md5sum = hashlib.md5(content).hexdigest() if md5sum != match.group(1): self.write('File hash does not match.') self.set_status(400) return filename = 'usb_gadget-{}.zip'.format(md5sum) with open(filename, 'wb') as f: f.write(content) args = ['/usr/bin/python', filename, '--interface', interface, '--port', str(port), '--hardware', hardware] if claimed_by is not None: args.extend(['--start-claimed', claimed_by]) print 'Reloading with version {}...'.format(md5sum) global http_server if chip.IsConfigured(): chip.Destroy() http_server.stop() child = subprocess.Popen(args, close_fds=True) while True: child.poll() if child.returncode is not None: self.write('New package exited with error {}.' .format(child.returncode)) self.set_status(500) http_server = httpserver.HTTPServer(app) http_server.listen(port) SwitchGadget(gadget) return try: f = urllib2.urlopen('http://{}/version'.format(address)) if f.getcode() == 200: # Update complete, wait 1 second to make sure buffers are flushed. io_loop = ioloop.IOLoop.instance() io_loop.add_timeout(datetime.timedelta(seconds=1), io_loop.stop) return except urllib2.URLError: pass time.sleep(0.1) class ClaimHandler(web.RequestHandler): def post(self): global claimed_by if claimed_by is None: claimed_by = self.get_argument('session_id') else: self.write('Device is already claimed by "{}".'.format(claimed_by)) self.set_status(403) class UnclaimHandler(web.RequestHandler): def post(self): global claimed_by claimed_by = None if gadget != default: SwitchGadget(default) class UnconfigureHandler(web.RequestHandler): def post(self): SwitchGadget(default) class DisconnectHandler(web.RequestHandler): def post(self): if chip.IsConfigured(): chip.Destroy() class ReconnectHandler(web.RequestHandler): def post(self): if not chip.IsConfigured(): chip.Create(gadget) app = web.Application([ (r'/version', VersionHandler), (r'/update', UpdateHandler), (r'/claim', ClaimHandler), (r'/unclaim', UnclaimHandler), (r'/unconfigure', UnconfigureHandler), (r'/disconnect', DisconnectHandler), (r'/reconnect', ReconnectHandler), ]) http_server = httpserver.HTTPServer(app)
bsd-3-clause
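An illustrative sketch of talking to the gadget web service from the record above (not part of the original file; the host and port of the running service are assumptions):

import urllib
import urllib2

base = 'http://10.0.0.2:8080'                   # assumed address of the running service
print(urllib2.urlopen(base + '/version').read())
data = urllib.urlencode({'session_id': 'my-test-session'})
urllib2.urlopen(base + '/claim', data)          # POST with form body claims the device
urllib2.urlopen(base + '/unclaim', '')          # empty POST releases it again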
Shaps/ansible
test/units/inventory/test_group.py
53
5176
# Copyright 2018 Alan Rominger <arominge@redhat.com> # # This file is part of Ansible # # Ansible is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Ansible is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Ansible. If not, see <http://www.gnu.org/licenses/>. from units.compat import unittest from ansible.inventory.group import Group from ansible.inventory.host import Host from ansible.errors import AnsibleError class TestGroup(unittest.TestCase): def test_depth_update(self): A = Group('A') B = Group('B') Z = Group('Z') A.add_child_group(B) A.add_child_group(Z) self.assertEqual(A.depth, 0) self.assertEqual(Z.depth, 1) self.assertEqual(B.depth, 1) def test_depth_update_dual_branches(self): alpha = Group('alpha') A = Group('A') alpha.add_child_group(A) B = Group('B') A.add_child_group(B) Z = Group('Z') alpha.add_child_group(Z) beta = Group('beta') B.add_child_group(beta) Z.add_child_group(beta) self.assertEqual(alpha.depth, 0) # apex self.assertEqual(beta.depth, 3) # alpha -> A -> B -> beta omega = Group('omega') omega.add_child_group(alpha) # verify that both paths are traversed to get the max depth value self.assertEqual(B.depth, 3) # omega -> alpha -> A -> B self.assertEqual(beta.depth, 4) # B -> beta def test_depth_recursion(self): A = Group('A') B = Group('B') A.add_child_group(B) # hypothetical of adding B as child group to A A.parent_groups.append(B) B.child_groups.append(A) # can't update depths of groups, because of loop with self.assertRaises(AnsibleError): B._check_children_depth() def test_loop_detection(self): A = Group('A') B = Group('B') C = Group('C') A.add_child_group(B) B.add_child_group(C) with self.assertRaises(AnsibleError): C.add_child_group(A) def test_direct_host_ordering(self): """Hosts are returned in order they are added """ group = Group('A') # host names not added in alphabetical order host_name_list = ['z', 'b', 'c', 'a', 'p', 'q'] expected_hosts = [] for host_name in host_name_list: h = Host(host_name) group.add_host(h) expected_hosts.append(h) assert group.get_hosts() == expected_hosts def test_sub_group_host_ordering(self): """With multiple nested groups, asserts that hosts are returned in deterministic order """ top_group = Group('A') expected_hosts = [] for name in ['z', 'b', 'c', 'a', 'p', 'q']: child = Group('group_{0}'.format(name)) top_group.add_child_group(child) host = Host('host_{0}'.format(name)) child.add_host(host) expected_hosts.append(host) assert top_group.get_hosts() == expected_hosts def test_populates_descendant_hosts(self): A = Group('A') B = Group('B') C = Group('C') h = Host('h') C.add_host(h) A.add_child_group(B) # B is child of A B.add_child_group(C) # C is descendant of A A.add_child_group(B) self.assertEqual(set(h.groups), set([C, B, A])) h2 = Host('h2') C.add_host(h2) self.assertEqual(set(h2.groups), set([C, B, A])) def test_ancestor_example(self): # see docstring for Group._walk_relationship groups = {} for name in ['A', 'B', 'C', 'D', 'E', 'F']: groups[name] = Group(name) # first row groups['A'].add_child_group(groups['D']) groups['B'].add_child_group(groups['D']) 
groups['B'].add_child_group(groups['E']) groups['C'].add_child_group(groups['D']) # second row groups['D'].add_child_group(groups['E']) groups['D'].add_child_group(groups['F']) groups['E'].add_child_group(groups['F']) self.assertEqual( set(groups['F'].get_ancestors()), set([ groups['A'], groups['B'], groups['C'], groups['D'], groups['E'] ]) ) def test_ancestors_recursive_loop_safe(self): ''' The get_ancestors method may be referenced before circular parenting checks, so the method is expected to be stable even with loops ''' A = Group('A') B = Group('B') A.parent_groups.append(B) B.parent_groups.append(A) # finishes in finite time self.assertEqual(A.get_ancestors(), set([A, B]))
gpl-3.0
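A small illustrative sketch of the Group/Host API exercised by the tests above (not part of the original test file):

from ansible.inventory.group import Group
from ansible.inventory.host import Host

web = Group('web')
east = Group('web_east')
web.add_child_group(east)                       # 'web_east' becomes a child of 'web'
east.add_host(Host('host1'))

print([h.name for h in web.get_hosts()])        # hosts of all descendants: ['host1']
print([g.name for g in east.get_ancestors()])   # ['web']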
fnouama/intellij-community
python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/fixes/fix_has_key.py
326
3227
# Copyright 2006 Google, Inc. All Rights Reserved. # Licensed to PSF under a Contributor Agreement. """Fixer for has_key(). Calls to .has_key() methods are expressed in terms of the 'in' operator: d.has_key(k) -> k in d CAVEATS: 1) While the primary target of this fixer is dict.has_key(), the fixer will change any has_key() method call, regardless of its class. 2) Cases like this will not be converted: m = d.has_key if m(k): ... Only *calls* to has_key() are converted. While it is possible to convert the above to something like m = d.__contains__ if m(k): ... this is currently not done. """ # Local imports from .. import pytree from ..pgen2 import token from .. import fixer_base from ..fixer_util import Name, parenthesize class FixHasKey(fixer_base.BaseFix): BM_compatible = True PATTERN = """ anchor=power< before=any+ trailer< '.' 'has_key' > trailer< '(' ( not(arglist | argument<any '=' any>) arg=any | arglist<(not argument<any '=' any>) arg=any ','> ) ')' > after=any* > | negation=not_test< 'not' anchor=power< before=any+ trailer< '.' 'has_key' > trailer< '(' ( not(arglist | argument<any '=' any>) arg=any | arglist<(not argument<any '=' any>) arg=any ','> ) ')' > > > """ def transform(self, node, results): assert results syms = self.syms if (node.parent.type == syms.not_test and self.pattern.match(node.parent)): # Don't transform a node matching the first alternative of the # pattern when its parent matches the second alternative return None negation = results.get("negation") anchor = results["anchor"] prefix = node.prefix before = [n.clone() for n in results["before"]] arg = results["arg"].clone() after = results.get("after") if after: after = [n.clone() for n in after] if arg.type in (syms.comparison, syms.not_test, syms.and_test, syms.or_test, syms.test, syms.lambdef, syms.argument): arg = parenthesize(arg) if len(before) == 1: before = before[0] else: before = pytree.Node(syms.power, before) before.prefix = u" " n_op = Name(u"in", prefix=u" ") if negation: n_not = Name(u"not", prefix=u" ") n_op = pytree.Node(syms.comp_op, (n_not, n_op)) new = pytree.Node(syms.comparison, (arg, n_op, before)) if after: new = parenthesize(new) new = pytree.Node(syms.power, (new,) + tuple(after)) if node.parent.type in (syms.comparison, syms.expr, syms.xor_expr, syms.and_expr, syms.shift_expr, syms.arith_expr, syms.term, syms.factor, syms.power): new = parenthesize(new) new.prefix = prefix return new
apache-2.0
scottdangelo/RemoveVolumeMangerLocks
cinder/volume/drivers/srb.py
2
33438
# Copyright (c) 2014 Scality # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """ Volume driver for the Scality REST Block storage system This driver provisions Linux SRB volumes leveraging RESTful storage platforms (e.g. Scality CDMI). """ import contextlib import functools import re import sys import time from oslo_concurrency import lockutils from oslo_concurrency import processutils as putils from oslo_config import cfg from oslo_log import log as logging from oslo_utils import excutils from oslo_utils import units import six from six.moves import range from cinder.brick.local_dev import lvm from cinder import exception from cinder.i18n import _, _LI, _LE, _LW from cinder.image import image_utils from cinder import utils from cinder.volume import driver from cinder.volume import utils as volutils LOG = logging.getLogger(__name__) srb_opts = [ cfg.StrOpt('srb_base_urls', default=None, help='Comma-separated list of REST servers IP to connect to. ' '(eg http://IP1/,http://IP2:81/path'), ] CONF = cfg.CONF CONF.register_opts(srb_opts) ACCEPTED_REST_SERVER = re.compile(r'^http://' '(\d{1,3}\.){3}\d{1,3}' '(:\d+)?/[a-zA-Z0-9\-_\/]*$') class retry(object): SLEEP_NONE = 'none' SLEEP_DOUBLE = 'double' SLEEP_INCREMENT = 'increment' def __init__(self, exceptions, count, sleep_mechanism=SLEEP_INCREMENT, sleep_factor=1): if sleep_mechanism not in [self.SLEEP_NONE, self.SLEEP_DOUBLE, self.SLEEP_INCREMENT]: raise ValueError('Invalid value for `sleep_mechanism` argument') self._exceptions = exceptions self._count = count self._sleep_mechanism = sleep_mechanism self._sleep_factor = sleep_factor def __call__(self, fun): func_name = fun.func_name @functools.wraps(fun) def wrapped(*args, **kwargs): sleep_time = self._sleep_factor exc_info = None for attempt in range(self._count): if attempt != 0: LOG.warning(_LW('Retrying failed call to %(func)s, ' 'attempt %(attempt)i.'), {'func': func_name, 'attempt': attempt}) try: return fun(*args, **kwargs) except self._exceptions: exc_info = sys.exc_info() if attempt != self._count - 1: if self._sleep_mechanism == self.SLEEP_NONE: continue elif self._sleep_mechanism == self.SLEEP_INCREMENT: time.sleep(sleep_time) sleep_time += self._sleep_factor elif self._sleep_mechanism == self.SLEEP_DOUBLE: time.sleep(sleep_time) sleep_time *= 2 else: raise ValueError('Unknown sleep mechanism: %r' % self._sleep_mechanism) six.reraise(exc_info[0], exc_info[1], exc_info[2]) return wrapped class LVM(lvm.LVM): def activate_vg(self): """Activate the Volume Group associated with this instantiation. :raises: putils.ProcessExecutionError """ cmd = ['vgchange', '-ay', self.vg_name] try: self._execute(*cmd, root_helper=self._root_helper, run_as_root=True) except putils.ProcessExecutionError as err: LOG.exception(_LE('Error activating Volume Group')) LOG.error(_LE('Cmd :%s'), err.cmd) LOG.error(_LE('StdOut :%s'), err.stdout) LOG.error(_LE('StdErr :%s'), err.stderr) raise def deactivate_vg(self): """Deactivate the Volume Group associated with this instantiation. 
This forces LVM to release any reference to the device. :raises: putils.ProcessExecutionError """ cmd = ['vgchange', '-an', self.vg_name] try: self._execute(*cmd, root_helper=self._root_helper, run_as_root=True) except putils.ProcessExecutionError as err: LOG.exception(_LE('Error deactivating Volume Group')) LOG.error(_LE('Cmd :%s'), err.cmd) LOG.error(_LE('StdOut :%s'), err.stdout) LOG.error(_LE('StdErr :%s'), err.stderr) raise def destroy_vg(self): """Destroy the Volume Group associated with this instantiation. :raises: putils.ProcessExecutionError """ cmd = ['vgremove', '-f', self.vg_name] try: self._execute(*cmd, root_helper=self._root_helper, run_as_root=True) except putils.ProcessExecutionError as err: LOG.exception(_LE('Error destroying Volume Group')) LOG.error(_LE('Cmd :%s'), err.cmd) LOG.error(_LE('StdOut :%s'), err.stdout) LOG.error(_LE('StdErr :%s'), err.stderr) raise def pv_resize(self, pv_name, new_size_str): """Extend the size of an existing PV (for virtual PVs). :raises: putils.ProcessExecutionError """ try: self._execute('pvresize', '--setphysicalvolumesize', new_size_str, pv_name, root_helper=self._root_helper, run_as_root=True) except putils.ProcessExecutionError as err: LOG.exception(_LE('Error resizing Physical Volume')) LOG.error(_LE('Cmd :%s'), err.cmd) LOG.error(_LE('StdOut :%s'), err.stdout) LOG.error(_LE('StdErr :%s'), err.stderr) raise def extend_thin_pool(self): """Extend the size of the thin provisioning pool. This method extends the size of a thin provisioning pool to 95% of the size of the VG, if the VG is configured as thin and owns a thin provisioning pool. :raises: putils.ProcessExecutionError """ if self.vg_thin_pool is None: return new_size_str = self._calculate_thin_pool_size() try: self._execute('lvextend', '-L', new_size_str, "%s/%s-pool" % (self.vg_name, self.vg_name), root_helper=self._root_helper, run_as_root=True) except putils.ProcessExecutionError as err: LOG.exception(_LE('Error extending thin provisioning pool')) LOG.error(_LE('Cmd :%s'), err.cmd) LOG.error(_LE('StdOut :%s'), err.stdout) LOG.error(_LE('StdErr :%s'), err.stderr) raise @contextlib.contextmanager def patched(obj, attr, fun): """Context manager to locally patch a method. Within the managed context, the `attr` method of `obj` will be replaced by a method which calls `fun` passing in the original `attr` attribute of `obj` as well as any positional and keyword arguments. At the end of the context, the original method is restored. """ orig = getattr(obj, attr) def patch(*args, **kwargs): return fun(orig, *args, **kwargs) setattr(obj, attr, patch) try: yield finally: setattr(obj, attr, orig) @contextlib.contextmanager def handle_process_execution_error(message, info_message, reraise=True): """Consistently handle `putils.ProcessExecutionError` exceptions This context-manager will catch any `putils.ProcessExecutionError` exceptions raised in the managed block, and generate logging output accordingly. The value of the `message` argument will be logged at `logging.ERROR` level, and the `info_message` argument at `logging.INFO` level. Finally the command string, exit code, standard output and error output of the process will be logged at `logging.DEBUG` level. The `reraise` argument specifies what should happen when a `putils.ProcessExecutionError` is caught. If it's equal to `True`, the exception will be re-raised. If it's some other non-`False` object, this object will be raised instead (so you most likely want it to be some `Exception`). 
Any `False` value will result in the exception to be swallowed. """ try: yield except putils.ProcessExecutionError as exc: LOG.error(message) LOG.info(info_message) LOG.debug('Command : %s', exc.cmd) LOG.debug('Exit Code : %r', exc.exit_code) LOG.debug('StdOut : %s', exc.stdout) LOG.debug('StdErr : %s', exc.stderr) if reraise is True: raise elif reraise: raise reraise # pylint: disable=E0702 @contextlib.contextmanager def temp_snapshot(driver, volume, src_vref): snapshot = {'volume_name': src_vref['name'], 'volume_id': src_vref['id'], 'volume_size': src_vref['size'], 'name': 'snapshot-clone-%s' % volume['id'], 'id': 'tmp-snap-%s' % volume['id'], 'size': src_vref['size']} driver.create_snapshot(snapshot) try: yield snapshot finally: driver.delete_snapshot(snapshot) @contextlib.contextmanager def temp_raw_device(driver, volume): driver._attach_file(volume) try: yield finally: driver._detach_file(volume) @contextlib.contextmanager def temp_lvm_device(driver, volume): with temp_raw_device(driver, volume): vg = driver._get_lvm_vg(volume) vg.activate_vg() yield vg class SRBDriver(driver.VolumeDriver): """Scality SRB volume driver This driver manages volumes provisioned by the Scality REST Block driver Linux kernel module, backed by RESTful storage providers (e.g. Scality CDMI). """ VERSION = '1.1.0' # Over-allocation ratio (multiplied with requested size) for thin # provisioning OVER_ALLOC_RATIO = 2 SNAPSHOT_PREFIX = 'snapshot' def __init__(self, *args, **kwargs): super(SRBDriver, self).__init__(*args, **kwargs) self.configuration.append_config_values(srb_opts) self.urls_setup = False self.backend_name = None self.base_urls = None self.root_helper = utils.get_root_helper() self._attached_devices = {} def _setup_urls(self): if not self.base_urls: message = _("No url configured") raise exception.VolumeBackendAPIException(data=message) with handle_process_execution_error( message=_LE('Cound not setup urls on the Block Driver.'), info_message=_LI('Error creating Volume'), reraise=False): cmd = self.base_urls path = '/sys/class/srb/add_urls' putils.execute('tee', path, process_input=cmd, root_helper=self.root_helper, run_as_root=True) self.urls_setup = True def do_setup(self, context): """Any initialization the volume driver does while starting.""" self.backend_name = self.configuration.safe_get('volume_backend_name') base_urls = self.configuration.safe_get('srb_base_urls') sane_urls = [] if base_urls: for url in base_urls.split(','): stripped_url = url.strip() if ACCEPTED_REST_SERVER.match(stripped_url): sane_urls.append(stripped_url) else: LOG.warning(_LW("%s is not an accepted REST server " "IP address"), stripped_url) self.base_urls = ','.join(sane_urls) self._setup_urls() def check_for_setup_error(self): """Returns an error if prerequisites aren't met.""" if not self.base_urls: LOG.warning(_LW("Configuration variable srb_base_urls" " not set or empty.")) if self.urls_setup is False: message = _("Could not setup urls properly") raise exception.VolumeBackendAPIException(data=message) @classmethod def _is_snapshot(cls, volume): return volume['name'].startswith(cls.SNAPSHOT_PREFIX) @classmethod def _get_volname(cls, volume): """Returns the name of the actual volume If the volume is a snapshot, it returns the name of the parent volume. otherwise, returns the volume's name. 
""" name = volume['name'] if cls._is_snapshot(volume): name = "volume-%s" % (volume['volume_id']) return name @classmethod def _get_volid(cls, volume): """Returns the ID of the actual volume If the volume is a snapshot, it returns the ID of the parent volume. otherwise, returns the volume's id. """ volid = volume['id'] if cls._is_snapshot(volume): volid = volume['volume_id'] return volid @classmethod def _device_name(cls, volume): volume_id = cls._get_volid(volume) name = 'cinder-%s' % volume_id # Device names can't be longer than 32 bytes (incl. \0) return name[:31] @classmethod def _device_path(cls, volume): return "/dev/" + cls._device_name(volume) @classmethod def _escape_snapshot(cls, snapshot_name): # Linux LVM reserves name that starts with snapshot, so that # such volume name can't be created. Mangle it. if not snapshot_name.startswith(cls.SNAPSHOT_PREFIX): return snapshot_name return '_' + snapshot_name @classmethod def _mapper_path(cls, volume): groupname = cls._get_volname(volume) name = volume['name'] if cls._is_snapshot(volume): name = cls._escape_snapshot(name) # NOTE(vish): stops deprecation warning groupname = groupname.replace('-', '--') name = name.replace('-', '--') return "/dev/mapper/%s-%s" % (groupname, name) @staticmethod def _size_int(size_in_g): try: return max(int(size_in_g), 1) except ValueError: message = (_("Invalid size parameter '%s': Cannot be interpreted" " as an integer value.") % size_in_g) LOG.error(message) raise exception.VolumeBackendAPIException(data=message) @classmethod def _set_device_path(cls, volume): volume['provider_location'] = cls._get_volname(volume) return { 'provider_location': volume['provider_location'], } @staticmethod def _activate_lv(orig, *args, **kwargs): """Activate lv. Use with `patched` to patch `lvm.LVM.activate_lv` to ignore `EEXIST` """ try: orig(*args, **kwargs) except putils.ProcessExecutionError as exc: if exc.exit_code != 5: raise else: LOG.debug('`activate_lv` returned 5, ignored') def _get_lvm_vg(self, volume, create_vg=False): # NOTE(joachim): One-device volume group to manage thin snapshots # Get origin volume name even for snapshots volume_name = self._get_volname(volume) physical_volumes = [self._device_path(volume)] with patched(lvm.LVM, 'activate_lv', self._activate_lv): return LVM(volume_name, utils.get_root_helper(), create_vg=create_vg, physical_volumes=physical_volumes, lvm_type='thin', executor=self._execute) @staticmethod def _volume_not_present(vg, volume_name): # Used to avoid failing to delete a volume for which # the create operation partly failed return vg.get_volume(volume_name) is None def _create_file(self, volume): message = _('Could not create volume on any configured REST server.') with handle_process_execution_error( message=message, info_message=_LI('Error creating Volume %s.') % volume['name'], reraise=exception.VolumeBackendAPIException(data=message)): size = self._size_int(volume['size']) * self.OVER_ALLOC_RATIO cmd = volume['name'] cmd += ' %dG' % size path = '/sys/class/srb/create' putils.execute('tee', path, process_input=cmd, root_helper=self.root_helper, run_as_root=True) return self._set_device_path(volume) def _extend_file(self, volume, new_size): message = _('Could not extend volume on any configured REST server.') with handle_process_execution_error( message=message, info_message=(_LI('Error extending Volume %s.') % volume['name']), reraise=exception.VolumeBackendAPIException(data=message)): size = self._size_int(new_size) * self.OVER_ALLOC_RATIO cmd = volume['name'] cmd += ' %dG' % 
size path = '/sys/class/srb/extend' putils.execute('tee', path, process_input=cmd, root_helper=self.root_helper, run_as_root=True) @staticmethod def _destroy_file(volume): message = _('Could not destroy volume on any configured REST server.') volname = volume['name'] with handle_process_execution_error( message=message, info_message=_LI('Error destroying Volume %s.') % volname, reraise=exception.VolumeBackendAPIException(data=message)): cmd = volume['name'] path = '/sys/class/srb/destroy' putils.execute('tee', path, process_input=cmd, root_helper=utils.get_root_helper(), run_as_root=True) # NOTE(joachim): Must only be called within a function decorated by: # @lockutils.synchronized('devices', 'cinder-srb-') def _increment_attached_count(self, volume): """Increments the attach count of the device""" volid = self._get_volid(volume) if volid not in self._attached_devices: self._attached_devices[volid] = 1 else: self._attached_devices[volid] += 1 # NOTE(joachim): Must only be called within a function decorated by: # @lockutils.synchronized('devices', 'cinder-srb-') def _decrement_attached_count(self, volume): """Decrements the attach count of the device""" volid = self._get_volid(volume) if volid not in self._attached_devices: raise exception.VolumeBackendAPIException( (_("Internal error in srb driver: " "Trying to detach detached volume %s.")) % (self._get_volname(volume)) ) self._attached_devices[volid] -= 1 if self._attached_devices[volid] == 0: del self._attached_devices[volid] # NOTE(joachim): Must only be called within a function decorated by: # @lockutils.synchronized('devices', 'cinder-srb-') def _get_attached_count(self, volume): volid = self._get_volid(volume) return self._attached_devices.get(volid, 0) @lockutils.synchronized('devices', 'cinder-srb-') def _is_attached(self, volume): return self._get_attached_count(volume) > 0 @lockutils.synchronized('devices', 'cinder-srb-') def _attach_file(self, volume): name = self._get_volname(volume) devname = self._device_name(volume) LOG.debug('Attaching volume %(name)s as %(devname)s', {'name': name, 'devname': devname}) count = self._get_attached_count(volume) if count == 0: message = (_('Could not attach volume %(vol)s as %(dev)s ' 'on system.') % {'vol': name, 'dev': devname}) with handle_process_execution_error( message=message, info_message=_LI('Error attaching Volume'), reraise=exception.VolumeBackendAPIException(data=message)): cmd = name + ' ' + devname path = '/sys/class/srb/attach' putils.execute('tee', path, process_input=cmd, root_helper=self.root_helper, run_as_root=True) else: LOG.debug('Volume %s already attached', name) self._increment_attached_count(volume) @retry(exceptions=(putils.ProcessExecutionError, ), count=3, sleep_mechanism=retry.SLEEP_INCREMENT, sleep_factor=5) def _do_deactivate(self, volume, vg): vg.deactivate_vg() @retry(exceptions=(putils.ProcessExecutionError, ), count=5, sleep_mechanism=retry.SLEEP_DOUBLE, sleep_factor=1) def _do_detach(self, volume, vg): devname = self._device_name(volume) volname = self._get_volname(volume) cmd = devname path = '/sys/class/srb/detach' try: putils.execute('tee', path, process_input=cmd, root_helper=self.root_helper, run_as_root=True) except putils.ProcessExecutionError: with excutils.save_and_reraise_exception(reraise=True): try: with patched(lvm.LVM, 'activate_lv', self._activate_lv): vg.activate_lv(volname) self._do_deactivate(volume, vg) except putils.ProcessExecutionError: LOG.warning(_LW('All attempts to recover failed detach ' 'of %(volume)s failed.'), {'volume': 
volname}) @lockutils.synchronized('devices', 'cinder-srb-') def _detach_file(self, volume): name = self._get_volname(volume) devname = self._device_name(volume) vg = self._get_lvm_vg(volume) LOG.debug('Detaching device %s', devname) count = self._get_attached_count(volume) if count > 1: LOG.info(_LI('Reference count of %(volume)s is %(count)d, ' 'not detaching.'), {'volume': volume['name'], 'count': count}) return message = (_('Could not detach volume %(vol)s from device %(dev)s.') % {'vol': name, 'dev': devname}) with handle_process_execution_error( message=message, info_message=_LI('Error detaching Volume'), reraise=exception.VolumeBackendAPIException(data=message)): try: if vg is not None: self._do_deactivate(volume, vg) except putils.ProcessExecutionError: LOG.error(_LE('Could not deactivate volume group %s'), self._get_volname(volume)) raise try: self._do_detach(volume, vg=vg) except putils.ProcessExecutionError: LOG.error(_LE('Could not detach volume %(vol)s from device ' '%(dev)s.'), {'vol': name, 'dev': devname}) raise self._decrement_attached_count(volume) def _setup_lvm(self, volume): # NOTE(joachim): One-device volume group to manage thin snapshots size = self._size_int(volume['size']) * self.OVER_ALLOC_RATIO size_str = '%dg' % size vg = self._get_lvm_vg(volume, create_vg=True) vg.create_volume(volume['name'], size_str, lv_type='thin') def _destroy_lvm(self, volume): vg = self._get_lvm_vg(volume) if vg.lv_has_snapshot(volume['name']): LOG.error(_LE('Unable to delete due to existing snapshot ' 'for volume: %s.'), volume['name']) raise exception.VolumeIsBusy(volume_name=volume['name']) vg.destroy_vg() # NOTE(joachim) Force lvm vg flush through a vgs command vgs = vg.get_all_volume_groups(root_helper=self.root_helper, vg_name=vg.vg_name) if len(vgs) != 0: LOG.warning(_LW('Removed volume group %s still appears in vgs.'), vg.vg_name) def _create_and_copy_volume(self, dstvol, srcvol): """Creates a volume from a volume or a snapshot.""" updates = self._create_file(dstvol) # We need devices attached for IO operations. with temp_lvm_device(self, srcvol) as vg, \ temp_raw_device(self, dstvol): self._setup_lvm(dstvol) # Some configurations of LVM do not automatically activate # ThinLVM snapshot LVs. with patched(lvm.LVM, 'activate_lv', self._activate_lv): vg.activate_lv(srcvol['name'], True) # copy_volume expects sizes in MiB, we store integer GiB # be sure to convert before passing in volutils.copy_volume(self._mapper_path(srcvol), self._mapper_path(dstvol), srcvol['volume_size'] * units.Ki, self.configuration.volume_dd_blocksize, execute=self._execute) return updates def create_volume(self, volume): """Creates a volume. Can optionally return a Dictionary of changes to the volume object to be persisted. """ updates = self._create_file(volume) # We need devices attached for LVM operations. 
with temp_raw_device(self, volume): self._setup_lvm(volume) return updates def create_volume_from_snapshot(self, volume, snapshot): """Creates a volume from a snapshot.""" return self._create_and_copy_volume(volume, snapshot) def create_cloned_volume(self, volume, src_vref): """Creates a clone of the specified volume.""" LOG.info(_LI('Creating clone of volume: %s'), src_vref['id']) updates = None with temp_lvm_device(self, src_vref): with temp_snapshot(self, volume, src_vref) as snapshot: updates = self._create_and_copy_volume(volume, snapshot) return updates def delete_volume(self, volume): """Deletes a volume.""" attached = False if self._is_attached(volume): attached = True with temp_lvm_device(self, volume): self._destroy_lvm(volume) self._detach_file(volume) LOG.debug('Deleting volume %(volume_name)s, attached=%(attached)s', {'volume_name': volume['name'], 'attached': attached}) self._destroy_file(volume) def create_snapshot(self, snapshot): """Creates a snapshot.""" with temp_lvm_device(self, snapshot) as vg: # NOTE(joachim) we only want to support thin lvm_types vg.create_lv_snapshot(self._escape_snapshot(snapshot['name']), snapshot['volume_name'], lv_type='thin') def delete_snapshot(self, snapshot): """Deletes a snapshot.""" with temp_lvm_device(self, snapshot) as vg: if self._volume_not_present( vg, self._escape_snapshot(snapshot['name'])): # If the snapshot isn't present, then don't attempt to delete LOG.warning(_LW("snapshot: %s not found, " "skipping delete operations"), snapshot['name']) return vg.delete(self._escape_snapshot(snapshot['name'])) def get_volume_stats(self, refresh=False): """Return the current state of the volume service.""" stats = { 'vendor_name': 'Scality', 'driver_version': self.VERSION, 'storage_protocol': 'Scality Rest Block Device', 'total_capacity_gb': 'infinite', 'free_capacity_gb': 'infinite', 'reserved_percentage': 0, 'volume_backend_name': self.backend_name, } return stats def copy_image_to_volume(self, context, volume, image_service, image_id): """Fetch the image from image_service and write it to the volume.""" with temp_lvm_device(self, volume): image_utils.fetch_to_volume_format(context, image_service, image_id, self._mapper_path(volume), 'qcow2', self.configuration. volume_dd_blocksize, size=volume['size']) def copy_volume_to_image(self, context, volume, image_service, image_meta): """Copy the volume to the specified image.""" with temp_lvm_device(self, volume): image_utils.upload_volume(context, image_service, image_meta, self._mapper_path(volume)) def extend_volume(self, volume, new_size): new_alloc_size = self._size_int(new_size) * self.OVER_ALLOC_RATIO new_size_str = '%dg' % new_alloc_size self._extend_file(volume, new_size) with temp_lvm_device(self, volume) as vg: vg.pv_resize(self._device_path(volume), new_size_str) vg.extend_thin_pool() vg.extend_volume(volume['name'], new_size_str) class SRBISCSIDriver(SRBDriver, driver.ISCSIDriver): """Scality SRB volume driver with ISCSI support This driver manages volumes provisioned by the Scality REST Block driver Linux kernel module, backed by RESTful storage providers (e.g. Scality CDMI), and exports them through ISCSI to Nova. 
""" VERSION = '1.0.0' def __init__(self, *args, **kwargs): self.db = kwargs.get('db') self.target_driver = \ self.target_mapping[self.configuration.safe_get('iscsi_helper')] super(SRBISCSIDriver, self).__init__(*args, **kwargs) self.backend_name =\ self.configuration.safe_get('volume_backend_name') or 'SRB_iSCSI' self.protocol = 'iSCSI' def ensure_export(self, context, volume): device_path = self._mapper_path(volume) model_update = self.target_driver.ensure_export(context, volume, device_path) if model_update: self.db.volume_update(context, volume['id'], model_update) def create_export(self, context, volume, connector): """Creates an export for a logical volume.""" self._attach_file(volume) vg = self._get_lvm_vg(volume) vg.activate_vg() # SRB uses the same name as the volume for the VG volume_path = self._mapper_path(volume) data = self.target_driver.create_export(context, volume, volume_path) return { 'provider_location': data['location'], 'provider_auth': data['auth'], } def remove_export(self, context, volume): # NOTE(joachim) Taken from iscsi._ExportMixin.remove_export # This allows us to avoid "detaching" a device not attached by # an export, and avoid screwing up the device attach refcount. try: # Raises exception.NotFound if export not provisioned iscsi_target = self.target_driver._get_iscsi_target(context, volume['id']) # Raises an Exception if currently not exported location = volume['provider_location'].split(' ') iqn = location[1] self.target_driver.show_target(iscsi_target, iqn=iqn) self.target_driver.remove_export(context, volume) self._detach_file(volume) except exception.NotFound: LOG.warning(_LW('Volume %r not found while trying to remove.'), volume['id']) except Exception as exc: LOG.warning(_LW('Error while removing export: %r'), exc)
apache-2.0
vismartltd/edx-platform
lms/djangoapps/shoppingcart/migrations/0024_auto__add_field_courseregistrationcode_mode_slug.py
109
18586
# -*- coding: utf-8 -*- import datetime from south.db import db from south.v2 import SchemaMigration from django.db import models class Migration(SchemaMigration): def forwards(self, orm): # Adding field 'CourseRegistrationCode.mode_slug' db.add_column('shoppingcart_courseregistrationcode', 'mode_slug', self.gf('django.db.models.fields.CharField')(max_length=100, null=True), keep_default=False) def backwards(self, orm): # Deleting field 'CourseRegistrationCode.mode_slug' db.delete_column('shoppingcart_courseregistrationcode', 'mode_slug') models = { 'auth.group': { 'Meta': {'object_name': 'Group'}, 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}), 'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}) }, 'auth.permission': { 'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'}, 'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}), 'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}) }, 'auth.user': { 'Meta': {'object_name': 'User'}, 'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}), 'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}), 'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}), 'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}), 'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}), 'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}), 'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'}) }, 'contenttypes.contenttype': { 'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"}, 'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}) }, 'shoppingcart.certificateitem': { 'Meta': {'object_name': 'CertificateItem', '_ormbases': ['shoppingcart.OrderItem']}, 'course_enrollment': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['student.CourseEnrollment']"}), 'course_id': ('xmodule_django.models.CourseKeyField', [], {'max_length': 
'128', 'db_index': 'True'}), 'mode': ('django.db.models.fields.SlugField', [], {'max_length': '50'}), 'orderitem_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['shoppingcart.OrderItem']", 'unique': 'True', 'primary_key': 'True'}) }, 'shoppingcart.coupon': { 'Meta': {'object_name': 'Coupon'}, 'code': ('django.db.models.fields.CharField', [], {'max_length': '32', 'db_index': 'True'}), 'course_id': ('xmodule_django.models.CourseKeyField', [], {'max_length': '255'}), 'created_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2015, 1, 12, 0, 0)'}), 'created_by': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}), 'description': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), 'expiration_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}), 'percentage_discount': ('django.db.models.fields.IntegerField', [], {'default': '0'}) }, 'shoppingcart.couponredemption': { 'Meta': {'object_name': 'CouponRedemption'}, 'coupon': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['shoppingcart.Coupon']"}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'order': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['shoppingcart.Order']"}), 'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}) }, 'shoppingcart.courseregcodeitem': { 'Meta': {'object_name': 'CourseRegCodeItem', '_ormbases': ['shoppingcart.OrderItem']}, 'course_id': ('xmodule_django.models.CourseKeyField', [], {'max_length': '128', 'db_index': 'True'}), 'mode': ('django.db.models.fields.SlugField', [], {'default': "'honor'", 'max_length': '50'}), 'orderitem_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['shoppingcart.OrderItem']", 'unique': 'True', 'primary_key': 'True'}) }, 'shoppingcart.courseregcodeitemannotation': { 'Meta': {'object_name': 'CourseRegCodeItemAnnotation'}, 'annotation': ('django.db.models.fields.TextField', [], {'null': 'True'}), 'course_id': ('xmodule_django.models.CourseKeyField', [], {'unique': 'True', 'max_length': '128', 'db_index': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}) }, 'shoppingcart.courseregistrationcode': { 'Meta': {'object_name': 'CourseRegistrationCode'}, 'code': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '32', 'db_index': 'True'}), 'course_id': ('xmodule_django.models.CourseKeyField', [], {'max_length': '255', 'db_index': 'True'}), 'created_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2015, 1, 12, 0, 0)'}), 'created_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'created_by_user'", 'to': "orm['auth.User']"}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'invoice': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['shoppingcart.Invoice']", 'null': 'True'}), 'mode_slug': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True'}), 'order': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'purchase_order'", 'null': 'True', 'to': "orm['shoppingcart.Order']"}) }, 'shoppingcart.donation': { 'Meta': {'object_name': 'Donation', '_ormbases': ['shoppingcart.OrderItem']}, 
'course_id': ('xmodule_django.models.CourseKeyField', [], {'max_length': '255', 'db_index': 'True'}), 'donation_type': ('django.db.models.fields.CharField', [], {'default': "'general'", 'max_length': '32'}), 'orderitem_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['shoppingcart.OrderItem']", 'unique': 'True', 'primary_key': 'True'}) }, 'shoppingcart.donationconfiguration': { 'Meta': {'object_name': 'DonationConfiguration'}, 'change_date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}), 'changed_by': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True', 'on_delete': 'models.PROTECT'}), 'enabled': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}) }, 'shoppingcart.invoice': { 'Meta': {'object_name': 'Invoice'}, 'address_line_1': ('django.db.models.fields.CharField', [], {'max_length': '255'}), 'address_line_2': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True'}), 'address_line_3': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True'}), 'city': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True'}), 'company_contact_email': ('django.db.models.fields.CharField', [], {'max_length': '255'}), 'company_contact_name': ('django.db.models.fields.CharField', [], {'max_length': '255'}), 'company_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}), 'country': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}), 'course_id': ('xmodule_django.models.CourseKeyField', [], {'max_length': '255', 'db_index': 'True'}), 'customer_reference_number': ('django.db.models.fields.CharField', [], {'max_length': '63', 'null': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'internal_reference': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True'}), 'is_valid': ('django.db.models.fields.BooleanField', [], {'default': 'True'}), 'recipient_email': ('django.db.models.fields.CharField', [], {'max_length': '255'}), 'recipient_name': ('django.db.models.fields.CharField', [], {'max_length': '255'}), 'state': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True'}), 'total_amount': ('django.db.models.fields.FloatField', [], {}), 'zip': ('django.db.models.fields.CharField', [], {'max_length': '15', 'null': 'True'}) }, 'shoppingcart.order': { 'Meta': {'object_name': 'Order'}, 'bill_to_cardtype': ('django.db.models.fields.CharField', [], {'max_length': '32', 'blank': 'True'}), 'bill_to_ccnum': ('django.db.models.fields.CharField', [], {'max_length': '8', 'blank': 'True'}), 'bill_to_city': ('django.db.models.fields.CharField', [], {'max_length': '64', 'blank': 'True'}), 'bill_to_country': ('django.db.models.fields.CharField', [], {'max_length': '64', 'blank': 'True'}), 'bill_to_first': ('django.db.models.fields.CharField', [], {'max_length': '64', 'blank': 'True'}), 'bill_to_last': ('django.db.models.fields.CharField', [], {'max_length': '64', 'blank': 'True'}), 'bill_to_postalcode': ('django.db.models.fields.CharField', [], {'max_length': '16', 'blank': 'True'}), 'bill_to_state': ('django.db.models.fields.CharField', [], {'max_length': '8', 'blank': 'True'}), 'bill_to_street1': ('django.db.models.fields.CharField', [], {'max_length': '128', 'blank': 'True'}), 'bill_to_street2': 
('django.db.models.fields.CharField', [], {'max_length': '128', 'blank': 'True'}), 'company_contact_email': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), 'company_contact_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), 'company_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), 'currency': ('django.db.models.fields.CharField', [], {'default': "'usd'", 'max_length': '8'}), 'customer_reference_number': ('django.db.models.fields.CharField', [], {'max_length': '63', 'null': 'True', 'blank': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'order_type': ('django.db.models.fields.CharField', [], {'default': "'personal'", 'max_length': '32'}), 'processor_reply_dump': ('django.db.models.fields.TextField', [], {'blank': 'True'}), 'purchase_time': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}), 'recipient_email': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), 'recipient_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), 'refunded_time': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}), 'status': ('django.db.models.fields.CharField', [], {'default': "'cart'", 'max_length': '32'}), 'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}) }, 'shoppingcart.orderitem': { 'Meta': {'object_name': 'OrderItem'}, 'created': ('model_utils.fields.AutoCreatedField', [], {'default': 'datetime.datetime.now'}), 'currency': ('django.db.models.fields.CharField', [], {'default': "'usd'", 'max_length': '8'}), 'fulfilled_time': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'db_index': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'line_desc': ('django.db.models.fields.CharField', [], {'default': "'Misc. 
Item'", 'max_length': '1024'}), 'list_price': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '30', 'decimal_places': '2'}), 'modified': ('model_utils.fields.AutoLastModifiedField', [], {'default': 'datetime.datetime.now'}), 'order': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['shoppingcart.Order']"}), 'qty': ('django.db.models.fields.IntegerField', [], {'default': '1'}), 'refund_requested_time': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'db_index': 'True'}), 'report_comments': ('django.db.models.fields.TextField', [], {'default': "''"}), 'service_fee': ('django.db.models.fields.DecimalField', [], {'default': '0.0', 'max_digits': '30', 'decimal_places': '2'}), 'status': ('django.db.models.fields.CharField', [], {'default': "'cart'", 'max_length': '32', 'db_index': 'True'}), 'unit_cost': ('django.db.models.fields.DecimalField', [], {'default': '0.0', 'max_digits': '30', 'decimal_places': '2'}), 'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}) }, 'shoppingcart.paidcourseregistration': { 'Meta': {'object_name': 'PaidCourseRegistration', '_ormbases': ['shoppingcart.OrderItem']}, 'course_enrollment': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['student.CourseEnrollment']", 'null': 'True'}), 'course_id': ('xmodule_django.models.CourseKeyField', [], {'max_length': '128', 'db_index': 'True'}), 'mode': ('django.db.models.fields.SlugField', [], {'default': "'honor'", 'max_length': '50'}), 'orderitem_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['shoppingcart.OrderItem']", 'unique': 'True', 'primary_key': 'True'}) }, 'shoppingcart.paidcourseregistrationannotation': { 'Meta': {'object_name': 'PaidCourseRegistrationAnnotation'}, 'annotation': ('django.db.models.fields.TextField', [], {'null': 'True'}), 'course_id': ('xmodule_django.models.CourseKeyField', [], {'unique': 'True', 'max_length': '128', 'db_index': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}) }, 'shoppingcart.registrationcoderedemption': { 'Meta': {'object_name': 'RegistrationCodeRedemption'}, 'course_enrollment': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['student.CourseEnrollment']", 'null': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'order': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['shoppingcart.Order']", 'null': 'True'}), 'redeemed_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2015, 1, 12, 0, 0)', 'null': 'True'}), 'redeemed_by': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}), 'registration_code': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['shoppingcart.CourseRegistrationCode']"}) }, 'student.courseenrollment': { 'Meta': {'ordering': "('user', 'course_id')", 'unique_together': "(('user', 'course_id'),)", 'object_name': 'CourseEnrollment'}, 'course_id': ('xmodule_django.models.CourseKeyField', [], {'max_length': '255', 'db_index': 'True'}), 'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'db_index': 'True', 'blank': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}), 'mode': ('django.db.models.fields.CharField', [], {'default': "'honor'", 'max_length': '100'}), 'user': ('django.db.models.fields.related.ForeignKey', [], {'to': 
"orm['auth.User']"}) } } complete_apps = ['shoppingcart']
agpl-3.0
Lothilius/python-jumble
Permutations.py
1
2071
# Files: Permutations.py
#
# Description: Creates permutations of an entered word.
#
__author__ = 'lothilius'

import math


class Permutations():
    """This module contains functions to generate all unique
        permutations of a string, and to count permutations.
    """
    def countOccurrences(word):
        # create a list of 26 0s to count occurrences of each
        # letter.
        word = word.lower()
        occurs = [0]*26
        for ch in word:
            i = ord(ch) - ord('a')
            occurs[i] += 1
        return occurs

    def howManyPerms(word):
        """Return the number of permutations and unique permutations
            of a string.
        """
        word = word.lower()
        n = len(word)

        # count the occurrences of each letter in word.
        occurs = Permutations.countOccurrences(word)

        # For any letter that recurs, the number of unique
        # permutations is the totalPerms divided by the
        # factorial of that count.
        divisor = 1
        for i in range(26):
            if occurs[i] > 1:
                divisor *= math.factorial(occurs[i])

        totalPerms = math.factorial(n)
        uniquePerms = totalPerms / divisor
        return (totalPerms, uniquePerms)

    # Fixed this so that it doesn't return duplicates.
    def allPermsAux(word, permsSeen):
        """This is an auxiliary function that generates all unique
            permutations of the input string not already in the list
            permsSeen.
        """
        if len(word) <= 1:
            yield word
        else:
            for perm in Permutations.allPermsAux(word[1:], permsSeen):
                for i in range(len(perm)+1):
                    newperm = perm[:i] + word[0] + perm[i:]
                    if not newperm in permsSeen:
                        permsSeen.append(newperm)
                        yield newperm

    def allPerms(word):
        """This function generates all unique permutations of the
            input string.
        """
        return Permutations.allPermsAux(word, [])
mit
hexlism/xx_net
gae_proxy/local/gae_handler.py
1
28513
#!/usr/bin/env python # coding:utf-8 import errno import time import struct import zlib import functools import re import io import string import socket import ssl import httplib import Queue import urlparse import threading from proxy import xlog from connect_manager import https_manager from appids_manager import appid_manager import OpenSSL NetWorkIOError = (socket.error, ssl.SSLError, OpenSSL.SSL.Error, OSError) from config import config from google_ip import google_ip def generate_message_html(title, banner, detail=''): MESSAGE_TEMPLATE = ''' <html><head> <meta http-equiv="content-type" content="text/html;charset=utf-8"> <title>$title</title> <style><!-- body {font-family: arial,sans-serif} div.nav {margin-top: 1ex} div.nav A {font-size: 10pt; font-family: arial,sans-serif} span.nav {font-size: 10pt; font-family: arial,sans-serif; font-weight: bold} div.nav A,span.big {font-size: 12pt; color: #0000cc} div.nav A {font-size: 10pt; color: black} A.l:link {color: #6f6f6f} A.u:link {color: green} //--></style> </head> <body text=#000000 bgcolor=#ffffff> <table border=0 cellpadding=2 cellspacing=0 width=100%> <tr><td bgcolor=#3366cc><font face=arial,sans-serif color=#ffffff><b>Message</b></td></tr> <tr><td> </td></tr></table> <blockquote> <H1>$banner</H1> $detail <p> </blockquote> <table width=100% cellpadding=0 cellspacing=0><tr><td bgcolor=#3366cc><img alt="" width=1 height=4></td></tr></table> </body></html> ''' return string.Template(MESSAGE_TEMPLATE).substitute(title=title, banner=banner, detail=detail) def spawn_later(seconds, target, *args, **kwargs): def wrap(*args, **kwargs): __import__('time').sleep(seconds) try: result = target(*args, **kwargs) except: result = None return result return __import__('thread').start_new_thread(wrap, args, kwargs) skip_headers = frozenset(['Vary', 'Via', 'X-Google-Cache-Control', 'X-Forwarded-For', 'Proxy-Authorization', 'Proxy-Connection', 'Upgrade', 'X-Chrome-Variations', 'Connection', 'Cache-Control' ]) def send_header(wfile, keyword, value): keyword = keyword.title() if keyword == 'Set-Cookie': # https://cloud.google.com/appengine/docs/python/urlfetch/responseobjects for cookie in re.split(r', (?=[^ =]+(?:=|$))', value): wfile.write("%s: %s\r\n" % (keyword, cookie)) #logging.debug("Head1 %s: %s", keyword, cookie) elif keyword == 'Content-Disposition' and '"' not in value: value = re.sub(r'filename=([^"\']+)', 'filename="\\1"', value) wfile.write("%s: %s\r\n" % (keyword, value)) #logging.debug("Head1 %s: %s", keyword, value) else: wfile.write("%s: %s\r\n" % (keyword, value)) #logging.debug("Head1 %s: %s", keyword, value) def _request(sock, headers, payload, bufsize=8192): request_data = 'POST /_gh/ HTTP/1.1\r\n' request_data += ''.join('%s: %s\r\n' % (k, v) for k, v in headers.items() if k not in skip_headers) request_data += '\r\n' if isinstance(payload, bytes): sock.send(request_data.encode()) payload_len = len(payload) start = 0 while start < payload_len: send_size = min(payload_len - start, 65535) sended = sock.send(payload[start:start+send_size]) start += sended elif hasattr(payload, 'read'): sock.send(request_data) while True: data = payload.read(bufsize) if not data: break sock.send(data) else: raise TypeError('_request(payload) must be a string or buffer, not %r' % type(payload)) response = httplib.HTTPResponse(sock, buffering=True) try: orig_timeout = sock.gettimeout() sock.settimeout(100) response.begin() sock.settimeout(orig_timeout) except httplib.BadStatusLine as e: #logging.warn("_request bad status line:%r", e) response.close() 
response = None except Exception as e: xlog.warn("_request:%r", e) return response class GAE_Exception(BaseException): def __init__(self, type, message): xlog.debug("GAE_Exception %r %r", type, message) self.type = type self.message = message def request(headers={}, payload=None): max_retry = 3 for i in range(max_retry): ssl_sock = None try: ssl_sock = https_manager.get_ssl_connection() if not ssl_sock: xlog.debug('create_ssl_connection fail') continue if ssl_sock.host == '': ssl_sock.appid = appid_manager.get_appid() if not ssl_sock.appid: google_ip.report_connect_closed(ssl_sock.ip, "no appid") time.sleep(60) raise GAE_Exception(1, "no appid can use") headers['Host'] = ssl_sock.appid + ".appspot.com" ssl_sock.host = headers['Host'] else: headers['Host'] = ssl_sock.host response = _request(ssl_sock, headers, payload) if not response: google_ip.report_connect_closed(ssl_sock.ip, "request_fail") ssl_sock.close() continue response.ssl_sock = ssl_sock return response except Exception as e: xlog.exception('request failed:%s', e) if ssl_sock: google_ip.report_connect_closed(ssl_sock.ip, "request_except") ssl_sock.close() raise GAE_Exception(2, "try max times") def inflate(data): return zlib.decompress(data, -zlib.MAX_WBITS) def deflate(data): return zlib.compress(data)[2:-4] def fetch(method, url, headers, body): if isinstance(body, basestring) and body: if len(body) < 10 * 1024 * 1024 and 'Content-Encoding' not in headers: zbody = deflate(body) if len(zbody) < len(body): body = zbody headers['Content-Encoding'] = 'deflate' if len(body) > 10 * 1024 * 1024: xlog.warn("body len:%d %s %s", len(body), method, url) headers['Content-Length'] = str(len(body)) # GAE donot allow set `Host` header if 'Host' in headers: del headers['Host'] kwargs = {} if config.GAE_PASSWORD: kwargs['password'] = config.GAE_PASSWORD #kwargs['options'] = #kwargs['validate'] = kwargs['maxsize'] = config.AUTORANGE_MAXSIZE kwargs['timeout'] = '19' payload = '%s %s HTTP/1.1\r\n' % (method, url) payload += ''.join('%s: %s\r\n' % (k, v) for k, v in headers.items() if k not in skip_headers) #for k, v in headers.items(): # logging.debug("Send %s: %s", k, v) payload += ''.join('X-URLFETCH-%s: %s\r\n' % (k, v) for k, v in kwargs.items() if v) request_headers = {} payload = deflate(payload) body = '%s%s%s' % (struct.pack('!h', len(payload)), payload, body) request_headers['Content-Length'] = str(len(body)) response = request(request_headers, body) response.app_msg = '' response.app_status = response.status if response.app_status != 200: return response data = response.read(2) if len(data) < 2: xlog.warn("fetch too short lead byte len:%d %s", len(data), url) response.app_status = 502 response.fp = io.BytesIO(b'connection aborted. too short lead byte data=' + data) response.read = response.fp.read return response headers_length, = struct.unpack('!h', data) data = response.read(headers_length) if len(data) < headers_length: xlog.warn("fetch too short header need:%d get:%d %s", headers_length, len(data), url) response.app_status = 509 response.fp = io.BytesIO(b'connection aborted. 
too short headers data=' + data) response.read = response.fp.read return response response.ssl_sock.received_size += headers_length raw_response_line, headers_data = inflate(data).split('\r\n', 1) _, response.status, response.reason = raw_response_line.split(None, 2) response.status = int(response.status) response.reason = response.reason.strip() response.msg = httplib.HTTPMessage(io.BytesIO(headers_data)) response.app_msg = response.msg.fp.read() return response normcookie = functools.partial(re.compile(', ([^ =]+(?:=|$))').sub, '\\r\\nSet-Cookie: \\1') normattachment = functools.partial(re.compile(r'filename=(.+?)').sub, 'filename="\\1"') def send_response(wfile, status=404, headers={}, body=''): headers = dict((k.title(), v) for k, v in headers.items()) if 'Transfer-Encoding' in headers: del headers['Transfer-Encoding'] if 'Content-Length' not in headers: headers['Content-Length'] = len(body) if 'Connection' not in headers: headers['Connection'] = 'close' wfile.write("HTTP/1.1 %d\r\n" % status) for key, value in headers.items(): #wfile.write("%s: %s\r\n" % (key, value)) send_header(wfile, key, value) wfile.write("\r\n") wfile.write(body) def return_fail_message(wfile): html = generate_message_html('504 GAEProxy Proxy Time out', u'连接超时,先休息一会再来!') send_response(wfile, 504, body=html.encode('utf-8')) return # fix bug for android market app: Mobogenie # GAE url_fetch refuse empty value in header. def clean_empty_header(headers): remove_list = [] for key in headers: value = headers[key] if value == "": remove_list.append(key) for key in remove_list: del headers[key] return headers def handler(method, url, headers, body, wfile): time_request = time.time() headers = clean_empty_header(headers) errors = [] response = None while True: if time.time() - time_request > 30: #time out return return_fail_message(wfile) try: response = fetch(method, url, headers, body) if response.app_status != 200: xlog.warn("fetch gae status:%s url:%s", response.app_status, url) try: server_type = response.getheader('Server', "") if "gws" not in server_type and "Google Frontend" not in server_type and "GFE" not in server_type: xlog.warn("IP:%s not support GAE, server type:%s", response.ssl_sock.ip, server_type) google_ip.report_connect_fail(response.ssl_sock.ip, force_remove=True) response.close() continue except Exception as e: errors.append(e) xlog.warn('gae_handler.handler %r %s , retry...', e, url) continue if response.app_status == 404: #xlog.warning('APPID %r not exists, remove it.', response.ssl_sock.appid) appid_manager.report_not_exist(response.ssl_sock.appid, response.ssl_sock.ip) google_ip.report_connect_closed(response.ssl_sock.ip, "appid not exist") appid = appid_manager.get_appid() if not appid: html = generate_message_html('404 No usable Appid Exists', u'没有可用appid了,请配置可用的appid') send_response(wfile, 404, body=html.encode('utf-8')) response.close() return else: response.close() continue if response.app_status == 403 or response.app_status == 405: #Method not allowed # google have changed from gws to gvs, need to remove. xlog.warning('405 Method not allowed. remove %s ', response.ssl_sock.ip) # some ip can connect, and server type is gws # but can't use as GAE server # so we need remove it immediately google_ip.report_connect_fail(response.ssl_sock.ip, force_remove=True) response.close() continue if response.app_status == 503: xlog.warning('APPID %r out of Quota, remove it. 
%s', response.ssl_sock.appid, response.ssl_sock.ip) appid_manager.report_out_of_quota(response.ssl_sock.appid) google_ip.report_connect_closed(response.ssl_sock.ip, "out of quota") appid = appid_manager.get_appid() if not appid: html = generate_message_html('503 No usable Appid Exists', u'appid流量不足,请增加appid') send_response(wfile, 503, body=html.encode('utf-8')) response.close() return else: response.close() continue if response.app_status < 500: break except GAE_Exception as e: errors.append(e) xlog.warn("gae_exception:%r %s", e, url) except Exception as e: errors.append(e) xlog.exception('gae_handler.handler %r %s , retry...', e, url) if response.status == 206: return RangeFetch(method, url, headers, body, response, wfile).fetch() try: wfile.write("HTTP/1.1 %d %s\r\n" % (response.status, response.reason)) response_headers = {} for key, value in response.getheaders(): key = key.title() if key == 'Transfer-Encoding': #http://en.wikipedia.org/wiki/Chunked_transfer_encoding continue if key in skip_headers: continue response_headers[key] = value if 'X-Head-Content-Length' in response_headers: if method == "HEAD": response_headers['Content-Length'] = response_headers['X-Head-Content-Length'] del response_headers['X-Head-Content-Length'] send_to_browser = True try: for key in response_headers: value = response_headers[key] send_header(wfile, key, value) #logging.debug("Head- %s: %s", key, value) wfile.write("\r\n") except Exception as e: send_to_browser = False xlog.warn("gae_handler.handler send response fail. t:%d e:%r %s", time.time()-time_request, e, url) if len(response.app_msg): xlog.warn("APPID error:%d url:%s", response.status, url) wfile.write(response.app_msg) google_ip.report_connect_closed(response.ssl_sock.ip, "app err") response.close() return content_length = int(response.getheader('Content-Length', 0)) content_range = response.getheader('Content-Range', '') if content_range: start, end, length = tuple(int(x) for x in re.search(r'bytes (\d+)-(\d+)/(\d+)', content_range).group(1, 2, 3)) else: start, end, length = 0, content_length-1, content_length body_length = end - start + 1 last_read_time = time.time() time_response = time.time() while True: if start > end: time_finished = time.time() if body_length > 1024 and time_finished - time_response > 0: speed = body_length / (time_finished - time_response) xlog.info("GAE %d|%s|%d t:%d s:%d hs:%d Spd:%d %d %s", response.ssl_sock.fd, response.ssl_sock.ip, response.ssl_sock.received_size, (time_finished-time_request)*1000, length, response.ssl_sock.handshake_time, int(speed), response.status, url) else: xlog.info("GAE %d|%s|%d t:%d s:%d hs:%d %d %s", response.ssl_sock.fd, response.ssl_sock.ip, response.ssl_sock.received_size, (time_finished-time_request)*1000, length, response.ssl_sock.handshake_time, response.status, url) response.ssl_sock.received_size += body_length https_manager.save_ssl_connection_for_reuse(response.ssl_sock, call_time=time_request) return data = response.read(config.AUTORANGE_BUFSIZE) if not data: if time.time() - last_read_time > 20: google_ip.report_connect_closed(response.ssl_sock.ip, "down fail") response.close() xlog.warn("read timeout t:%d len:%d left:%d %s", (time.time()-time_request)*1000, length, (end-start), url) return else: time.sleep(0.1) continue last_read_time = time.time() data_len = len(data) start += data_len if send_to_browser: try: ret = wfile.write(data) if ret == ssl.SSL_ERROR_WANT_WRITE or ret == ssl.SSL_ERROR_WANT_READ: xlog.debug("send to browser wfile.write ret:%d", ret) ret = 
wfile.write(data) except Exception as e_b: if e_b[0] in (errno.ECONNABORTED, errno.EPIPE, errno.ECONNRESET) or 'bad write retry' in repr(e_b): xlog.warn('gae_handler send to browser return %r %r', e_b, url) else: xlog.warn('gae_handler send to browser return %r %r', e_b, url) send_to_browser = False except NetWorkIOError as e: time_except = time.time() time_cost = time_except - time_request if e[0] in (errno.ECONNABORTED, errno.EPIPE) or 'bad write retry' in repr(e): xlog.warn("gae_handler err:%r time:%d %s ", e, time_cost, url) google_ip.report_connect_closed(response.ssl_sock.ip, "Net") else: xlog.exception("gae_handler except:%r %s", e, url) except Exception as e: xlog.exception("gae_handler except:%r %s", e, url) class RangeFetch(object): threads = config.AUTORANGE_THREADS maxsize = config.AUTORANGE_MAXSIZE bufsize = config.AUTORANGE_BUFSIZE waitsize = config.AUTORANGE_WAITSIZE def __init__(self, method, url, headers, body, response, wfile): self.method = method self.wfile = wfile self.url = url self.headers = headers self.body = body self.response = response self._stopped = False self._last_app_status = {} self.expect_begin = 0 def fetch(self): response_headers = dict((k.title(), v) for k, v in self.response.getheaders()) content_range = response_headers['Content-Range'] start, end, length = tuple(int(x) for x in re.search(r'bytes (\d+)-(\d+)/(\d+)', content_range).group(1, 2, 3)) if start == 0: response_headers['Content-Length'] = str(length) del response_headers['Content-Range'] else: response_headers['Content-Range'] = 'bytes %s-%s/%s' % (start, end, length) response_headers['Content-Length'] = str(length-start) xlog.info('>>>>>>>>>>>>>>> RangeFetch started(%r) %d-%d', self.url, start, end) try: self.wfile.write("HTTP/1.1 200 OK\r\n") for key in response_headers: if key == 'Transfer-Encoding': continue if key == 'X-Head-Content-Length': continue if key in skip_headers: continue value = response_headers[key] #logging.debug("Head %s: %s", key.title(), value) send_header(self.wfile, key, value) self.wfile.write("\r\n") except Exception as e: self._stopped = True xlog.warn("RangeFetch send response fail:%r %s", e, self.url) return data_queue = Queue.PriorityQueue() range_queue = Queue.PriorityQueue() range_queue.put((start, end, self.response)) self.expect_begin = start for begin in range(end+1, length, self.maxsize): range_queue.put((begin, min(begin+self.maxsize-1, length-1), None)) for i in xrange(0, self.threads): range_delay_size = i * self.maxsize spawn_later(float(range_delay_size)/self.waitsize, self.__fetchlet, range_queue, data_queue, range_delay_size) has_peek = hasattr(data_queue, 'peek') peek_timeout = 120 while self.expect_begin < length - 1: try: if has_peek: begin, data = data_queue.peek(timeout=peek_timeout) if self.expect_begin == begin: data_queue.get() elif self.expect_begin < begin: time.sleep(0.1) continue else: xlog.error('RangeFetch Error: begin(%r) < expect_begin(%r), quit.', begin, self.expect_begin) break else: begin, data = data_queue.get(timeout=peek_timeout) if self.expect_begin == begin: pass elif self.expect_begin < begin: data_queue.put((begin, data)) time.sleep(0.1) continue else: xlog.error('RangeFetch Error: begin(%r) < expect_begin(%r), quit.', begin, self.expect_begin) break except Queue.Empty: xlog.error('data_queue peek timeout, break') break try: ret = self.wfile.write(data) if ret == ssl.SSL_ERROR_WANT_WRITE or ret == ssl.SSL_ERROR_WANT_READ: xlog.debug("send to browser wfile.write ret:%d, retry", ret) ret = self.wfile.write(data) 
xlog.debug("send to browser wfile.write ret:%d", ret) self.expect_begin += len(data) del data except Exception as e: xlog.warn('RangeFetch client closed(%s). %s', e, self.url) break self._stopped = True def __fetchlet(self, range_queue, data_queue, range_delay_size): headers = dict((k.title(), v) for k, v in self.headers.items()) headers['Connection'] = 'close' while not self._stopped: try: try: start, end, response = range_queue.get(timeout=1) if self.expect_begin < start and data_queue.qsize() * self.bufsize + range_delay_size > 30*1024*1024: range_queue.put((start, end, response)) time.sleep(10) continue headers['Range'] = 'bytes=%d-%d' % (start, end) if not response: response = fetch(self.method, self.url, headers, self.body) except Queue.Empty: continue except Exception as e: xlog.warning("RangeFetch fetch response %r in __fetchlet", e) range_queue.put((start, end, None)) continue if not response: xlog.warning('RangeFetch %s return %r', headers['Range'], response) range_queue.put((start, end, None)) continue if response.app_status != 200: xlog.warning('Range Fetch return %s "%s %s" %s ', response.app_status, self.method, self.url, headers['Range']) if response.app_status == 404: xlog.warning('APPID %r not exists, remove it.', response.ssl_sock.appid) appid_manager.report_not_exist(response.ssl_sock.appid, response.ssl_sock.ip) appid = appid_manager.get_appid() if not appid: xlog.error("no appid left") self._stopped = True response.close() return if response.app_status == 503: xlog.warning('APPID %r out of Quota, remove it temporary.', response.ssl_sock.appid) appid_manager.report_out_of_quota(response.ssl_sock.appid) appid = appid_manager.get_appid() if not appid: xlog.error("no appid left") self._stopped = True response.close() return google_ip.report_connect_closed(response.ssl_sock.ip, "app err") response.close() range_queue.put((start, end, None)) continue if response.getheader('Location'): self.url = urlparse.urljoin(self.url, response.getheader('Location')) xlog.info('RangeFetch Redirect(%r)', self.url) google_ip.report_connect_closed(response.ssl_sock.ip, "reLocation") response.close() range_queue.put((start, end, None)) continue if 200 <= response.status < 300: content_range = response.getheader('Content-Range') if not content_range: xlog.warning('RangeFetch "%s %s" return Content-Range=%r: response headers=%r, retry %s-%s', self.method, self.url, content_range, response.getheaders(), start, end) google_ip.report_connect_closed(response.ssl_sock.ip, "no range") response.close() range_queue.put((start, end, None)) continue content_length = int(response.getheader('Content-Length', 0)) xlog.info('>>>>>>>>>>>>>>> [thread %s] %s %s', threading.currentThread().ident, content_length, content_range) time_last_read = time.time() while start < end + 1: try: data = response.read(self.bufsize) if not data: if time.time() - time_last_read > 20: break else: time.sleep(0.1) continue time_last_read = time.time() data_len = len(data) data_queue.put((start, data)) start += data_len except Exception as e: xlog.warning('RangeFetch "%s %s" %s failed: %s', self.method, self.url, headers['Range'], e) break if start < end + 1: xlog.warning('RangeFetch "%s %s" retry %s-%s', self.method, self.url, start, end) google_ip.report_connect_closed(response.ssl_sock.ip, "down err") response.close() range_queue.put((start, end, None)) continue https_manager.save_ssl_connection_for_reuse(response.ssl_sock) xlog.info('>>>>>>>>>>>>>>> Successfully reached %d bytes.', start - 1) else: xlog.error('RangeFetch %r 
return %s', self.url, response.status) google_ip.report_connect_closed(response.ssl_sock.ip, "status err") response.close() range_queue.put((start, end, None)) continue except StandardError as e: xlog.exception('RangeFetch._fetchlet error:%s', e) raise
bsd-2-clause
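The GAE handler and RangeFetch code above recover the byte window of a partial response by parsing the Content-Range header with a regular expression. A minimal standalone sketch of that parsing step, using a hypothetical header value, looks like this:

import re

def parse_content_range(value):
    # "bytes 0-1023/4096" -> (start, end, total_length), or None if the header is malformed
    match = re.search(r'bytes (\d+)-(\d+)/(\d+)', value)
    if match is None:
        return None
    start, end, length = (int(x) for x in match.group(1, 2, 3))
    return start, end, length

print(parse_content_range('bytes 0-1023/4096'))  # (0, 1023, 4096)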
sam-tsai/django-old
django/contrib/gis/sitemaps/views.py
20
4249
from django.http import HttpResponse, Http404 from django.template import loader from django.contrib.sites.models import Site from django.core import urlresolvers from django.core.paginator import EmptyPage, PageNotAnInteger from django.contrib.gis.db.models.fields import GeometryField from django.db import connections, DEFAULT_DB_ALIAS from django.db.models import get_model from django.utils.encoding import smart_str from django.contrib.gis.shortcuts import render_to_kml, render_to_kmz def index(request, sitemaps): """ This view generates a sitemap index that uses the proper view for resolving geographic section sitemap URLs. """ current_site = Site.objects.get_current() sites = [] protocol = request.is_secure() and 'https' or 'http' for section, site in sitemaps.items(): if callable(site): pages = site().paginator.num_pages else: pages = site.paginator.num_pages sitemap_url = urlresolvers.reverse('django.contrib.gis.sitemaps.views.sitemap', kwargs={'section': section}) sites.append('%s://%s%s' % (protocol, current_site.domain, sitemap_url)) if pages > 1: for page in range(2, pages+1): sites.append('%s://%s%s?p=%s' % (protocol, current_site.domain, sitemap_url, page)) xml = loader.render_to_string('sitemap_index.xml', {'sitemaps': sites}) return HttpResponse(xml, mimetype='application/xml') def sitemap(request, sitemaps, section=None): """ This view generates a sitemap with additional geographic elements defined by Google. """ maps, urls = [], [] if section is not None: if section not in sitemaps: raise Http404("No sitemap available for section: %r" % section) maps.append(sitemaps[section]) else: maps = sitemaps.values() page = request.GET.get("p", 1) for site in maps: try: if callable(site): urls.extend(site().get_urls(page)) else: urls.extend(site.get_urls(page)) except EmptyPage: raise Http404("Page %s empty" % page) except PageNotAnInteger: raise Http404("No page '%s'" % page) xml = smart_str(loader.render_to_string('gis/sitemaps/geo_sitemap.xml', {'urlset': urls})) return HttpResponse(xml, mimetype='application/xml') def kml(request, label, model, field_name=None, compress=False, using=DEFAULT_DB_ALIAS): """ This view generates KML for the given app label, model, and field name. The model's default manager must be GeoManager, and the field name must be that of a geographic field. """ placemarks = [] klass = get_model(label, model) if not klass: raise Http404('You must supply a valid app label and module name. Got "%s.%s"' % (label, model)) if field_name: try: info = klass._meta.get_field_by_name(field_name) if not isinstance(info[0], GeometryField): raise Exception except: raise Http404('Invalid geometry field.') connection = connections[using] if connection.ops.postgis: # PostGIS will take care of transformation. placemarks = klass._default_manager.using(using).kml(field_name=field_name) else: # There's no KML method on Oracle or MySQL, so we use the `kml` # attribute of the lazy geometry instead. placemarks = [] if connection.ops.oracle: qs = klass._default_manager.using(using).transform(4326, field_name=field_name) else: qs = klass._default_manager.using(using).all() for mod in qs: setattr(mod, 'kml', getattr(mod, field_name).kml) placemarks.append(mod) # Getting the render function and rendering to the correct. 
if compress: render = render_to_kmz else: render = render_to_kml return render('gis/kml/placemarks.kml', {'places' : placemarks}) def kmz(request, label, model, field_name=None, using=DEFAULT_DB_ALIAS): """ This view returns KMZ for the given app label, model, and field name. """ return kml(request, label, model, field_name, compress=True, using=using)
bsd-3-clause
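The index() view above expands each sitemap section into one URL per paginator page. A standalone sketch of that expansion, with a hypothetical domain and section path, is:

def sitemap_index_urls(protocol, domain, sitemap_url, pages):
    # page 1 uses the bare section URL; pages 2..N get an explicit ?p= query parameter
    urls = ['%s://%s%s' % (protocol, domain, sitemap_url)]
    for page in range(2, pages + 1):
        urls.append('%s://%s%s?p=%s' % (protocol, domain, sitemap_url, page))
    return urls

print(sitemap_index_urls('https', 'example.com', '/sitemaps/blog.xml', 3))
# ['https://example.com/sitemaps/blog.xml',
#  'https://example.com/sitemaps/blog.xml?p=2',
#  'https://example.com/sitemaps/blog.xml?p=3']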
ranqingfa/ardupilot
Tools/autotest/autotest.py
1
18485
#!/usr/bin/env python """ APM automatic test suite Andrew Tridgell, October 2011 """ from __future__ import print_function import atexit import fnmatch import glob import optparse import os import shutil import signal import sys import time import traceback import apmrover2 import arducopter import arduplane import quadplane import ardusub from pysim import util from pymavlink import mavutil from pymavlink.generator import mavtemplate def buildlogs_dirpath(): return os.getenv("BUILDLOGS", util.reltopdir("../buildlogs")) def buildlogs_path(path): '''return a string representing path in the buildlogs directory''' bits = [buildlogs_dirpath()] if isinstance(path, list): bits.extend(path) else: bits.append(path) return os.path.join(*bits) def get_default_params(atype, binary): """Get default parameters.""" # use rover simulator so SITL is not starved of input HOME = mavutil.location(40.071374969556928, -105.22978898137808, 1583.702759, 246) if "plane" in binary or "rover" in binary: frame = "rover" else: frame = "+" home = "%f,%f,%u,%u" % (HOME.lat, HOME.lng, HOME.alt, HOME.heading) sitl = util.start_SITL(binary, wipe=True, model=frame, home=home, speedup=10, unhide_parameters=True) mavproxy = util.start_MAVProxy_SITL(atype) print("Dumping defaults") idx = mavproxy.expect(['Please Run Setup', 'Saved [0-9]+ parameters to (\S+)']) if idx == 0: # we need to restart it after eeprom erase util.pexpect_close(mavproxy) util.pexpect_close(sitl) sitl = util.start_SITL(binary, model=frame, home=home, speedup=10) mavproxy = util.start_MAVProxy_SITL(atype) idx = mavproxy.expect('Saved [0-9]+ parameters to (\S+)') parmfile = mavproxy.match.group(1) dest = buildlogs_path('%s-defaults.parm' % atype) shutil.copy(parmfile, dest) util.pexpect_close(mavproxy) util.pexpect_close(sitl) print("Saved defaults for %s to %s" % (atype, dest)) return True def build_all(): """Run the build_all.sh script.""" print("Running build_all.sh") if util.run_cmd(util.reltopdir('Tools/scripts/build_all.sh'), directory=util.reltopdir('.')) != 0: print("Failed build_all.sh") return False return True def build_binaries(): """Run the build_binaries.py script.""" print("Running build_binaries.py") # copy the script as it changes git branch, which can change the script while running orig = util.reltopdir('Tools/scripts/build_binaries.py') copy = util.reltopdir('./build_binaries.py') shutil.copy2(orig, copy) # also copy generate_manifest library: orig_gm = util.reltopdir('Tools/scripts/generate_manifest.py') copy_gm = util.reltopdir('./generate_manifest.py') shutil.copy2(orig_gm, copy_gm) if util.run_cmd(copy, directory=util.reltopdir('.')) != 0: print("Failed build_binaries.py") return False return True def build_devrelease(): """Run the build_devrelease.sh script.""" print("Running build_devrelease.sh") # copy the script as it changes git branch, which can change the script while running orig = util.reltopdir('Tools/scripts/build_devrelease.sh') copy = util.reltopdir('./build_devrelease.sh') shutil.copy2(orig, copy) if util.run_cmd(copy, directory=util.reltopdir('.')) != 0: print("Failed build_devrelease.sh") return False return True def build_examples(): """Build examples.""" for target in 'px4-v2', 'navio': print("Running build.examples for %s" % target) try: util.build_examples(target) except Exception as e: print("Failed build_examples on board=%s" % target) print(str(e)) return False return True def build_parameters(): """Run the param_parse.py script.""" print("Running param_parse.py") if 
util.run_cmd(util.reltopdir('Tools/autotest/param_metadata/param_parse.py'), directory=util.reltopdir('.')) != 0: print("Failed param_parse.py") return False return True def convert_gpx(): """Convert any tlog files to GPX and KML.""" mavlog = glob.glob(buildlogs_path("*.tlog")) for m in mavlog: util.run_cmd(util.reltopdir("modules/mavlink/pymavlink/tools/mavtogpx.py") + " --nofixcheck " + m) gpx = m + '.gpx' kml = m + '.kml' util.run_cmd('gpsbabel -i gpx -f %s -o kml,units=m,floating=1,extrude=1 -F %s' % (gpx, kml), checkfail=False) util.run_cmd('zip %s.kmz %s.kml' % (m, m), checkfail=False) util.run_cmd("mavflightview.py --imagefile=%s.png %s" % (m, m)) return True def test_prerequisites(): """Check we have the right directories and tools to run tests.""" print("Testing prerequisites") util.mkdir_p(buildlogs_dirpath()) return True def alarm_handler(signum, frame): """Handle test timeout.""" global results, opts try: results.add('TIMEOUT', '<span class="failed-text">FAILED</span>', opts.timeout) util.pexpect_close_all() convert_gpx() write_fullresults() os.killpg(0, signal.SIGKILL) except Exception: pass sys.exit(1) def should_run_step(step): """See if a step should be skipped.""" for skip in skipsteps: if fnmatch.fnmatch(step.lower(), skip.lower()): return False return True __bin_names = { "ArduCopter" : "arducopter", "ArduPlane" : "arduplane", "APMrover2" : "ardurover", "AntennaTracker" : "antennatracker", "CopterAVC" : "arducopter-heli", "QuadPlane" : "arduplane", "ArduSub" : "ardusub" } def binary_path(step, debug=False): try: vehicle = step.split(".")[1] except Exception: return None if vehicle in __bin_names: binary_name = __bin_names[vehicle] else: # cope with builds that don't have a specific binary return None if debug: binary_basedir = "sitl-debug" else: binary_basedir = "sitl" binary = util.reltopdir(os.path.join('build', binary_basedir, 'bin', binary_name)) if not os.path.exists(binary): if os.path.exists(binary + ".exe"): binary += ".exe" else: raise ValueError("Binary (%s) does not exist" % (binary,)) return binary def run_step(step): """Run one step.""" # remove old logs util.run_cmd('/bin/rm -f logs/*.BIN logs/LASTLOG.TXT') if step == "prerequisites": return test_prerequisites() build_opts = { "j": opts.j, "debug": opts.debug, "clean": not opts.no_clean, "configure": not opts.no_configure, } if step == 'build.ArduPlane': return util.build_SITL('bin/arduplane', **build_opts) if step == 'build.APMrover2': return util.build_SITL('bin/ardurover', **build_opts) if step == 'build.ArduCopter': return util.build_SITL('bin/arducopter', **build_opts) if step == 'build.AntennaTracker': return util.build_SITL('bin/antennatracker', **build_opts) if step == 'build.Helicopter': return util.build_SITL('bin/arducopter-heli', **build_opts) if step == 'build.ArduSub': return util.build_SITL('bin/ardusub', **build_opts) binary = binary_path(step, debug=opts.debug) if step.startswith("default"): vehicle = step[8:] return get_default_params(vehicle, binary) fly_opts = { "viewerip": opts.viewerip, "use_map": opts.map, "valgrind": opts.valgrind, "gdb": opts.gdb, "gdbserver": opts.gdbserver, } if opts.speedup is not None: fly_opts.speedup = opts.speedup if step == 'fly.ArduCopter': return arducopter.fly_ArduCopter(binary, frame=opts.frame, **fly_opts) if step == 'fly.CopterAVC': return arducopter.fly_CopterAVC(binary, **fly_opts) if step == 'fly.ArduPlane': return arduplane.fly_ArduPlane(binary, **fly_opts) if step == 'fly.QuadPlane': return quadplane.fly_QuadPlane(binary, **fly_opts) if step == 
'drive.APMrover2': return apmrover2.drive_APMrover2(binary, frame=opts.frame, **fly_opts) if step == 'dive.ArduSub': return ardusub.dive_ArduSub(binary, **fly_opts) if step == 'build.All': return build_all() if step == 'build.Binaries': return build_binaries() if step == 'build.DevRelease': return build_devrelease() if step == 'build.Examples': return build_examples() if step == 'build.Parameters': return build_parameters() if step == 'convertgpx': return convert_gpx() raise RuntimeError("Unknown step %s" % step) class TestResult(object): """Test result class.""" def __init__(self, name, result, elapsed): self.name = name self.result = result self.elapsed = "%.1f" % elapsed class TestFile(object): """Test result file.""" def __init__(self, name, fname): self.name = name self.fname = fname class TestResults(object): """Test results class.""" def __init__(self): self.date = time.asctime() self.githash = util.run_cmd('git rev-parse HEAD', output=True, directory=util.reltopdir('.')).strip() self.tests = [] self.files = [] self.images = [] def add(self, name, result, elapsed): """Add a result.""" self.tests.append(TestResult(name, result, elapsed)) def addfile(self, name, fname): """Add a result file.""" self.files.append(TestFile(name, fname)) def addimage(self, name, fname): """Add a result image.""" self.images.append(TestFile(name, fname)) def addglob(self, name, pattern): """Add a set of files.""" for f in glob.glob(buildlogs_path(pattern)): self.addfile(name, os.path.basename(f)) def addglobimage(self, name, pattern): """Add a set of images.""" for f in glob.glob(buildlogs_path(pattern)): self.addimage(name, os.path.basename(f)) def write_webresults(results_to_write): """Write webpage results.""" t = mavtemplate.MAVTemplate() for h in glob.glob(util.reltopdir('Tools/autotest/web/*.html')): html = util.loadfile(h) f = open(buildlogs_path(os.path.basename(h)), mode='w') t.write(f, html, results_to_write) f.close() for f in glob.glob(util.reltopdir('Tools/autotest/web/*.png')): shutil.copy(f, buildlogs_path(os.path.basename(f))) def write_fullresults(): """Write out full results set.""" global results results.addglob("Google Earth track", '*.kmz') results.addfile('Full Logs', 'autotest-output.txt') results.addglob('DataFlash Log', '*-log.bin') results.addglob("MAVLink log", '*.tlog') results.addglob("GPX track", '*.gpx') # results common to all vehicles: vehicle_files = [ ('{vehicle} build log', '{vehicle}.txt'), ('{vehicle} code size', '{vehicle}.sizes.txt'), ('{vehicle} stack sizes', '{vehicle}.framesizes.txt'), ('{vehicle} defaults', 'default_params/{vehicle}-defaults.parm'), ('{vehicle} core', '{vehicle}.core'), ('{vehicle} ELF', '{vehicle}.elf'), ] vehicle_globs = [('{vehicle} log', '{vehicle}-*.BIN'), ] for vehicle in 'ArduPlane','ArduCopter','APMrover2','AntennaTracker', 'ArduSub': subs = { 'vehicle': vehicle } for vehicle_file in vehicle_files: description = vehicle_file[0].format(**subs) filename = vehicle_file[1].format(**subs) results.addfile(description, filename) for vehicle_glob in vehicle_globs: description = vehicle_glob[0].format(**subs) glob = vehicle_glob[1].format(**subs) results.addglob(description, glob) results.addglob("CopterAVC log", 'CopterAVC-*.BIN') results.addfile("CopterAVC core", 'CopterAVC.core') results.addglob('APM:Libraries documentation', 'docs/libraries/index.html') results.addglob('APM:Plane documentation', 'docs/ArduPlane/index.html') results.addglob('APM:Copter documentation', 'docs/ArduCopter/index.html') results.addglob('APM:Rover documentation', 
'docs/APMrover2/index.html') results.addglob('APM:Sub documentation', 'docs/ArduSub/index.html') results.addglobimage("Flight Track", '*.png') write_webresults(results) def check_logs(step): """Check for log files from a step.""" print("check step: ", step) if step.startswith('fly.'): vehicle = step[4:] elif step.startswith('drive.'): vehicle = step[6:] else: return logs = glob.glob("logs/*.BIN") for log in logs: bname = os.path.basename(log) newname = buildlogs_path("%s-%s" % (vehicle, bname)) print("Renaming %s to %s" % (log, newname)) shutil.move(log, newname) corefile = "core" if os.path.exists(corefile): newname = buildlogs_path("%s.core" % vehicle) print("Renaming %s to %s" % (corefile, newname)) shutil.move(corefile, newname) try: util.run_cmd('/bin/cp build/sitl/bin/* %s' % buildlogs_dirpath(), directory=util.reltopdir('.')) except Exception: print("Unable to save binary") def run_tests(steps): """Run a list of steps.""" global results passed = True failed = [] for step in steps: util.pexpect_close_all() t1 = time.time() print(">>>> RUNNING STEP: %s at %s" % (step, time.asctime())) try: if run_step(step): results.add(step, '<span class="passed-text">PASSED</span>', time.time() - t1) print(">>>> PASSED STEP: %s at %s" % (step, time.asctime())) check_logs(step) else: print(">>>> FAILED STEP: %s at %s" % (step, time.asctime())) passed = False failed.append(step) results.add(step, '<span class="failed-text">FAILED</span>', time.time() - t1) except Exception as msg: passed = False failed.append(step) print(">>>> FAILED STEP: %s at %s (%s)" % (step, time.asctime(), msg)) traceback.print_exc(file=sys.stdout) results.add(step, '<span class="failed-text">FAILED</span>', time.time() - t1) check_logs(step) if not passed: print("FAILED %u tests: %s" % (len(failed), failed)) util.pexpect_close_all() write_fullresults() return passed if __name__ == "__main__": ############## main program ############# os.environ['PYTHONUNBUFFERED'] = '1' os.putenv('TMPDIR', util.reltopdir('tmp')) parser = optparse.OptionParser("autotest") parser.add_option("--skip", type='string', default='', help='list of steps to skip (comma separated)') parser.add_option("--list", action='store_true', default=False, help='list the available steps') parser.add_option("--viewerip", default=None, help='IP address to send MAVLink and fg packets to') parser.add_option("--map", action='store_true', default=False, help='show map') parser.add_option("--experimental", default=False, action='store_true', help='enable experimental tests') parser.add_option("--timeout", default=3000, type='int', help='maximum runtime in seconds') parser.add_option("--speedup", default=None, type='int', help='speedup to run the simulations at') parser.add_option("--valgrind", default=False, action='store_true', help='run ArduPilot binaries under valgrind') parser.add_option("--gdb", default=False, action='store_true', help='run ArduPilot binaries under gdb') parser.add_option("--debug", default=False, action='store_true', help='make built binaries debug binaries') parser.add_option("-j", default=None, type='int', help='build CPUs') parser.add_option("--frame", type='string', default=None, help='specify frame type') parser.add_option("--gdbserver", default=False, action='store_true', help='run ArduPilot binaries under gdbserver') parser.add_option("--no-clean", default=False, action='store_true', help='do not clean before building', dest="no_clean") parser.add_option("--no-configure", default=False, action='store_true', help='do not configure before 
building', dest="no_configure") opts, args = parser.parse_args() steps = [ 'prerequisites', 'build.All', 'build.Binaries', # 'build.DevRelease', 'build.Examples', 'build.Parameters', 'build.ArduPlane', 'defaults.ArduPlane', 'fly.ArduPlane', 'fly.QuadPlane', 'build.APMrover2', 'defaults.APMrover2', 'drive.APMrover2', 'build.ArduCopter', 'defaults.ArduCopter', 'fly.ArduCopter', 'build.Helicopter', 'fly.CopterAVC', 'build.AntennaTracker', 'build.ArduSub', 'defaults.ArduSub', 'dive.ArduSub', 'convertgpx', ] skipsteps = opts.skip.split(',') # ensure we catch timeouts signal.signal(signal.SIGALRM, alarm_handler) signal.alarm(opts.timeout) if opts.list: for step in steps: print(step) sys.exit(0) util.mkdir_p(buildlogs_dirpath()) lckfile = buildlogs_path('autotest.lck') print("lckfile=%s" % repr(lckfile)) lck = util.lock_file(lckfile) if lck is None: print("autotest is locked - exiting. lckfile=(%s)" % (lckfile,)) sys.exit(0) atexit.register(util.pexpect_close_all) if len(args) > 0: # allow a wildcard list of steps matched = [] for a in args: matches = [step for step in steps if fnmatch.fnmatch(step.lower(), a.lower())] if not len(matches): print("No steps matched {}".format(a)) sys.exit(1) matched.extend(matches) steps = matched # skip steps according to --skip option: steps_to_run = [ s for s in steps if should_run_step(s) ] results = TestResults() try: if not run_tests(steps_to_run): sys.exit(1) except KeyboardInterrupt: util.pexpect_close_all() sys.exit(1) except Exception: # make sure we kill off any children util.pexpect_close_all() raise
gpl-3.0
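autotest.py selects and skips test steps with shell-style wildcards via fnmatch. A small self-contained sketch of that filtering, with hypothetical step names and skip patterns, is:

import fnmatch

steps = ['build.ArduPlane', 'fly.ArduPlane', 'build.ArduCopter', 'fly.ArduCopter']
skipsteps = ['fly.*']  # e.g. taken from a --skip option

def should_run_step(step):
    # a step is skipped when any skip pattern matches it, case-insensitively
    return not any(fnmatch.fnmatch(step.lower(), skip.lower()) for skip in skipsteps)

print([s for s in steps if should_run_step(s)])  # ['build.ArduPlane', 'build.ArduCopter']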
hainm/scipy
scipy/special/setup.py
77
4626
#!/usr/bin/env python from __future__ import division, print_function, absolute_import import os import sys from os.path import join from distutils.sysconfig import get_python_inc import numpy from numpy.distutils.misc_util import get_numpy_include_dirs try: from numpy.distutils.misc_util import get_info except ImportError: raise ValueError("numpy >= 1.4 is required (detected %s from %s)" % (numpy.__version__, numpy.__file__)) def configuration(parent_package='',top_path=None): from numpy.distutils.misc_util import Configuration from numpy.distutils.system_info import get_info as get_system_info config = Configuration('special', parent_package, top_path) define_macros = [] if sys.platform == 'win32': # define_macros.append(('NOINFINITIES',None)) # define_macros.append(('NONANS',None)) define_macros.append(('_USE_MATH_DEFINES',None)) curdir = os.path.abspath(os.path.dirname(__file__)) inc_dirs = [get_python_inc(), os.path.join(curdir, "c_misc")] if inc_dirs[0] != get_python_inc(plat_specific=1): inc_dirs.append(get_python_inc(plat_specific=1)) inc_dirs.insert(0, get_numpy_include_dirs()) # C libraries c_misc_src = [join('c_misc','*.c')] c_misc_hdr = [join('c_misc','*.h')] cephes_src = [join('cephes','*.c')] cephes_hdr = [join('cephes', '*.h')] config.add_library('sc_c_misc',sources=c_misc_src, include_dirs=[curdir] + inc_dirs, depends=(cephes_hdr + cephes_src + c_misc_hdr + cephes_hdr + ['*.h']), macros=define_macros) config.add_library('sc_cephes',sources=cephes_src, include_dirs=[curdir] + inc_dirs, depends=(cephes_hdr + ['*.h']), macros=define_macros) # Fortran/C++ libraries mach_src = [join('mach','*.f')] amos_src = [join('amos','*.f')] cdf_src = [join('cdflib','*.f')] specfun_src = [join('specfun','*.f')] config.add_library('sc_mach',sources=mach_src, config_fc={'noopt':(__file__,1)}) config.add_library('sc_amos',sources=amos_src) config.add_library('sc_cdf',sources=cdf_src) config.add_library('sc_specfun',sources=specfun_src) # Extension specfun config.add_extension('specfun', sources=['specfun.pyf'], f2py_options=['--no-wrap-functions'], depends=specfun_src, define_macros=[], libraries=['sc_specfun']) # Extension _ufuncs headers = ['*.h', join('c_misc', '*.h'), join('cephes', '*.h')] ufuncs_src = ['_ufuncs.c', 'sf_error.c', '_logit.c.src', "amos_wrappers.c", "cdf_wrappers.c", "specfun_wrappers.c"] ufuncs_dep = (headers + ufuncs_src + amos_src + c_misc_src + cephes_src + mach_src + cdf_src + specfun_src) cfg = dict(get_system_info('lapack_opt')) cfg.setdefault('include_dirs', []).extend([curdir] + inc_dirs + [numpy.get_include()]) cfg.setdefault('libraries', []).extend(['sc_amos','sc_c_misc','sc_cephes','sc_mach', 'sc_cdf', 'sc_specfun']) cfg.setdefault('define_macros', []).extend(define_macros) config.add_extension('_ufuncs', depends=ufuncs_dep, sources=ufuncs_src, extra_info=get_info("npymath"), **cfg) # Extension _ufuncs_cxx ufuncs_cxx_src = ['_ufuncs_cxx.cxx', 'sf_error.c', '_faddeeva.cxx', 'Faddeeva.cc'] ufuncs_cxx_dep = (headers + ufuncs_cxx_src + cephes_src + ['*.hh']) config.add_extension('_ufuncs_cxx', sources=ufuncs_cxx_src, depends=ufuncs_cxx_dep, include_dirs=[curdir], define_macros=define_macros, extra_info=get_info("npymath")) cfg = dict(get_system_info('lapack_opt')) config.add_extension('_ellip_harm_2', sources=['_ellip_harm_2.c', 'sf_error.c',], **cfg ) config.add_data_files('tests/*.py') config.add_data_files('tests/data/README') config.add_data_files('tests/data/*.npz') return config if __name__ == '__main__': from numpy.distutils.core import setup 
setup(**configuration(top_path='').todict())
bsd-3-clause
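The setup script above follows the standard numpy.distutils pattern: a configuration() function that registers libraries and extensions, handed to setup() via todict(). A minimal sketch of that pattern, with a hypothetical package name and C source, is:

def configuration(parent_package='', top_path=None):
    # one package with a single C extension and no extra libraries
    from numpy.distutils.misc_util import Configuration
    config = Configuration('mypkg', parent_package, top_path)
    config.add_extension('_myext', sources=['_myext.c'])
    return config

if __name__ == '__main__':
    from numpy.distutils.core import setup
    setup(**configuration(top_path='').todict())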
ergs/transmutagen
transmutagen/origen.py
1
19764
# PYTHON_ARGCOMPLETE_OK import argparse import os from subprocess import run import logging from itertools import combinations import numpy as np from scipy.sparse import csr_matrix from scipy.sparse.linalg import use_solver import tables from pyne.utils import toggle_warnings import warnings toggle_warnings() warnings.simplefilter('ignore') import pyne.data import pyne.material from pyne.origen22 import (nlbs, write_tape5_irradiation, write_tape4, parse_tape9, merge_tape9, write_tape9, parse_tape6) from pyne.material import from_atom_frac from .util import load_sparse_csr, time_func from .tape9utils import origen_to_name from .codegen import CRAM_matrix_exp_lambdify logger = logging.getLogger(__name__) logger.addHandler(logging.StreamHandler()) # Change to WARN for less output logger.setLevel(logging.INFO) ORIGEN = '/home/origen22/code/o2_therm_linux.exe' # ORIGEN = '/home/o2prec/o2prec' decay_TAPE9 = "/home/origen22/libs/decay.lib" LIBS_DIR = "/home/origen22/libs" DATA_DIR = os.path.abspath(os.path.join(__file__, os.path.pardir, os.path.pardir, 'data')) NUCLIDE_KEYS = ['activation_products', 'actinides', 'fission_products'] def run_origen(origen): run(origen) return parse_tape6() def execute_origen(xs_tape9, time, nuclide, phi, origen, decay_tape9): xs_tape9 = xs_tape9 if not os.path.isabs(xs_tape9): xs_tape9 = os.path.join(LIBS_DIR, xs_tape9) parsed_xs_tape9 = parse_tape9(xs_tape9) parsed_decay_tape9 = parse_tape9(decay_tape9) merged_tape9 = merge_tape9([parsed_decay_tape9, parsed_xs_tape9]) # Can set outfile to change directory, but the file name needs to be # TAPE9.INP. write_tape9(merged_tape9) decay_nlb, xsfpy_nlb = nlbs(parsed_xs_tape9) # Can set outfile, but the file name should be called TAPE5.INP. write_tape5_irradiation("IRF", time/(60*60*24), phi, xsfpy_nlb=xsfpy_nlb, cut_off=0, out_table_num=[4, 5], out_table_nes=[True, False, False]) M = from_atom_frac({nuclide: 1}, mass=1, atoms_per_molecule=1) write_tape4(M) # Make pyne use naive atomic mass numbers to match ORIGEN for i in pyne.data.atomic_mass_map: pyne.data.atomic_mass_map[i] = float(pyne.nucname.anum(i)) origen_time, data = time_func(run_origen, origen) logger.info("ORIGEN runtime: %s", origen_time) return origen_time, data def load_data(datafile): import pyne.data # Make pyne use naive atomic mass numbers to match ORIGEN for i in pyne.data.atomic_mass_map: pyne.data.atomic_mass_map[i] = float(pyne.nucname.anum(i)) with open(datafile) as f: return eval(f.read(), {'array': np.array, 'pyne': pyne}) def origen_to_array(origen_dict, nucs): new_data = np.zeros((len(nucs), 1)) nuc_to_idx = {v: i for i, v in enumerate(nucs)} for i in origen_dict: new_data[nuc_to_idx[origen_to_name(i)]] += origen_dict[i][1] return new_data def origen_data_to_array_weighted(ORIGEN_data, nucs, n_fission_fragments=2.004): # Table 5 is grams table_5_weights = {} table_5_nuclide = ORIGEN_data['table_5']['nuclide'] for key in NUCLIDE_KEYS: table_5_weights[key] = np.sum(origen_to_array(table_5_nuclide[key], nucs), axis=0) table_5_weights['fission_products'] *= n_fission_fragments # Table 4 is atom fraction table_4_nuclide = ORIGEN_data['table_4']['nuclide'] new_data = np.zeros((len(nucs), 1)) for key in NUCLIDE_KEYS: new_data += table_5_weights[key]*origen_to_array(table_4_nuclide[key], nucs) return new_data def origen_data_to_array_atom_fraction(ORIGEN_data, nucs): # Table 4 is atom fraction table_4_nuclide = ORIGEN_data['table_4']['nuclide'] new_data = np.zeros((len(nucs), 1)) for key in NUCLIDE_KEYS: new_data += 
origen_to_array(table_4_nuclide[key], nucs) return new_data def origen_data_to_array_materials(ORIGEN_data, nucs): material = ORIGEN_data['materials'][1] new_data = np.zeros((len(nucs), 1)) nuc_to_idx = {v: i for i, v in enumerate(nucs)} for nuc, atom_frac in material.to_atom_frac().items(): new_data[nuc_to_idx[pyne.nucname.name(nuc)]] = atom_frac return new_data def hash_data(vec, library, time, phi, n_fission_fragments): return hash((tuple(vec.flat), library, time, phi, n_fission_fragments)) def initial_vector(start_nuclide, nucs): nuc_to_idx = {v: i for i, v in enumerate(nucs)} return csr_matrix(([1], [[nuc_to_idx[start_nuclide]], [0]]), shape=(len(nucs), 1)).toarray() def test_origen_data_sanity(ORIGEN_data): for table in ['table_4', 'table_5']: assert table in ORIGEN_data, table assert 'nuclide' in ORIGEN_data[table] nuclide = ORIGEN_data['table_4']['nuclide'] # Sanity check for comb in combinations(NUCLIDE_KEYS, 2): a, b = comb for common in set.intersection(set(nuclide[a]), set(nuclide[b])): array_a, array_b = nuclide[a][common], nuclide[b][common] assert np.allclose(array_a, 0) \ or np.allclose(array_b, 0) # or np.allclose(array_a, array_b) def create_hdf5_table(file, lib, nucs): nucs_size = len(nucs) desc_common = [ ('hash', np.int64), ('library', 'S8'), ('initial vector', np.float64, (nucs_size, 1)), ('time', np.float64), ('phi', np.float64), ('n_fission_fragments', np.float64), ] desc_origen = [ ('execution time ORIGEN', np.float64), ('ORIGEN atom fraction', np.float64, (nucs_size, 1)), ('ORIGEN mass fraction', np.float64, (nucs_size, 1)), ] desc_cram_lambdify = [ ('execution time CRAM lambdify', np.float64), ('CRAM lambdify atom fraction', np.float64, (nucs_size, 1)), ('CRAM lambdify mass fraction', np.float64, (nucs_size, 1)), ] desc_cram_py_solve = [ ('execution time CRAM py_solve', np.float64), ('CRAM py_solve atom fraction', np.float64, (nucs_size, 1)), ('CRAM py_solve mass fraction', np.float64, (nucs_size, 1)), ] h5file = tables.open_file(file, mode="a", title="CRAM/ORIGEN test run data", filters=tables.Filters(complevel=1)) h5file.create_group('/', lib, '%s data' % lib) h5file.create_table('/' + lib, 'origen', np.dtype(desc_common + desc_origen)) h5file.create_table('/' + lib, 'cram-lambdify-umfpack', np.dtype(desc_common + desc_cram_lambdify)) h5file.create_table('/' + lib, 'cram-lambdify-superlu', np.dtype(desc_common + desc_cram_lambdify)) h5file.create_table('/' + lib, 'cram-py_solve', np.dtype(desc_common + desc_cram_py_solve)) h5file.create_array('/' + lib, 'nucs', np.array(nucs, 'S6')) def save_file_origen(file, *, ORIGEN_data, lib, nucs, start_nuclide, time, phi, ORIGEN_time, n_fission_fragments=2.004): with tables.open_file(file, mode="a", title="ORIGEN and CRAM data", filters=tables.Filters(complevel=1)) as h5file: if lib not in h5file.root: create_hdf5_table(file, lib, nucs) table = h5file.get_node(h5file.root, lib + '/origen') table.row['initial vector'] = vec = initial_vector(start_nuclide, nucs) table.row['library'] = lib table.row['hash'] = hash_data(vec, lib, time, phi, n_fission_fragments) table.row['time'] = time table.row['phi'] = phi table.row['n_fission_fragments'] = n_fission_fragments table.row['execution time ORIGEN'] = ORIGEN_time table.row['ORIGEN atom fraction'] = origen_data_to_array_weighted(ORIGEN_data, nucs, n_fission_fragments=n_fission_fragments) table.row['ORIGEN mass fraction'] = origen_data_to_array_materials(ORIGEN_data, nucs) table.row.append() table.flush() def save_file_cram_lambdify(file, *, CRAM_lambdify_res, lib, nucs, 
start_nuclide, time, phi, CRAM_lambdify_time, umfpack, n_fission_fragments=2.004): assert len(CRAM_lambdify_res) == len(nucs) with tables.open_file(file, mode="a", title="ORIGEN and CRAM data", filters=tables.Filters(complevel=1)) as h5file: if lib not in h5file.root: create_hdf5_table(file, lib, nucs) nodename = '/cram-lambdify-umfpack' if umfpack else '/cram-lambdify-superlu' table = h5file.get_node(h5file.root, lib + nodename) table.row['initial vector'] = vec = initial_vector(start_nuclide, nucs) table.row['library'] = lib table.row['hash'] = hash_data(vec, lib, time, phi, n_fission_fragments) table.row['time'] = time table.row['phi'] = phi table.row['n_fission_fragments'] = n_fission_fragments table.row['execution time CRAM lambdify'] = CRAM_lambdify_time table.row['CRAM lambdify atom fraction'] = CRAM_lambdify_res CRAM_lambdify_res_normalized = CRAM_lambdify_res/np.sum(CRAM_lambdify_res) table.row['CRAM lambdify mass fraction'] = CRAM_lambdify_res_normalized table.row.append() table.flush() def save_file_cram_py_solve(file, *, CRAM_py_solve_res, lib, nucs, start_nuclide, time, phi, CRAM_py_solve_time, n_fission_fragments=2.004): assert len(CRAM_py_solve_res) == len(nucs) with tables.open_file(file, mode="a", title="ORIGEN and CRAM data", filters=tables.Filters(complevel=1)) as h5file: if lib not in h5file.root: create_hdf5_table(file, lib, nucs) table = h5file.get_node(h5file.root, lib + '/cram-py_solve') table.row['initial vector'] = vec = initial_vector(start_nuclide, nucs) table.row['library'] = lib table.row['hash'] = hash_data(vec, lib, time, phi, n_fission_fragments) table.row['time'] = time table.row['phi'] = phi table.row['n_fission_fragments'] = n_fission_fragments table.row['execution time CRAM py_solve'] = CRAM_py_solve_time table.row['CRAM py_solve atom fraction'] = CRAM_py_solve_res CRAM_py_solve_res_normalized = CRAM_py_solve_res/np.sum(CRAM_py_solve_res) table.row['CRAM py_solve mass fraction'] = CRAM_py_solve_res_normalized table.row.append() table.flush() def test_origen_against_CRAM_lambdify(xs_tape9, time, nuclide, phi, umfpack, alpha_as_He4=False): e_complex = CRAM_matrix_exp_lambdify() lambdify_desc = 'CRAM lambdify UMFPACK' if umfpack else 'CRAM lambdify SuperLU' logger.info("Running %s %s at time=%s, nuclide=%s, phi=%s", lambdify_desc, xs_tape9, time, nuclide, phi) logger.info('-'*80) alpha_part = '_alpha_as_He4' if alpha_as_He4 else '' npzfilename = os.path.join('data', os.path.splitext(os.path.basename(xs_tape9))[0] + '_' + str(phi) + alpha_part + '.npz') nucs, mat = load_sparse_csr(npzfilename) assert mat.shape[1] == len(nucs) b = initial_vector(nuclide, nucs) use_solver(useUmfpack=umfpack) CRAM_lambdify_time, CRAM_lambdify_res = time_func(e_complex, -mat*float(time), b) CRAM_lambdify_res = np.asarray(CRAM_lambdify_res) logger.info("%s runtime: %s", lambdify_desc, CRAM_lambdify_time) return CRAM_lambdify_time, CRAM_lambdify_res def test_origen_against_CRAM_py_solve(xs_tape9, time, nuclide, phi, alpha_as_He4=False): from .py_solve import expm_multiply14, asflat, N logger.info("Running CRAM pysolve %s at time=%s, nuclide=%s, phi=%s", xs_tape9, time, nuclide, phi) logger.info('-'*80) alpha_part = '_alpha_as_He4' if alpha_as_He4 else '' npzfilename = os.path.join('data', os.path.splitext(os.path.basename(xs_tape9))[0] + '_' + str(phi) + alpha_part + '.npz') nucs, mat = load_sparse_csr(npzfilename) assert mat.shape[1] == len(nucs) == N A = asflat(mat) b = initial_vector(nuclide, nucs) b = np.asarray(b, dtype='float64') CRAM_py_solve_time, CRAM_py_solve_res = 
time_func(expm_multiply14, -A*float(time), b) CRAM_py_solve_res = np.asarray(CRAM_py_solve_res) logger.info("CRAM py_solve runtime: %s", CRAM_py_solve_time) return CRAM_py_solve_time, CRAM_py_solve_res def compute_mismatch(ORIGEN_res_weighted, ORIGEN_res_materials, CRAM_lambdify_umfpack_res, CRAM_lambdify_superlu_res, CRAM_py_solve_res, nucs, rtol=1e-3, atol=1e-5): """ Computes a mismatch analysis for an ORIGEN run vs. CRAM The default rtol is 1e-3 because ORIGEN returns 3 digits. The default atol is 1e-5 because ORIGEN stops the taylor expansion with the error term exp(ASUM)*ASUM**n/n! (using Sterling's approximation), where n = 3.5*ASUM + 6 and ASUM is the max of the column sums. The max of the column sums is ~2 because of fission, giving ~1e-5 (see ORIGEN lines 5075-5100) """ CRAM_lambdify_umfpack_res_normalized = CRAM_lambdify_umfpack_res/np.sum(CRAM_lambdify_umfpack_res) CRAM_lambdify_superlu_res_normalized = CRAM_lambdify_superlu_res/np.sum(CRAM_lambdify_superlu_res) CRAM_py_solve_res_normalized = CRAM_py_solve_res/np.sum(CRAM_py_solve_res) for Clumf, Clsuper, Cpy, O, units in [ (CRAM_lambdify_umfpack_res, CRAM_lambdify_superlu_res, CRAM_py_solve_res, ORIGEN_res_weighted, 'atom fractions'), (CRAM_lambdify_umfpack_res_normalized, CRAM_lambdify_superlu_res_normalized, CRAM_py_solve_res_normalized, ORIGEN_res_materials, 'mass fractions'), # (CRAM_res_normalized, ORIGEN_res_atom_fraction, 'atom fraction'), ]: logger.info("Units: %s", units) d = {'CRAM lambdify UMFPACK': Clumf, 'CRAM lambdify SuperLU': Clsuper, 'CRAM py_solve': Cpy, 'ORIGEN': O} for a_desc, b_desc in ( ['CRAM lambdify UMFPACK', 'CRAM lambdify SuperLU'], ['CRAM lambdify UMFPACK', 'CRAM py_solve'], ['CRAM lambdify SuperLU', 'CRAM py_solve'], ['CRAM lambdify UMFPACK', 'ORIGEN'], ['CRAM lambdify SuperLU', 'ORIGEN'], ['CRAM py_solve', 'ORIGEN'], ): a, b = d[a_desc], d[b_desc] mismatching_indices = array_mismatch(a, b, rtol=rtol, atol=atol) if mismatching_indices: logger.info("%s and %s mismatch: Not equal to tolerance rtol=%s, atol=%s", a_desc, b_desc, rtol, atol) logger.info("Mismatching elements sorted by error (%s, %s, symmetric relative error):", a_desc, b_desc) rel_error = abs(a - b)/(a + b) for i in mismatching_indices: logger.info("%s %s %s %s", nucs[i], a[i], b[i], rel_error[i]) else: logger.info("%s and %s arrays match with rtol=%s atol=%s", a_desc, b_desc, rtol, atol) logger.info('') # TODO: return some information here def array_mismatch(a, b, rtol=1e-3, atol=1e-5): """ Test if arrays a and b mismatch with rtol and atol If they do, return a list of mismatching indices. Otherwise, return False. """ mismatching_indices = [] try: np.testing.assert_allclose(a, b, rtol=rtol, atol=atol) except AssertionError as e: D = np.isclose(a, b, rtol=rtol, atol=atol) rel_error = abs(a - b)/(a + b) for i, in np.argsort(rel_error, axis=0)[::-1]: if D[i]: continue mismatching_indices.append(i) return mismatching_indices else: return False def make_parser(): p = argparse.ArgumentParser('origen', formatter_class=argparse.ArgumentDefaultsHelpFormatter) p.add_argument('xs_tape9', metavar='xs-tape9', help="""path to the cross section TAPE9 file. 
If the path is not absolute, defaults to looking in {LIBS_DIR}""".format(LIBS_DIR=LIBS_DIR)) p.add_argument('time', help='the time in sec', type=float) p.add_argument('--phi', help='the neutron flux in [n/cm^2/sec]', type=float, default=4e14) p.add_argument('--nuclide', help="The initial starting nuclide.", default="U235") p.add_argument('--decay-tape9', help="path to the decay TAPE9 file.", default=decay_TAPE9) p.add_argument('--origen', help="Path to the origen executable", default=ORIGEN) p.add_argument('--no-run-origen', action='store_false', dest='run_origen', help="Don't run origen") p.add_argument('--no-run-cram', action='store_false', dest='run_cram', help="Don't run cram") p.add_argument('--hdf5-file', default='data/results.hdf5', help="""hdf5 file to write results to""") return p def execute(xs_tape9, time, phi, nuclide, hdf5_file='data/results.hdf5', decay_tape9=decay_TAPE9, origen=ORIGEN, run_origen=True, run_cram_lambdify=True, run_cram_py_solve=True, alpha_as_He4=False): lib = os.path.splitext(os.path.basename(xs_tape9))[0] alpha_part = '_alpha_as_He4' if alpha_as_He4 else '' npzfilename = os.path.join('data', lib + '_' + str(phi) + alpha_part + '.npz') nucs, mat = load_sparse_csr(npzfilename) if run_origen: n_fission_fragments = 2.004 ORIGEN_time, ORIGEN_data = execute_origen(xs_tape9, time, nuclide, phi, origen, decay_tape9) ORIGEN_res_weighted = origen_data_to_array_weighted(ORIGEN_data, nucs, n_fission_fragments=n_fission_fragments) ORIGEN_res_materials = origen_data_to_array_materials(ORIGEN_data, nucs) test_origen_data_sanity(ORIGEN_data) save_file_origen(hdf5_file, ORIGEN_data=ORIGEN_data, lib=lib, nucs=nucs, start_nuclide=nuclide, time=time, phi=phi, ORIGEN_time=ORIGEN_time, n_fission_fragments=n_fission_fragments, ) if run_cram_lambdify: try: import scikits.umfpack scikits.umfpack except ImportError: raise ImportError("scikits.umfpack is required. conda install scikit-umfpack") umfpack = True CRAM_lambdify_umfpack_time, CRAM_lambdify_umfpack_res = test_origen_against_CRAM_lambdify(xs_tape9, time, nuclide, phi, umfpack, alpha_as_He4=alpha_as_He4) save_file_cram_lambdify(hdf5_file, CRAM_lambdify_res=CRAM_lambdify_umfpack_res, lib=lib, nucs=nucs, start_nuclide=nuclide, time=time, phi=phi, CRAM_lambdify_time=CRAM_lambdify_umfpack_time, umfpack=umfpack, ) umfpack = False CRAM_lambdify_superlu_time, CRAM_lambdify_superlu_res = test_origen_against_CRAM_lambdify(xs_tape9, time, nuclide, phi, umfpack, alpha_as_He4=alpha_as_He4) save_file_cram_lambdify(hdf5_file, CRAM_lambdify_res=CRAM_lambdify_superlu_res, lib=lib, nucs=nucs, start_nuclide=nuclide, time=time, phi=phi, CRAM_lambdify_time=CRAM_lambdify_superlu_time, umfpack=umfpack, ) if run_cram_py_solve: CRAM_py_solve_time, CRAM_py_solve_res = test_origen_against_CRAM_py_solve(xs_tape9, time, nuclide, phi, alpha_as_He4=alpha_as_He4) save_file_cram_py_solve(hdf5_file, CRAM_py_solve_res=CRAM_py_solve_res, lib=lib, nucs=nucs, start_nuclide=nuclide, time=time, phi=phi, CRAM_py_solve_time=CRAM_py_solve_time, ) if run_origen and run_cram_lambdify and run_cram_py_solve: compute_mismatch(ORIGEN_res_weighted, ORIGEN_res_materials, CRAM_lambdify_umfpack_res, CRAM_lambdify_superlu_res, CRAM_py_solve_res, nucs) def main(): p = make_parser() try: import argcomplete argcomplete.autocomplete(p) except ImportError: pass args = p.parse_args() execute(**vars(args)) if __name__ == '__main__': main()
bsd-3-clause
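compute_mismatch() and array_mismatch() above rank disagreeing entries by a symmetric relative error after an np.isclose() tolerance check. A small sketch of that comparison on hypothetical vectors:

import numpy as np

rtol, atol = 1e-3, 1e-5
a = np.array([1.0000, 2.0, 3.0])
b = np.array([1.0005, 2.5, 3.0])

close = np.isclose(a, b, rtol=rtol, atol=atol)
rel_error = np.abs(a - b) / (a + b)         # symmetric relative error, as in compute_mismatch
for i in np.argsort(rel_error)[::-1]:       # worst offenders first
    if not close[i]:
        print(i, a[i], b[i], rel_error[i])  # only index 1 exceeds the tolerance here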
artnavsegda/avrnavsegda
xmega/bmp/bmp/src/ASF/common/services/gfx_mono/tools/bitmap.py
73
2301
## # \file # # \brief Output a 2 color bitmap as an uint8_t array # # Copyright (C) 2011-2014 Atmel Corporation. All rights reserved. # # \page License # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # 1. Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # # 3. The name of Atmel may not be used to endorse or promote products derived # from this software without specific prior written permission. # # 4. This software may only be redistributed and used in connection with an # Atmel microcontroller product. # # THIS SOFTWARE IS PROVIDED BY ATMEL "AS IS" AND ANY EXPRESS OR IMPLIED # WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF # MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT ARE # EXPRESSLY AND SPECIFICALLY DISCLAIMED. IN NO EVENT SHALL ATMEL BE LIABLE FOR # ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL # DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR # SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER # CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, # OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. from PIL import Image import sys im = Image.open(sys.argv[1]); new_im = im.load() width, height = im.size for y in range(0, height) : for x in range(0, width) : if 0 < new_im[x, y]: new_im[x, y] = 1 sys.stdout.write(str(new_im[x, y])) sys.stdout.write("\n") sys.stdout.write("\n uint8_t image_header[] = {\n") for y in range(0, height, 8) : for x in range(0, width) : first_byte = str(new_im[x, y + 7]) + str(new_im[x, y+6]) + str(new_im[x, y+5]) + str(new_im[x, y+4]) + str(new_im[x, y+3]) + str(new_im[x, y+2]) + str(new_im[x, y+1]) + str(new_im[x, y+0]) print "0x%x," % int(first_byte, 2), sys.stdout.write("};\n")
lgpl-3.0
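bitmap.py emits one byte per column of an 8-pixel-tall page, with the topmost pixel in the least-significant bit. The packing step, shown standalone on a hypothetical 8-pixel column:

# pack 8 vertical pixels (top pixel -> bit 0) into one byte, as the script does per column
column = [1, 0, 1, 1, 0, 0, 0, 1]   # top to bottom
byte = 0
for bit, pixel in enumerate(column):
    byte |= (pixel & 1) << bit
print("0x%x," % byte)                # 0x8d,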
mbareta/edx-platform-ft
lms/djangoapps/courseware/tests/test_module_render.py
8
86350
# -*- coding: utf-8 -*- """ Test for lms courseware app, module render unit """ import ddt import itertools import json from nose.plugins.attrib import attr from functools import partial from bson import ObjectId from django.http import Http404, HttpResponse from django.core.urlresolvers import reverse from django.conf import settings from django.test.client import RequestFactory from django.test.utils import override_settings from django.contrib.auth.models import AnonymousUser from mock import MagicMock, patch, Mock from opaque_keys.edx.keys import UsageKey, CourseKey from opaque_keys.edx.locations import SlashSeparatedCourseKey from pyquery import PyQuery from courseware.module_render import hash_resource from xblock.field_data import FieldData from xblock.runtime import Runtime from xblock.fields import ScopeIds from xblock.core import XBlock, XBlockAside from xblock.fragment import Fragment from capa.tests.response_xml_factory import OptionResponseXMLFactory from course_modes.models import CourseMode from courseware import module_render as render from courseware.courses import get_course_with_access, get_course_info_section from courseware.field_overrides import OverrideFieldData from courseware.model_data import FieldDataCache from courseware.module_render import hash_resource, get_module_for_descriptor from courseware.models import StudentModule from courseware.tests.factories import StudentModuleFactory, UserFactory, GlobalStaffFactory from courseware.tests.tests import LoginEnrollmentTestCase from courseware.tests.test_submitting_problems import TestSubmittingProblems from lms.djangoapps.lms_xblock.runtime import quote_slashes from lms.djangoapps.lms_xblock.field_data import LmsFieldData from openedx.core.lib.courses import course_image_url from openedx.core.lib.gating import api as gating_api from student.models import anonymous_id_for_user from xmodule.modulestore.tests.django_utils import ( ModuleStoreTestCase, SharedModuleStoreTestCase, TEST_DATA_MIXED_MODULESTORE ) from xmodule.lti_module import LTIDescriptor from xmodule.modulestore import ModuleStoreEnum from xmodule.modulestore.django import modulestore from xmodule.modulestore.tests.factories import ItemFactory, CourseFactory, ToyCourseFactory, check_mongo_calls from xmodule.modulestore.tests.test_asides import AsideTestType from xmodule.x_module import XModuleDescriptor, XModule, STUDENT_VIEW, CombinedSystem from openedx.core.djangoapps.credit.models import CreditCourse from openedx.core.djangoapps.credit.api import ( set_credit_requirements, set_credit_requirement_status ) from edx_proctoring.api import ( create_exam, create_exam_attempt, update_attempt_status ) from edx_proctoring.runtime import set_runtime_service from edx_proctoring.tests.test_services import MockCreditService from milestones.tests.utils import MilestonesTestCaseMixin TEST_DATA_DIR = settings.COMMON_TEST_DATA_ROOT @XBlock.needs("field-data") @XBlock.needs("i18n") @XBlock.needs("fs") @XBlock.needs("user") @XBlock.needs("bookmarks") class PureXBlock(XBlock): """ Pure XBlock to use in tests. """ pass class EmptyXModule(XModule): # pylint: disable=abstract-method """ Empty XModule for testing with no dependencies. """ pass class EmptyXModuleDescriptor(XModuleDescriptor): # pylint: disable=abstract-method """ Empty XModule for testing with no dependencies. """ module_class = EmptyXModule class GradedStatelessXBlock(XBlock): """ This XBlock exists to test grade storage for blocks that don't store student state in a scoped field. 
""" @XBlock.json_handler def set_score(self, json_data, suffix): # pylint: disable=unused-argument """ Set the score for this testing XBlock. """ self.runtime.publish( self, 'grade', { 'value': json_data['grade'], 'max_value': 1 } ) @attr('shard_1') @ddt.ddt class ModuleRenderTestCase(SharedModuleStoreTestCase, LoginEnrollmentTestCase): """ Tests of courseware.module_render """ @classmethod def setUpClass(cls): super(ModuleRenderTestCase, cls).setUpClass() cls.course_key = ToyCourseFactory.create().id cls.toy_course = modulestore().get_course(cls.course_key) # TODO: this test relies on the specific setup of the toy course. # It should be rewritten to build the course it needs and then test that. def setUp(self): """ Set up the course and user context """ super(ModuleRenderTestCase, self).setUp() self.mock_user = UserFactory() self.mock_user.id = 1 self.request_factory = RequestFactory() # Construct a mock module for the modulestore to return self.mock_module = MagicMock() self.mock_module.id = 1 self.dispatch = 'score_update' # Construct a 'standard' xqueue_callback url self.callback_url = reverse( 'xqueue_callback', kwargs=dict( course_id=self.course_key.to_deprecated_string(), userid=str(self.mock_user.id), mod_id=self.mock_module.id, dispatch=self.dispatch ) ) def test_get_module(self): self.assertEqual( None, render.get_module('dummyuser', None, 'invalid location', None) ) def test_module_render_with_jump_to_id(self): """ This test validates that the /jump_to_id/<id> shorthand for intracourse linking works assertIn expected. Note there's a HTML element in the 'toy' course with the url_name 'toyjumpto' which defines this linkage """ mock_request = MagicMock() mock_request.user = self.mock_user course = get_course_with_access(self.mock_user, 'load', self.course_key) field_data_cache = FieldDataCache.cache_for_descriptor_descendents( self.course_key, self.mock_user, course, depth=2) module = render.get_module( self.mock_user, mock_request, self.course_key.make_usage_key('html', 'toyjumpto'), field_data_cache, ) # get the rendered HTML output which should have the rewritten link html = module.render(STUDENT_VIEW).content # See if the url got rewritten to the target link # note if the URL mapping changes then this assertion will break self.assertIn('/courses/' + self.course_key.to_deprecated_string() + '/jump_to_id/vertical_test', html) def test_xqueue_callback_success(self): """ Test for happy-path xqueue_callback """ fake_key = 'fake key' xqueue_header = json.dumps({'lms_key': fake_key}) data = { 'xqueue_header': xqueue_header, 'xqueue_body': 'hello world', } # Patch getmodule to return our mock module with patch('courseware.module_render.load_single_xblock', return_value=self.mock_module): # call xqueue_callback with our mocked information request = self.request_factory.post(self.callback_url, data) render.xqueue_callback( request, unicode(self.course_key), self.mock_user.id, self.mock_module.id, self.dispatch ) # Verify that handle ajax is called with the correct data request.POST['queuekey'] = fake_key self.mock_module.handle_ajax.assert_called_once_with(self.dispatch, request.POST) def test_xqueue_callback_missing_header_info(self): data = { 'xqueue_header': '{}', 'xqueue_body': 'hello world', } with patch('courseware.module_render.load_single_xblock', return_value=self.mock_module): # Test with missing xqueue data with self.assertRaises(Http404): request = self.request_factory.post(self.callback_url, {}) render.xqueue_callback( request, unicode(self.course_key), self.mock_user.id, 
self.mock_module.id, self.dispatch ) # Test with missing xqueue_header with self.assertRaises(Http404): request = self.request_factory.post(self.callback_url, data) render.xqueue_callback( request, unicode(self.course_key), self.mock_user.id, self.mock_module.id, self.dispatch ) def test_get_score_bucket(self): self.assertEquals(render.get_score_bucket(0, 10), 'incorrect') self.assertEquals(render.get_score_bucket(1, 10), 'partial') self.assertEquals(render.get_score_bucket(10, 10), 'correct') # get_score_bucket calls error cases 'incorrect' self.assertEquals(render.get_score_bucket(11, 10), 'incorrect') self.assertEquals(render.get_score_bucket(-1, 10), 'incorrect') def test_anonymous_handle_xblock_callback(self): dispatch_url = reverse( 'xblock_handler', args=[ self.course_key.to_deprecated_string(), quote_slashes(self.course_key.make_usage_key('videosequence', 'Toy_Videos').to_deprecated_string()), 'xmodule_handler', 'goto_position' ] ) response = self.client.post(dispatch_url, {'position': 2}) self.assertEquals(403, response.status_code) self.assertEquals('Unauthenticated', response.content) def test_missing_position_handler(self): """ Test that sending POST request without or invalid position argument don't raise server error """ self.client.login(username=self.mock_user.username, password="test") dispatch_url = reverse( 'xblock_handler', args=[ self.course_key.to_deprecated_string(), quote_slashes(self.course_key.make_usage_key('videosequence', 'Toy_Videos').to_deprecated_string()), 'xmodule_handler', 'goto_position' ] ) response = self.client.post(dispatch_url) self.assertEqual(200, response.status_code) self.assertEqual(json.loads(response.content), {'success': True}) response = self.client.post(dispatch_url, {'position': ''}) self.assertEqual(200, response.status_code) self.assertEqual(json.loads(response.content), {'success': True}) response = self.client.post(dispatch_url, {'position': '-1'}) self.assertEqual(200, response.status_code) self.assertEqual(json.loads(response.content), {'success': True}) response = self.client.post(dispatch_url, {'position': "string"}) self.assertEqual(200, response.status_code) self.assertEqual(json.loads(response.content), {'success': True}) response = self.client.post(dispatch_url, {'position': u"Φυσικά"}) self.assertEqual(200, response.status_code) self.assertEqual(json.loads(response.content), {'success': True}) response = self.client.post(dispatch_url, {'position': None}) self.assertEqual(200, response.status_code) self.assertEqual(json.loads(response.content), {'success': True}) @ddt.data('pure', 'vertical') @XBlock.register_temp_plugin(PureXBlock, identifier='pure') def test_rebinding_same_user(self, block_type): request = self.request_factory.get('') request.user = self.mock_user course = CourseFactory() descriptor = ItemFactory(category=block_type, parent=course) field_data_cache = FieldDataCache([self.toy_course, descriptor], self.toy_course.id, self.mock_user) # This is verifying that caching doesn't cause an error during get_module_for_descriptor, which # is why it calls the method twice identically. 
render.get_module_for_descriptor( self.mock_user, request, descriptor, field_data_cache, self.toy_course.id, course=self.toy_course ) render.get_module_for_descriptor( self.mock_user, request, descriptor, field_data_cache, self.toy_course.id, course=self.toy_course ) @override_settings(FIELD_OVERRIDE_PROVIDERS=( 'courseware.student_field_overrides.IndividualStudentOverrideProvider', )) def test_rebind_different_users(self): """ This tests the rebinding a descriptor to a student does not result in overly nested _field_data. """ request = self.request_factory.get('') request.user = self.mock_user course = CourseFactory.create() descriptor = ItemFactory(category='html', parent=course) field_data_cache = FieldDataCache( [course, descriptor], course.id, self.mock_user ) # grab what _field_data was originally set to original_field_data = descriptor._field_data # pylint: disable=protected-access, no-member render.get_module_for_descriptor( self.mock_user, request, descriptor, field_data_cache, course.id, course=course ) # check that _unwrapped_field_data is the same as the original # _field_data, but now _field_data as been reset. # pylint: disable=protected-access, no-member self.assertIs(descriptor._unwrapped_field_data, original_field_data) self.assertIsNot(descriptor._unwrapped_field_data, descriptor._field_data) # now bind this module to a few other students for user in [UserFactory(), UserFactory(), UserFactory()]: render.get_module_for_descriptor( user, request, descriptor, field_data_cache, course.id, course=course ) # _field_data should now be wrapped by LmsFieldData # pylint: disable=protected-access, no-member self.assertIsInstance(descriptor._field_data, LmsFieldData) # the LmsFieldData should now wrap OverrideFieldData self.assertIsInstance( # pylint: disable=protected-access, no-member descriptor._field_data._authored_data._source, OverrideFieldData ) # the OverrideFieldData should point to the original unwrapped field_data self.assertIs( # pylint: disable=protected-access, no-member descriptor._field_data._authored_data._source.fallback, descriptor._unwrapped_field_data ) def test_hash_resource(self): """ Ensure that the resource hasher works and does not fail on unicode, decoded or otherwise. 
""" resources = ['ASCII text', u'❄ I am a special snowflake.', "❄ So am I, but I didn't tell you."] self.assertEqual(hash_resource(resources), 'a76e27c8e80ca3efd7ce743093aa59e0') @attr('shard_1') class TestHandleXBlockCallback(SharedModuleStoreTestCase, LoginEnrollmentTestCase): """ Test the handle_xblock_callback function """ @classmethod def setUpClass(cls): super(TestHandleXBlockCallback, cls).setUpClass() cls.course_key = ToyCourseFactory.create().id cls.toy_course = modulestore().get_course(cls.course_key) def setUp(self): super(TestHandleXBlockCallback, self).setUp() self.location = self.course_key.make_usage_key('chapter', 'Overview') self.mock_user = UserFactory.create() self.request_factory = RequestFactory() # Construct a mock module for the modulestore to return self.mock_module = MagicMock() self.mock_module.id = 1 self.dispatch = 'score_update' # Construct a 'standard' xqueue_callback url self.callback_url = reverse( 'xqueue_callback', kwargs={ 'course_id': self.course_key.to_deprecated_string(), 'userid': str(self.mock_user.id), 'mod_id': self.mock_module.id, 'dispatch': self.dispatch } ) def _mock_file(self, name='file', size=10): """Create a mock file object for testing uploads""" mock_file = MagicMock( size=size, read=lambda: 'x' * size ) # We can't use `name` as a kwarg to Mock to set the name attribute # because mock uses `name` to name the mock itself mock_file.name = name return mock_file def test_invalid_location(self): request = self.request_factory.post('dummy_url', data={'position': 1}) request.user = self.mock_user with self.assertRaises(Http404): render.handle_xblock_callback( request, self.course_key.to_deprecated_string(), 'invalid Location', 'dummy_handler' 'dummy_dispatch' ) def test_too_many_files(self): request = self.request_factory.post( 'dummy_url', data={'file_id': (self._mock_file(), ) * (settings.MAX_FILEUPLOADS_PER_INPUT + 1)} ) request.user = self.mock_user self.assertEquals( render.handle_xblock_callback( request, self.course_key.to_deprecated_string(), quote_slashes(self.location.to_deprecated_string()), 'dummy_handler' ).content, json.dumps({ 'success': 'Submission aborted! Maximum %d files may be submitted at once' % settings.MAX_FILEUPLOADS_PER_INPUT }, indent=2) ) def test_too_large_file(self): inputfile = self._mock_file(size=1 + settings.STUDENT_FILEUPLOAD_MAX_SIZE) request = self.request_factory.post( 'dummy_url', data={'file_id': inputfile} ) request.user = self.mock_user self.assertEquals( render.handle_xblock_callback( request, self.course_key.to_deprecated_string(), quote_slashes(self.location.to_deprecated_string()), 'dummy_handler' ).content, json.dumps({ 'success': 'Submission aborted! 
Your file "%s" is too large (max size: %d MB)' % (inputfile.name, settings.STUDENT_FILEUPLOAD_MAX_SIZE / (1000 ** 2)) }, indent=2) ) def test_xmodule_dispatch(self): request = self.request_factory.post('dummy_url', data={'position': 1}) request.user = self.mock_user response = render.handle_xblock_callback( request, self.course_key.to_deprecated_string(), quote_slashes(self.location.to_deprecated_string()), 'xmodule_handler', 'goto_position', ) self.assertIsInstance(response, HttpResponse) def test_bad_course_id(self): request = self.request_factory.post('dummy_url') request.user = self.mock_user with self.assertRaises(Http404): render.handle_xblock_callback( request, 'bad_course_id', quote_slashes(self.location.to_deprecated_string()), 'xmodule_handler', 'goto_position', ) def test_bad_location(self): request = self.request_factory.post('dummy_url') request.user = self.mock_user with self.assertRaises(Http404): render.handle_xblock_callback( request, self.course_key.to_deprecated_string(), quote_slashes(self.course_key.make_usage_key('chapter', 'bad_location').to_deprecated_string()), 'xmodule_handler', 'goto_position', ) def test_bad_xmodule_dispatch(self): request = self.request_factory.post('dummy_url') request.user = self.mock_user with self.assertRaises(Http404): render.handle_xblock_callback( request, self.course_key.to_deprecated_string(), quote_slashes(self.location.to_deprecated_string()), 'xmodule_handler', 'bad_dispatch', ) def test_missing_handler(self): request = self.request_factory.post('dummy_url') request.user = self.mock_user with self.assertRaises(Http404): render.handle_xblock_callback( request, self.course_key.to_deprecated_string(), quote_slashes(self.location.to_deprecated_string()), 'bad_handler', 'bad_dispatch', ) @XBlock.register_temp_plugin(GradedStatelessXBlock, identifier='stateless_scorer') def test_score_without_student_state(self): course = CourseFactory.create() block = ItemFactory.create(category='stateless_scorer', parent=course) request = self.request_factory.post( 'dummy_url', data=json.dumps({"grade": 0.75}), content_type='application/json' ) request.user = self.mock_user response = render.handle_xblock_callback( request, unicode(course.id), quote_slashes(unicode(block.scope_ids.usage_id)), 'set_score', '', ) self.assertEquals(response.status_code, 200) student_module = StudentModule.objects.get( student=self.mock_user, module_state_key=block.scope_ids.usage_id, ) self.assertEquals(student_module.grade, 0.75) self.assertEquals(student_module.max_grade, 1) @patch.dict('django.conf.settings.FEATURES', {'ENABLE_XBLOCK_VIEW_ENDPOINT': True}) def test_xblock_view_handler(self): args = [ 'edX/toy/2012_Fall', quote_slashes('i4x://edX/toy/videosequence/Toy_Videos'), 'student_view' ] xblock_view_url = reverse( 'xblock_view', args=args ) request = self.request_factory.get(xblock_view_url) request.user = self.mock_user response = render.xblock_view(request, *args) self.assertEquals(200, response.status_code) expected = ['csrf_token', 'html', 'resources'] content = json.loads(response.content) for section in expected: self.assertIn(section, content) doc = PyQuery(content['html']) self.assertEquals(len(doc('div.xblock-student_view-videosequence')), 1) @attr('shard_1') @ddt.ddt class TestTOC(ModuleStoreTestCase): """Check the Table of Contents for a course""" def setup_request_and_course(self, num_finds, num_sends): """ Sets up the toy course in the modulestore and the request object. 
""" self.course_key = ToyCourseFactory.create().id # pylint: disable=attribute-defined-outside-init self.chapter = 'Overview' chapter_url = '%s/%s/%s' % ('/courses', self.course_key, self.chapter) factory = RequestFactory() self.request = factory.get(chapter_url) self.request.user = UserFactory() self.modulestore = self.store._get_modulestore_for_courselike(self.course_key) # pylint: disable=protected-access, attribute-defined-outside-init with self.modulestore.bulk_operations(self.course_key): with check_mongo_calls(num_finds, num_sends): self.toy_course = self.store.get_course(self.course_key, depth=2) # pylint: disable=attribute-defined-outside-init self.field_data_cache = FieldDataCache.cache_for_descriptor_descendents( self.course_key, self.request.user, self.toy_course, depth=2 ) # Mongo makes 3 queries to load the course to depth 2: # - 1 for the course # - 1 for its children # - 1 for its grandchildren # Split makes 6 queries to load the course to depth 2: # - load the structure # - load 5 definitions # Split makes 5 queries to render the toc: # - it loads the active version at the start of the bulk operation # - it loads 4 definitions, because it instantiates 4 VideoModules # each of which access a Scope.content field in __init__ @ddt.data((ModuleStoreEnum.Type.mongo, 3, 0, 0), (ModuleStoreEnum.Type.split, 6, 0, 5)) @ddt.unpack def test_toc_toy_from_chapter(self, default_ms, setup_finds, setup_sends, toc_finds): with self.store.default_store(default_ms): self.setup_request_and_course(setup_finds, setup_sends) expected = ([{'active': True, 'sections': [{'url_name': 'Toy_Videos', 'display_name': u'Toy Videos', 'graded': True, 'format': u'Lecture Sequence', 'due': None, 'active': False}, {'url_name': 'Welcome', 'display_name': u'Welcome', 'graded': True, 'format': '', 'due': None, 'active': False}, {'url_name': 'video_123456789012', 'display_name': 'Test Video', 'graded': True, 'format': '', 'due': None, 'active': False}, {'url_name': 'video_4f66f493ac8f', 'display_name': 'Video', 'graded': True, 'format': '', 'due': None, 'active': False}], 'url_name': 'Overview', 'display_name': u'Overview', 'display_id': u'overview'}, {'active': False, 'sections': [{'url_name': 'toyvideo', 'display_name': 'toyvideo', 'graded': True, 'format': '', 'due': None, 'active': False}], 'url_name': 'secret:magic', 'display_name': 'secret:magic', 'display_id': 'secretmagic'}]) course = self.store.get_course(self.toy_course.id, depth=2) with check_mongo_calls(toc_finds): actual = render.toc_for_course( self.request.user, self.request, course, self.chapter, None, self.field_data_cache ) for toc_section in expected: self.assertIn(toc_section, actual['chapters']) self.assertIsNone(actual['previous_of_active_section']) self.assertIsNone(actual['next_of_active_section']) # Mongo makes 3 queries to load the course to depth 2: # - 1 for the course # - 1 for its children # - 1 for its grandchildren # Split makes 6 queries to load the course to depth 2: # - load the structure # - load 5 definitions # Split makes 5 queries to render the toc: # - it loads the active version at the start of the bulk operation # - it loads 4 definitions, because it instantiates 4 VideoModules # each of which access a Scope.content field in __init__ @ddt.data((ModuleStoreEnum.Type.mongo, 3, 0, 0), (ModuleStoreEnum.Type.split, 6, 0, 5)) @ddt.unpack def test_toc_toy_from_section(self, default_ms, setup_finds, setup_sends, toc_finds): with self.store.default_store(default_ms): self.setup_request_and_course(setup_finds, setup_sends) section = 
'Welcome' expected = ([{'active': True, 'sections': [{'url_name': 'Toy_Videos', 'display_name': u'Toy Videos', 'graded': True, 'format': u'Lecture Sequence', 'due': None, 'active': False}, {'url_name': 'Welcome', 'display_name': u'Welcome', 'graded': True, 'format': '', 'due': None, 'active': True}, {'url_name': 'video_123456789012', 'display_name': 'Test Video', 'graded': True, 'format': '', 'due': None, 'active': False}, {'url_name': 'video_4f66f493ac8f', 'display_name': 'Video', 'graded': True, 'format': '', 'due': None, 'active': False}], 'url_name': 'Overview', 'display_name': u'Overview', 'display_id': u'overview'}, {'active': False, 'sections': [{'url_name': 'toyvideo', 'display_name': 'toyvideo', 'graded': True, 'format': '', 'due': None, 'active': False}], 'url_name': 'secret:magic', 'display_name': 'secret:magic', 'display_id': 'secretmagic'}]) with check_mongo_calls(toc_finds): actual = render.toc_for_course( self.request.user, self.request, self.toy_course, self.chapter, section, self.field_data_cache ) for toc_section in expected: self.assertIn(toc_section, actual['chapters']) self.assertEquals(actual['previous_of_active_section']['url_name'], 'Toy_Videos') self.assertEquals(actual['next_of_active_section']['url_name'], 'video_123456789012') @attr('shard_1') @ddt.ddt @patch.dict('django.conf.settings.FEATURES', {'ENABLE_SPECIAL_EXAMS': True}) class TestProctoringRendering(SharedModuleStoreTestCase): @classmethod def setUpClass(cls): super(TestProctoringRendering, cls).setUpClass() cls.course_key = ToyCourseFactory.create().id """Check the Table of Contents for a course""" def setUp(self): """ Set up the initial mongo datastores """ super(TestProctoringRendering, self).setUp() self.chapter = 'Overview' chapter_url = '%s/%s/%s' % ('/courses', self.course_key, self.chapter) factory = RequestFactory() self.request = factory.get(chapter_url) self.request.user = UserFactory.create() self.user = UserFactory.create() self.modulestore = self.store._get_modulestore_for_courselike(self.course_key) # pylint: disable=protected-access with self.modulestore.bulk_operations(self.course_key): self.toy_course = self.store.get_course(self.course_key, depth=2) self.field_data_cache = FieldDataCache.cache_for_descriptor_descendents( self.course_key, self.request.user, self.toy_course, depth=2 ) @ddt.data( (CourseMode.DEFAULT_MODE_SLUG, False, None, None), ( CourseMode.DEFAULT_MODE_SLUG, True, 'eligible', { 'status': 'eligible', 'short_description': 'Ungraded Practice Exam', 'suggested_icon': '', 'in_completed_state': False } ), ( CourseMode.DEFAULT_MODE_SLUG, True, 'submitted', { 'status': 'submitted', 'short_description': 'Practice Exam Completed', 'suggested_icon': 'fa-check', 'in_completed_state': True } ), ( CourseMode.DEFAULT_MODE_SLUG, True, 'error', { 'status': 'error', 'short_description': 'Practice Exam Failed', 'suggested_icon': 'fa-exclamation-triangle', 'in_completed_state': True } ), ( CourseMode.VERIFIED, False, None, { 'status': 'eligible', 'short_description': 'Proctored Option Available', 'suggested_icon': 'fa-pencil-square-o', 'in_completed_state': False } ), ( CourseMode.VERIFIED, False, 'declined', { 'status': 'declined', 'short_description': 'Taking As Open Exam', 'suggested_icon': 'fa-pencil-square-o', 'in_completed_state': False } ), ( CourseMode.VERIFIED, False, 'submitted', { 'status': 'submitted', 'short_description': 'Pending Session Review', 'suggested_icon': 'fa-spinner fa-spin', 'in_completed_state': True } ), ( CourseMode.VERIFIED, False, 'verified', { 'status': 
'verified', 'short_description': 'Passed Proctoring', 'suggested_icon': 'fa-check', 'in_completed_state': True } ), ( CourseMode.VERIFIED, False, 'rejected', { 'status': 'rejected', 'short_description': 'Failed Proctoring', 'suggested_icon': 'fa-exclamation-triangle', 'in_completed_state': True } ), ( CourseMode.VERIFIED, False, 'error', { 'status': 'error', 'short_description': 'Failed Proctoring', 'suggested_icon': 'fa-exclamation-triangle', 'in_completed_state': True } ), ) @ddt.unpack def test_proctored_exam_toc(self, enrollment_mode, is_practice_exam, attempt_status, expected): """ Generate TOC for a course with a single chapter/sequence which contains proctored exam """ self._setup_test_data(enrollment_mode, is_practice_exam, attempt_status) actual = render.toc_for_course( self.request.user, self.request, self.toy_course, self.chapter, 'Toy_Videos', self.field_data_cache ) section_actual = self._find_section(actual['chapters'], 'Overview', 'Toy_Videos') if expected: self.assertIn(expected, [section_actual['proctoring']]) else: # we expect there not to be a 'proctoring' key in the dict self.assertNotIn('proctoring', section_actual) self.assertIsNone(actual['previous_of_active_section']) self.assertEquals(actual['next_of_active_section']['url_name'], u"Welcome") @ddt.data( ( CourseMode.DEFAULT_MODE_SLUG, True, None, 'Try a proctored exam', True ), ( CourseMode.DEFAULT_MODE_SLUG, True, 'submitted', 'You have submitted this practice proctored exam', False ), ( CourseMode.DEFAULT_MODE_SLUG, True, 'error', 'There was a problem with your practice proctoring session', True ), ( CourseMode.VERIFIED, False, None, 'This exam is proctored', False ), ( CourseMode.VERIFIED, False, 'submitted', 'You have submitted this proctored exam for review', True ), ( CourseMode.VERIFIED, False, 'verified', 'Your proctoring session was reviewed and passed all requirements', False ), ( CourseMode.VERIFIED, False, 'rejected', 'Your proctoring session was reviewed and did not pass requirements', True ), ( CourseMode.VERIFIED, False, 'error', 'There was a problem with your proctoring session', False ), ) @ddt.unpack def test_render_proctored_exam(self, enrollment_mode, is_practice_exam, attempt_status, expected, with_credit_context): """ Verifies gated content from the student view rendering of a sequence this is labeled as a proctored exam """ usage_key = self._setup_test_data(enrollment_mode, is_practice_exam, attempt_status) # initialize some credit requirements, if so then specify if with_credit_context: credit_course = CreditCourse(course_key=self.course_key, enabled=True) credit_course.save() set_credit_requirements( self.course_key, [ { 'namespace': 'reverification', 'name': 'reverification-1', 'display_name': 'ICRV1', 'criteria': {}, }, { 'namespace': 'proctored-exam', 'name': 'Exam1', 'display_name': 'A Proctored Exam', 'criteria': {} } ] ) set_credit_requirement_status( self.request.user.username, self.course_key, 'reverification', 'ICRV1' ) module = render.get_module( self.request.user, self.request, usage_key, self.field_data_cache, wrap_xmodule_display=True, ) content = module.render(STUDENT_VIEW).content self.assertIn(expected, content) def _setup_test_data(self, enrollment_mode, is_practice_exam, attempt_status): """ Helper method to consolidate some courseware/proctoring/credit test harness data """ usage_key = self.course_key.make_usage_key('videosequence', 'Toy_Videos') sequence = self.modulestore.get_item(usage_key) sequence.is_time_limited = True sequence.is_proctored_exam = True 
sequence.is_practice_exam = is_practice_exam self.modulestore.update_item(sequence, self.user.id) self.toy_course = self.modulestore.get_course(self.course_key) # refresh cache after update self.field_data_cache = FieldDataCache.cache_for_descriptor_descendents( self.course_key, self.request.user, self.toy_course, depth=2 ) set_runtime_service( 'credit', MockCreditService(enrollment_mode=enrollment_mode) ) exam_id = create_exam( course_id=unicode(self.course_key), content_id=unicode(sequence.location), exam_name='foo', time_limit_mins=10, is_proctored=True, is_practice_exam=is_practice_exam ) if attempt_status: create_exam_attempt(exam_id, self.request.user.id, taking_as_proctored=True) update_attempt_status(exam_id, self.request.user.id, attempt_status) return usage_key def _find_url_name(self, toc, url_name): """ Helper to return the dict TOC section associated with a Chapter of url_name """ for entry in toc: if entry['url_name'] == url_name: return entry return None def _find_section(self, toc, chapter_url_name, section_url_name): """ Helper to return the dict TOC section associated with a section of url_name """ chapter = self._find_url_name(toc, chapter_url_name) if chapter: return self._find_url_name(chapter['sections'], section_url_name) return None @attr('shard_1') class TestGatedSubsectionRendering(SharedModuleStoreTestCase, MilestonesTestCaseMixin): @classmethod def setUpClass(cls): super(TestGatedSubsectionRendering, cls).setUpClass() cls.course = CourseFactory.create() cls.course.enable_subsection_gating = True cls.course.save() cls.store.update_item(cls.course, 0) """ Test the toc for a course is rendered correctly when there is gated content """ def setUp(self): """ Set up the initial test data """ super(TestGatedSubsectionRendering, self).setUp() self.chapter = ItemFactory.create( parent=self.course, category="chapter", display_name="Chapter" ) self.open_seq = ItemFactory.create( parent=self.chapter, category='sequential', display_name="Open Sequential" ) self.gated_seq = ItemFactory.create( parent=self.chapter, category='sequential', display_name="Gated Sequential" ) self.request = RequestFactory().get('%s/%s/%s' % ('/courses', self.course.id, self.chapter.display_name)) self.request.user = UserFactory() self.field_data_cache = FieldDataCache.cache_for_descriptor_descendents( self.course.id, self.request.user, self.course, depth=2 ) gating_api.add_prerequisite(self.course.id, self.open_seq.location) gating_api.set_required_content(self.course.id, self.gated_seq.location, self.open_seq.location, 100) def _find_url_name(self, toc, url_name): """ Helper to return the TOC section associated with url_name """ for entry in toc: if entry['url_name'] == url_name: return entry return None def _find_sequential(self, toc, chapter_url_name, sequential_url_name): """ Helper to return the sequential associated with sequential_url_name """ chapter = self._find_url_name(toc, chapter_url_name) if chapter: return self._find_url_name(chapter['sections'], sequential_url_name) return None def test_toc_with_gated_sequential(self): """ Test generation of TOC for a course with a gated subsection """ actual = render.toc_for_course( self.request.user, self.request, self.course, self.chapter.display_name, self.open_seq.display_name, self.field_data_cache ) self.assertIsNotNone(self._find_sequential(actual['chapters'], 'Chapter', 'Open_Sequential')) self.assertIsNone(self._find_sequential(actual['chapters'], 'Chapter', 'Gated_Sequential')) self.assertIsNone(self._find_sequential(actual['chapters'], 
'Non-existent_Chapter', 'Non-existent_Sequential')) self.assertIsNone(actual['previous_of_active_section']) self.assertIsNone(actual['next_of_active_section']) @attr('shard_1') @ddt.ddt class TestHtmlModifiers(ModuleStoreTestCase): """ Tests to verify that standard modifications to the output of XModule/XBlock student_view are taking place """ def setUp(self): super(TestHtmlModifiers, self).setUp() self.course = CourseFactory.create() self.request = RequestFactory().get('/') self.request.user = self.user self.request.session = {} self.content_string = '<p>This is the content<p>' self.rewrite_link = '<a href="/static/foo/content">Test rewrite</a>' self.rewrite_bad_link = '<img src="/static//file.jpg" />' self.course_link = '<a href="/course/bar/content">Test course rewrite</a>' self.descriptor = ItemFactory.create( category='html', data=self.content_string + self.rewrite_link + self.rewrite_bad_link + self.course_link ) self.location = self.descriptor.location self.field_data_cache = FieldDataCache.cache_for_descriptor_descendents( self.course.id, self.user, self.descriptor ) def test_xmodule_display_wrapper_enabled(self): module = render.get_module( self.user, self.request, self.location, self.field_data_cache, wrap_xmodule_display=True, ) result_fragment = module.render(STUDENT_VIEW) self.assertEquals(len(PyQuery(result_fragment.content)('div.xblock.xblock-student_view.xmodule_HtmlModule')), 1) def test_xmodule_display_wrapper_disabled(self): module = render.get_module( self.user, self.request, self.location, self.field_data_cache, wrap_xmodule_display=False, ) result_fragment = module.render(STUDENT_VIEW) self.assertNotIn('div class="xblock xblock-student_view xmodule_display xmodule_HtmlModule"', result_fragment.content) def test_static_link_rewrite(self): module = render.get_module( self.user, self.request, self.location, self.field_data_cache, ) result_fragment = module.render(STUDENT_VIEW) self.assertIn( '/c4x/{org}/{course}/asset/foo_content'.format( org=self.course.location.org, course=self.course.location.course, ), result_fragment.content ) def test_static_badlink_rewrite(self): module = render.get_module( self.user, self.request, self.location, self.field_data_cache, ) result_fragment = module.render(STUDENT_VIEW) self.assertIn( '/c4x/{org}/{course}/asset/file.jpg'.format( org=self.course.location.org, course=self.course.location.course, ), result_fragment.content ) def test_static_asset_path_use(self): ''' when a course is loaded with do_import_static=False (see xml_importer.py), then static_asset_path is set as an lms kv in course. That should make static paths not be mangled (ie not changed to c4x://). 
''' module = render.get_module( self.user, self.request, self.location, self.field_data_cache, static_asset_path="toy_course_dir", ) result_fragment = module.render(STUDENT_VIEW) self.assertIn('href="/static/toy_course_dir', result_fragment.content) def test_course_image(self): url = course_image_url(self.course) self.assertTrue(url.startswith('/c4x/')) self.course.static_asset_path = "toy_course_dir" url = course_image_url(self.course) self.assertTrue(url.startswith('/static/toy_course_dir/')) self.course.static_asset_path = "" @override_settings(DEFAULT_COURSE_ABOUT_IMAGE_URL='test.png') @override_settings(STATIC_URL='static/') @ddt.data(ModuleStoreEnum.Type.mongo, ModuleStoreEnum.Type.split) def test_course_image_for_split_course(self, store): """ for split courses if course_image is empty then course_image_url will be the default image url defined in settings """ self.course = CourseFactory.create(default_store=store) self.course.course_image = '' url = course_image_url(self.course) self.assertEqual('static/test.png', url) def test_get_course_info_section(self): self.course.static_asset_path = "toy_course_dir" get_course_info_section(self.request, self.request.user, self.course, "handouts") # NOTE: check handouts output...right now test course seems to have no such content # at least this makes sure get_course_info_section returns without exception def test_course_link_rewrite(self): module = render.get_module( self.user, self.request, self.location, self.field_data_cache, ) result_fragment = module.render(STUDENT_VIEW) self.assertIn( '/courses/{course_id}/bar/content'.format( course_id=self.course.id.to_deprecated_string() ), result_fragment.content ) class XBlockWithJsonInitData(XBlock): """ Pure XBlock to use in tests, with JSON init data. """ the_json_data = None def student_view(self, context=None): # pylint: disable=unused-argument """ A simple view that returns just enough to test. """ frag = Fragment(u"Hello there!") frag.add_javascript(u'alert("Hi!");') frag.initialize_js('ThumbsBlock', self.the_json_data) return frag @attr('shard_1') @ddt.ddt class JsonInitDataTest(ModuleStoreTestCase): """Tests for JSON data injected into the JS init function.""" @ddt.data( ({'a': 17}, '''{"a": 17}'''), ({'xss': '</script>alert("XSS")'}, r'''{"xss": "<\/script>alert(\"XSS\")"}'''), ) @ddt.unpack @XBlock.register_temp_plugin(XBlockWithJsonInitData, identifier='withjson') def test_json_init_data(self, json_data, json_output): XBlockWithJsonInitData.the_json_data = json_data mock_user = UserFactory() mock_request = MagicMock() mock_request.user = mock_user course = CourseFactory() descriptor = ItemFactory(category='withjson', parent=course) field_data_cache = FieldDataCache([course, descriptor], course.id, mock_user) # pylint: disable=no-member module = render.get_module_for_descriptor( mock_user, mock_request, descriptor, field_data_cache, course.id, # pylint: disable=no-member course=course ) html = module.render(STUDENT_VIEW).content self.assertIn(json_output, html) # No matter what data goes in, there should only be one close-script tag. self.assertEqual(html.count("</script>"), 1) class ViewInStudioTest(ModuleStoreTestCase): """Tests for the 'View in Studio' link visiblity.""" def setUp(self): """ Set up the user and request that will be used. 
""" super(ViewInStudioTest, self).setUp() self.staff_user = GlobalStaffFactory.create() self.request = RequestFactory().get('/') self.request.user = self.staff_user self.request.session = {} self.module = None self.default_context = {'bookmarked': False, 'username': self.user.username} def _get_module(self, course_id, descriptor, location): """ Get the module from the course from which to pattern match (or not) the 'View in Studio' buttons """ field_data_cache = FieldDataCache.cache_for_descriptor_descendents( course_id, self.staff_user, descriptor ) return render.get_module( self.staff_user, self.request, location, field_data_cache, ) def setup_mongo_course(self, course_edit_method='Studio'): """ Create a mongo backed course. """ course = CourseFactory.create( course_edit_method=course_edit_method ) descriptor = ItemFactory.create( category='vertical', parent_location=course.location, ) child_descriptor = ItemFactory.create( category='vertical', parent_location=descriptor.location ) self.module = self._get_module(course.id, descriptor, descriptor.location) # pylint: disable=attribute-defined-outside-init self.child_module = self._get_module(course.id, child_descriptor, child_descriptor.location) @attr('shard_1') class MongoViewInStudioTest(ViewInStudioTest): """Test the 'View in Studio' link visibility in a mongo backed course.""" def test_view_in_studio_link_studio_course(self): """Regular Studio courses should see 'View in Studio' links.""" self.setup_mongo_course() result_fragment = self.module.render(STUDENT_VIEW, context=self.default_context) self.assertIn('View Unit in Studio', result_fragment.content) def test_view_in_studio_link_only_in_top_level_vertical(self): """Regular Studio courses should not see 'View in Studio' for child verticals of verticals.""" self.setup_mongo_course() # Render the parent vertical, then check that there is only a single "View Unit in Studio" link. result_fragment = self.module.render(STUDENT_VIEW, context=self.default_context) # The single "View Unit in Studio" link should appear before the first xmodule vertical definition. 
parts = result_fragment.content.split('data-block-type="vertical"') self.assertEqual(3, len(parts), "Did not find two vertical blocks") self.assertIn('View Unit in Studio', parts[0]) self.assertNotIn('View Unit in Studio', parts[1]) self.assertNotIn('View Unit in Studio', parts[2]) def test_view_in_studio_link_xml_authored(self): """Courses that change 'course_edit_method' setting can hide 'View in Studio' links.""" self.setup_mongo_course(course_edit_method='XML') result_fragment = self.module.render(STUDENT_VIEW, context=self.default_context) self.assertNotIn('View Unit in Studio', result_fragment.content) @attr('shard_1') class MixedViewInStudioTest(ViewInStudioTest): """Test the 'View in Studio' link visibility in a mixed mongo backed course.""" MODULESTORE = TEST_DATA_MIXED_MODULESTORE def test_view_in_studio_link_mongo_backed(self): """Mixed mongo courses that are mongo backed should see 'View in Studio' links.""" self.setup_mongo_course() result_fragment = self.module.render(STUDENT_VIEW, context=self.default_context) self.assertIn('View Unit in Studio', result_fragment.content) def test_view_in_studio_link_xml_authored(self): """Courses that change 'course_edit_method' setting can hide 'View in Studio' links.""" self.setup_mongo_course(course_edit_method='XML') result_fragment = self.module.render(STUDENT_VIEW, context=self.default_context) self.assertNotIn('View Unit in Studio', result_fragment.content) @XBlock.tag("detached") class DetachedXBlock(XBlock): """ XBlock marked with the 'detached' flag. """ def student_view(self, context=None): # pylint: disable=unused-argument """ A simple view that returns just enough to test. """ frag = Fragment(u"Hello there!") return frag @attr('shard_1') @patch.dict('django.conf.settings.FEATURES', {'DISPLAY_DEBUG_INFO_TO_STAFF': True, 'DISPLAY_HISTOGRAMS_TO_STAFF': True}) @patch('courseware.module_render.has_access', Mock(return_value=True, autospec=True)) class TestStaffDebugInfo(SharedModuleStoreTestCase): """Tests to verify that Staff Debug Info panel and histograms are displayed to staff.""" @classmethod def setUpClass(cls): super(TestStaffDebugInfo, cls).setUpClass() cls.course = CourseFactory.create() def setUp(self): super(TestStaffDebugInfo, self).setUp() self.user = UserFactory.create() self.request = RequestFactory().get('/') self.request.user = self.user self.request.session = {} problem_xml = OptionResponseXMLFactory().build_xml( question_text='The correct answer is Correct', num_inputs=2, weight=2, options=['Correct', 'Incorrect'], correct_option='Correct' ) self.descriptor = ItemFactory.create( category='problem', data=problem_xml, display_name='Option Response Problem' ) self.location = self.descriptor.location self.field_data_cache = FieldDataCache.cache_for_descriptor_descendents( self.course.id, self.user, self.descriptor ) @patch.dict('django.conf.settings.FEATURES', {'DISPLAY_DEBUG_INFO_TO_STAFF': False}) def test_staff_debug_info_disabled(self): module = render.get_module( self.user, self.request, self.location, self.field_data_cache, ) result_fragment = module.render(STUDENT_VIEW) self.assertNotIn('Staff Debug', result_fragment.content) def test_staff_debug_info_enabled(self): module = render.get_module( self.user, self.request, self.location, self.field_data_cache, ) result_fragment = module.render(STUDENT_VIEW) self.assertIn('Staff Debug', result_fragment.content) @XBlock.register_temp_plugin(DetachedXBlock, identifier='detached-block') def test_staff_debug_info_disabled_for_detached_blocks(self): """Staff markup should 
not be present on detached blocks.""" descriptor = ItemFactory.create( category='detached-block', display_name='Detached Block' ) field_data_cache = FieldDataCache.cache_for_descriptor_descendents( self.course.id, self.user, descriptor ) module = render.get_module( self.user, self.request, descriptor.location, field_data_cache, ) result_fragment = module.render(STUDENT_VIEW) self.assertNotIn('Staff Debug', result_fragment.content) @patch.dict('django.conf.settings.FEATURES', {'DISPLAY_HISTOGRAMS_TO_STAFF': False}) def test_histogram_disabled(self): module = render.get_module( self.user, self.request, self.location, self.field_data_cache, ) result_fragment = module.render(STUDENT_VIEW) self.assertNotIn('histrogram', result_fragment.content) def test_histogram_enabled_for_unscored_xmodules(self): """Histograms should not display for xmodules which are not scored.""" html_descriptor = ItemFactory.create( category='html', data='Here are some course details.' ) field_data_cache = FieldDataCache.cache_for_descriptor_descendents( self.course.id, self.user, self.descriptor ) with patch('openedx.core.lib.xblock_utils.grade_histogram') as mock_grade_histogram: mock_grade_histogram.return_value = [] module = render.get_module( self.user, self.request, html_descriptor.location, field_data_cache, ) module.render(STUDENT_VIEW) self.assertFalse(mock_grade_histogram.called) def test_histogram_enabled_for_scored_xmodules(self): """Histograms should display for xmodules which are scored.""" StudentModuleFactory.create( course_id=self.course.id, module_state_key=self.location, student=UserFactory(), grade=1, max_grade=1, state="{}", ) with patch('openedx.core.lib.xblock_utils.grade_histogram') as mock_grade_histogram: mock_grade_histogram.return_value = [] module = render.get_module( self.user, self.request, self.location, self.field_data_cache, ) module.render(STUDENT_VIEW) self.assertTrue(mock_grade_histogram.called) PER_COURSE_ANONYMIZED_DESCRIPTORS = (LTIDescriptor, ) # The "set" here is to work around the bug that load_classes returns duplicates for multiply-delcared classes. 
PER_STUDENT_ANONYMIZED_DESCRIPTORS = set( class_ for (name, class_) in XModuleDescriptor.load_classes() if not issubclass(class_, PER_COURSE_ANONYMIZED_DESCRIPTORS) ) @attr('shard_1') @ddt.ddt class TestAnonymousStudentId(SharedModuleStoreTestCase, LoginEnrollmentTestCase): """ Test that anonymous_student_id is set correctly across a variety of XBlock types """ @classmethod def setUpClass(cls): super(TestAnonymousStudentId, cls).setUpClass() cls.course_key = ToyCourseFactory.create().id cls.course = modulestore().get_course(cls.course_key) def setUp(self): super(TestAnonymousStudentId, self).setUp() self.user = UserFactory() @patch('courseware.module_render.has_access', Mock(return_value=True, autospec=True)) def _get_anonymous_id(self, course_id, xblock_class): location = course_id.make_usage_key('dummy_category', 'dummy_name') descriptor = Mock( spec=xblock_class, _field_data=Mock(spec=FieldData, name='field_data'), location=location, static_asset_path=None, _runtime=Mock( spec=Runtime, resources_fs=None, mixologist=Mock(_mixins=(), name='mixologist'), name='runtime', ), scope_ids=Mock(spec=ScopeIds), name='descriptor', _field_data_cache={}, _dirty_fields={}, fields={}, days_early_for_beta=None, ) descriptor.runtime = CombinedSystem(descriptor._runtime, None) # pylint: disable=protected-access # Use the xblock_class's bind_for_student method descriptor.bind_for_student = partial(xblock_class.bind_for_student, descriptor) if hasattr(xblock_class, 'module_class'): descriptor.module_class = xblock_class.module_class return render.get_module_for_descriptor_internal( user=self.user, descriptor=descriptor, student_data=Mock(spec=FieldData, name='student_data'), course_id=course_id, track_function=Mock(name='track_function'), # Track Function xqueue_callback_url_prefix=Mock(name='xqueue_callback_url_prefix'), # XQueue Callback Url Prefix request_token='request_token', course=self.course, ).xmodule_runtime.anonymous_student_id @ddt.data(*PER_STUDENT_ANONYMIZED_DESCRIPTORS) def test_per_student_anonymized_id(self, descriptor_class): for course_id in ('MITx/6.00x/2012_Fall', 'MITx/6.00x/2013_Spring'): self.assertEquals( # This value is set by observation, so that later changes to the student # id computation don't break old data '5afe5d9bb03796557ee2614f5c9611fb', self._get_anonymous_id(CourseKey.from_string(course_id), descriptor_class) ) @ddt.data(*PER_COURSE_ANONYMIZED_DESCRIPTORS) def test_per_course_anonymized_id(self, descriptor_class): self.assertEquals( # This value is set by observation, so that later changes to the student # id computation don't break old data 'e3b0b940318df9c14be59acb08e78af5', self._get_anonymous_id(SlashSeparatedCourseKey('MITx', '6.00x', '2012_Fall'), descriptor_class) ) self.assertEquals( # This value is set by observation, so that later changes to the student # id computation don't break old data 'f82b5416c9f54b5ce33989511bb5ef2e', self._get_anonymous_id(SlashSeparatedCourseKey('MITx', '6.00x', '2013_Spring'), descriptor_class) ) @attr('shard_1') @patch('track.views.tracker', autospec=True) class TestModuleTrackingContext(SharedModuleStoreTestCase): """ Ensure correct tracking information is included in events emitted during XBlock callback handling. 
""" @classmethod def setUpClass(cls): super(TestModuleTrackingContext, cls).setUpClass() cls.course = CourseFactory.create() def setUp(self): super(TestModuleTrackingContext, self).setUp() self.user = UserFactory.create() self.request = RequestFactory().get('/') self.request.user = self.user self.request.session = {} self.course = CourseFactory.create() self.problem_xml = OptionResponseXMLFactory().build_xml( question_text='The correct answer is Correct', num_inputs=2, weight=2, options=['Correct', 'Incorrect'], correct_option='Correct' ) def test_context_contains_display_name(self, mock_tracker): problem_display_name = u'Option Response Problem' module_info = self.handle_callback_and_get_module_info(mock_tracker, problem_display_name) self.assertEquals(problem_display_name, module_info['display_name']) @XBlockAside.register_temp_plugin(AsideTestType, 'test_aside') @patch('xmodule.modulestore.mongo.base.CachingDescriptorSystem.applicable_aside_types', lambda self, block: ['test_aside']) @patch('lms.djangoapps.lms_xblock.runtime.LmsModuleSystem.applicable_aside_types', lambda self, block: ['test_aside']) def test_context_contains_aside_info(self, mock_tracker): """ Check that related xblock asides populate information in the 'problem_check' event in case the 'get_event_context' method is exist """ problem_display_name = u'Test Problem' def get_event_context(self, event_type, event): # pylint: disable=unused-argument """ This method return data that should be associated with the "check_problem" event """ return {'content': 'test1', 'data_field': 'test2'} AsideTestType.get_event_context = get_event_context context_info = self.handle_callback_and_get_context_info(mock_tracker, problem_display_name) self.assertIn('asides', context_info) self.assertIn('test_aside', context_info['asides']) self.assertIn('content', context_info['asides']['test_aside']) self.assertEquals(context_info['asides']['test_aside']['content'], 'test1') self.assertIn('data_field', context_info['asides']['test_aside']) self.assertEquals(context_info['asides']['test_aside']['data_field'], 'test2') def handle_callback_and_get_context_info(self, mock_tracker, problem_display_name=None): """ Creates a fake module, invokes the callback and extracts the 'context' metadata from the emitted problem_check event. """ descriptor_kwargs = { 'category': 'problem', 'data': self.problem_xml } if problem_display_name: descriptor_kwargs['display_name'] = problem_display_name descriptor = ItemFactory.create(**descriptor_kwargs) render.handle_xblock_callback( self.request, self.course.id.to_deprecated_string(), quote_slashes(descriptor.location.to_deprecated_string()), 'xmodule_handler', 'problem_check', ) self.assertEquals(len(mock_tracker.send.mock_calls), 1) mock_call = mock_tracker.send.mock_calls[0] event = mock_call[1][0] self.assertEquals(event['event_type'], 'problem_check') return event['context'] def handle_callback_and_get_module_info(self, mock_tracker, problem_display_name=None): """ Creates a fake module, invokes the callback and extracts the 'module' metadata from the emitted problem_check event. 
""" event = self.handle_callback_and_get_context_info(mock_tracker, problem_display_name) return event['module'] def test_missing_display_name(self, mock_tracker): actual_display_name = self.handle_callback_and_get_module_info(mock_tracker)['display_name'] self.assertTrue(actual_display_name.startswith('problem')) def test_library_source_information(self, mock_tracker): """ Check that XBlocks that are inherited from a library include the information about their library block source in events. We patch the modulestore to avoid having to create a library. """ original_usage_key = UsageKey.from_string(u'block-v1:A+B+C+type@problem+block@abcd1234') original_usage_version = ObjectId() mock_get_original_usage = lambda _, key: (original_usage_key, original_usage_version) with patch('xmodule.modulestore.mixed.MixedModuleStore.get_block_original_usage', mock_get_original_usage): module_info = self.handle_callback_and_get_module_info(mock_tracker) self.assertIn('original_usage_key', module_info) self.assertEqual(module_info['original_usage_key'], unicode(original_usage_key)) self.assertIn('original_usage_version', module_info) self.assertEqual(module_info['original_usage_version'], unicode(original_usage_version)) @attr('shard_1') class TestXmoduleRuntimeEvent(TestSubmittingProblems): """ Inherit from TestSubmittingProblems to get functionality that set up a course and problems structure """ def setUp(self): super(TestXmoduleRuntimeEvent, self).setUp() self.homework = self.add_graded_section_to_course('homework') self.problem = self.add_dropdown_to_section(self.homework.location, 'p1', 1) self.grade_dict = {'value': 0.18, 'max_value': 32} self.delete_dict = {'value': None, 'max_value': None} def get_module_for_user(self, user): """Helper function to get useful module at self.location in self.course_id for user""" mock_request = MagicMock() mock_request.user = user field_data_cache = FieldDataCache.cache_for_descriptor_descendents( self.course.id, user, self.course, depth=2) return render.get_module( # pylint: disable=protected-access user, mock_request, self.problem.location, field_data_cache, )._xmodule def set_module_grade_using_publish(self, grade_dict): """Publish the user's grade, takes grade_dict as input""" module = self.get_module_for_user(self.student_user) module.system.publish(module, 'grade', grade_dict) return module def test_xmodule_runtime_publish(self): """Tests the publish mechanism""" self.set_module_grade_using_publish(self.grade_dict) student_module = StudentModule.objects.get(student=self.student_user, module_state_key=self.problem.location) self.assertEqual(student_module.grade, self.grade_dict['value']) self.assertEqual(student_module.max_grade, self.grade_dict['max_value']) def test_xmodule_runtime_publish_delete(self): """Test deleting the grade using the publish mechanism""" module = self.set_module_grade_using_publish(self.grade_dict) module.system.publish(module, 'grade', self.delete_dict) student_module = StudentModule.objects.get(student=self.student_user, module_state_key=self.problem.location) self.assertIsNone(student_module.grade) self.assertIsNone(student_module.max_grade) @patch('courseware.module_render.SCORE_CHANGED.send') def test_score_change_signal(self, send_mock): """Test that a Django signal is generated when a score changes""" self.set_module_grade_using_publish(self.grade_dict) expected_signal_kwargs = { 'sender': None, 'points_possible': self.grade_dict['max_value'], 'points_earned': self.grade_dict['value'], 'user_id': self.student_user.id, 
'course_id': unicode(self.course.id), 'usage_id': unicode(self.problem.location) } send_mock.assert_called_with(**expected_signal_kwargs) @attr('shard_1') class TestRebindModule(TestSubmittingProblems): """ Tests to verify the functionality of rebinding a module. Inherit from TestSubmittingProblems to get functionality that set up a course structure """ def setUp(self): super(TestRebindModule, self).setUp() self.homework = self.add_graded_section_to_course('homework') self.lti = ItemFactory.create(category='lti', parent=self.homework) self.problem = ItemFactory.create(category='problem', parent=self.homework) self.user = UserFactory.create() self.anon_user = AnonymousUser() def get_module_for_user(self, user, item=None): """Helper function to get useful module at self.location in self.course_id for user""" mock_request = MagicMock() mock_request.user = user field_data_cache = FieldDataCache.cache_for_descriptor_descendents( self.course.id, user, self.course, depth=2) if item is None: item = self.lti return render.get_module( # pylint: disable=protected-access user, mock_request, item.location, field_data_cache, )._xmodule def test_rebind_module_to_new_users(self): module = self.get_module_for_user(self.user, self.problem) # Bind the module to another student, which will remove "correct_map" # from the module's _field_data_cache and _dirty_fields. user2 = UserFactory.create() module.descriptor.bind_for_student(module.system, user2.id) # XBlock's save method assumes that if a field is in _dirty_fields, # then it's also in _field_data_cache. If this assumption # doesn't hold, then we get an error trying to bind this module # to a third student, since we've removed "correct_map" from # _field_data cache, but not _dirty_fields, when we bound # this module to the second student. (TNL-2640) user3 = UserFactory.create() module.descriptor.bind_for_student(module.system, user3.id) def test_rebind_noauth_module_to_user_not_anonymous(self): """ Tests that an exception is thrown when rebind_noauth_module_to_user is run from a module bound to a real user """ module = self.get_module_for_user(self.user) user2 = UserFactory() user2.id = 2 with self.assertRaisesRegexp( render.LmsModuleRenderError, "rebind_noauth_module_to_user can only be called from a module bound to an anonymous user" ): self.assertTrue(module.system.rebind_noauth_module_to_user(module, user2)) def test_rebind_noauth_module_to_user_anonymous(self): """ Tests that get_user_module_for_noauth succeeds when rebind_noauth_module_to_user is run from a module bound to AnonymousUser """ module = self.get_module_for_user(self.anon_user) user2 = UserFactory() user2.id = 2 module.system.rebind_noauth_module_to_user(module, user2) self.assertTrue(module) self.assertEqual(module.system.anonymous_student_id, anonymous_id_for_user(user2, self.course.id)) self.assertEqual(module.scope_ids.user_id, user2.id) self.assertEqual(module.descriptor.scope_ids.user_id, user2.id) @attr('shard_1') @ddt.ddt class TestEventPublishing(ModuleStoreTestCase, LoginEnrollmentTestCase): """ Tests of event publishing for both XModules and XBlocks. 
""" def setUp(self): """ Set up the course and user context """ super(TestEventPublishing, self).setUp() self.mock_user = UserFactory() self.mock_user.id = 1 self.request_factory = RequestFactory() @ddt.data('xblock', 'xmodule') @XBlock.register_temp_plugin(PureXBlock, identifier='xblock') @XBlock.register_temp_plugin(EmptyXModuleDescriptor, identifier='xmodule') @patch.object(render, 'make_track_function') def test_event_publishing(self, block_type, mock_track_function): request = self.request_factory.get('') request.user = self.mock_user course = CourseFactory() descriptor = ItemFactory(category=block_type, parent=course) field_data_cache = FieldDataCache([course, descriptor], course.id, self.mock_user) # pylint: disable=no-member block = render.get_module(self.mock_user, request, descriptor.location, field_data_cache) event_type = 'event_type' event = {'event': 'data'} block.runtime.publish(block, event_type, event) mock_track_function.assert_called_once_with(request) mock_track_function.return_value.assert_called_once_with(event_type, event) @attr('shard_1') @ddt.ddt class LMSXBlockServiceBindingTest(SharedModuleStoreTestCase): """ Tests that the LMS Module System (XBlock Runtime) provides an expected set of services. """ @classmethod def setUpClass(cls): super(LMSXBlockServiceBindingTest, cls).setUpClass() cls.course = CourseFactory.create() def setUp(self): """ Set up the user and other fields that will be used to instantiate the runtime. """ super(LMSXBlockServiceBindingTest, self).setUp() self.user = UserFactory() self.student_data = Mock() self.track_function = Mock() self.xqueue_callback_url_prefix = Mock() self.request_token = Mock() @XBlock.register_temp_plugin(PureXBlock, identifier='pure') @ddt.data("user", "i18n", "fs", "field-data", "bookmarks") def test_expected_services_exist(self, expected_service): """ Tests that the 'user', 'i18n', and 'fs' services are provided by the LMS runtime. """ descriptor = ItemFactory(category="pure", parent=self.course) runtime, _ = render.get_module_system_for_user( self.user, self.student_data, descriptor, self.course.id, self.track_function, self.xqueue_callback_url_prefix, self.request_token, course=self.course ) service = runtime.service(descriptor, expected_service) self.assertIsNotNone(service) def test_beta_tester_fields_added(self): """ Tests that the beta tester fields are set on LMS runtime. """ descriptor = ItemFactory(category="pure", parent=self.course) descriptor.days_early_for_beta = 5 runtime, _ = render.get_module_system_for_user( self.user, self.student_data, descriptor, self.course.id, self.track_function, self.xqueue_callback_url_prefix, self.request_token, course=self.course ) # pylint: disable=no-member self.assertFalse(runtime.user_is_beta_tester) self.assertEqual(runtime.days_early_for_beta, 5) class PureXBlockWithChildren(PureXBlock): """ Pure XBlock with children to use in tests. """ has_children = True class EmptyXModuleWithChildren(EmptyXModule): # pylint: disable=abstract-method """ Empty XModule for testing with no dependencies. """ has_children = True class EmptyXModuleDescriptorWithChildren(EmptyXModuleDescriptor): # pylint: disable=abstract-method """ Empty XModule for testing with no dependencies. 
""" module_class = EmptyXModuleWithChildren has_children = True BLOCK_TYPES = ['xblock', 'xmodule'] USER_NUMBERS = range(2) @attr('shard_1') @ddt.ddt class TestFilteredChildren(SharedModuleStoreTestCase): """ Tests that verify access to XBlock/XModule children work correctly even when those children are filtered by the runtime when loaded. """ @classmethod def setUpClass(cls): super(TestFilteredChildren, cls).setUpClass() cls.course = CourseFactory.create() # pylint: disable=attribute-defined-outside-init, no-member def setUp(self): super(TestFilteredChildren, self).setUp() self.users = {number: UserFactory() for number in USER_NUMBERS} self._old_has_access = render.has_access patcher = patch('courseware.module_render.has_access', self._has_access) patcher.start() self.addCleanup(patcher.stop) @ddt.data(*BLOCK_TYPES) @XBlock.register_temp_plugin(PureXBlockWithChildren, identifier='xblock') @XBlock.register_temp_plugin(EmptyXModuleDescriptorWithChildren, identifier='xmodule') def test_unbound(self, block_type): block = self._load_block(block_type) self.assertUnboundChildren(block) @ddt.data(*itertools.product(BLOCK_TYPES, USER_NUMBERS)) @ddt.unpack @XBlock.register_temp_plugin(PureXBlockWithChildren, identifier='xblock') @XBlock.register_temp_plugin(EmptyXModuleDescriptorWithChildren, identifier='xmodule') def test_unbound_then_bound_as_descriptor(self, block_type, user_number): user = self.users[user_number] block = self._load_block(block_type) self.assertUnboundChildren(block) self._bind_block(block, user) self.assertBoundChildren(block, user) @ddt.data(*itertools.product(BLOCK_TYPES, USER_NUMBERS)) @ddt.unpack @XBlock.register_temp_plugin(PureXBlockWithChildren, identifier='xblock') @XBlock.register_temp_plugin(EmptyXModuleDescriptorWithChildren, identifier='xmodule') def test_unbound_then_bound_as_xmodule(self, block_type, user_number): user = self.users[user_number] block = self._load_block(block_type) self.assertUnboundChildren(block) self._bind_block(block, user) # Validate direct XModule access as well if isinstance(block, XModuleDescriptor): self.assertBoundChildren(block._xmodule, user) # pylint: disable=protected-access else: self.assertBoundChildren(block, user) @ddt.data(*itertools.product(BLOCK_TYPES, USER_NUMBERS)) @ddt.unpack @XBlock.register_temp_plugin(PureXBlockWithChildren, identifier='xblock') @XBlock.register_temp_plugin(EmptyXModuleDescriptorWithChildren, identifier='xmodule') def test_bound_only_as_descriptor(self, block_type, user_number): user = self.users[user_number] block = self._load_block(block_type) self._bind_block(block, user) self.assertBoundChildren(block, user) @ddt.data(*itertools.product(BLOCK_TYPES, USER_NUMBERS)) @ddt.unpack @XBlock.register_temp_plugin(PureXBlockWithChildren, identifier='xblock') @XBlock.register_temp_plugin(EmptyXModuleDescriptorWithChildren, identifier='xmodule') def test_bound_only_as_xmodule(self, block_type, user_number): user = self.users[user_number] block = self._load_block(block_type) self._bind_block(block, user) # Validate direct XModule access as well if isinstance(block, XModuleDescriptor): self.assertBoundChildren(block._xmodule, user) # pylint: disable=protected-access else: self.assertBoundChildren(block, user) def _load_block(self, block_type): """ Instantiate an XBlock of `block_type` with the appropriate set of children. 
""" self.parent = ItemFactory(category=block_type, parent=self.course) # Create a child of each block type for each user self.children_for_user = { user: [ ItemFactory(category=child_type, parent=self.parent).scope_ids.usage_id for child_type in BLOCK_TYPES ] for user in self.users.itervalues() } self.all_children = sum(self.children_for_user.values(), []) return modulestore().get_item(self.parent.scope_ids.usage_id) def _bind_block(self, block, user): """ Bind `block` to the supplied `user`. """ course_id = self.course.id field_data_cache = FieldDataCache.cache_for_descriptor_descendents( course_id, user, block, ) return get_module_for_descriptor( user, Mock(name='request', user=user), block, field_data_cache, course_id, course=self.course ) def _has_access(self, user, action, obj, course_key=None): """ Mock implementation of `has_access` used to control which blocks have access to which children during tests. """ if action != 'load': return self._old_has_access(user, action, obj, course_key) if isinstance(obj, XBlock): key = obj.scope_ids.usage_id elif isinstance(obj, UsageKey): key = obj if key == self.parent.scope_ids.usage_id: return True return key in self.children_for_user[user] def assertBoundChildren(self, block, user): """ Ensure the bound children are indeed children. """ self.assertChildren(block, self.children_for_user[user]) def assertUnboundChildren(self, block): """ Ensure unbound children are indeed children. """ self.assertChildren(block, self.all_children) def assertChildren(self, block, child_usage_ids): """ Used to assert that sets of children are equivalent. """ self.assertEquals(set(child_usage_ids), set(child.scope_ids.usage_id for child in block.get_children())) @attr('shard_1') @ddt.ddt class TestDisabledXBlockTypes(ModuleStoreTestCase): """ Tests that verify disabled XBlock types are not loaded. """ # pylint: disable=no-member def setUp(self): super(TestDisabledXBlockTypes, self).setUp() for store in self.store.modulestores: store.disabled_xblock_types = ('video',) @ddt.data(ModuleStoreEnum.Type.mongo, ModuleStoreEnum.Type.split) def test_get_item(self, default_ms): with self.store.default_store(default_ms): course = CourseFactory() for block_type in ('video',): item = ItemFactory(category=block_type, parent=course) item = self.store.get_item(item.scope_ids.usage_id) self.assertEqual(item.__class__.__name__, 'RawDescriptorWithMixins')
agpl-3.0
DevOps4Networks/ansible
test/units/playbook/test_play.py
118
3933
# (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.

# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type

from ansible.compat.tests import unittest
from ansible.compat.tests.mock import patch, MagicMock

from ansible.errors import AnsibleError, AnsibleParserError
from ansible.playbook.block import Block
from ansible.playbook.play import Play
from ansible.playbook.role import Role

from units.mock.loader import DictDataLoader


class TestPlay(unittest.TestCase):

    def setUp(self):
        pass

    def tearDown(self):
        pass

    def test_empty_play(self):
        p = Play.load(dict())
        self.assertEqual(str(p), '')

    def test_basic_play(self):
        p = Play.load(dict(
            name="test play",
            hosts=['foo'],
            gather_facts=False,
            connection='local',
            remote_user="root",
            sudo=True,
            sudo_user="testing",
        ))

    def test_play_with_user(self):
        # the legacy `user` keyword should populate `remote_user`
        p = Play.load(dict(
            name="test play",
            hosts=['foo'],
            user="testing",
            gather_facts=False,
        ))
        self.assertEqual(p.remote_user, "testing")

    def test_play_with_user_conflict(self):
        # specifying both `user` and `remote_user` is a parser error
        play_data = dict(
            name="test play",
            hosts=['foo'],
            user="testing",
            remote_user="testing",
        )
        self.assertRaises(AnsibleParserError, Play.load, play_data)

    def test_play_with_tasks(self):
        p = Play.load(dict(
            name="test play",
            hosts=['foo'],
            gather_facts=False,
            tasks=[dict(action='shell echo "hello world"')],
        ))

    def test_play_with_handlers(self):
        p = Play.load(dict(
            name="test play",
            hosts=['foo'],
            gather_facts=False,
            handlers=[dict(action='shell echo "hello world"')],
        ))

    def test_play_with_pre_tasks(self):
        p = Play.load(dict(
            name="test play",
            hosts=['foo'],
            gather_facts=False,
            pre_tasks=[dict(action='shell echo "hello world"')],
        ))

    def test_play_with_post_tasks(self):
        p = Play.load(dict(
            name="test play",
            hosts=['foo'],
            gather_facts=False,
            post_tasks=[dict(action='shell echo "hello world"')],
        ))

    def test_play_with_roles(self):
        fake_loader = DictDataLoader({
            '/etc/ansible/roles/foo/tasks.yml': """
            - name: role task
              shell: echo "hello world"
            """,
        })

        p = Play.load(dict(
            name="test play",
            hosts=['foo'],
            gather_facts=False,
            roles=['foo'],
        ), loader=fake_loader)

        blocks = p.compile()

    def test_play_compile(self):
        p = Play.load(dict(
            name="test play",
            hosts=['foo'],
            gather_facts=False,
            tasks=[dict(action='shell echo "hello world"')],
        ))

        blocks = p.compile()

        # with a single block, there will still be three
        # implicit meta flush_handler blocks inserted
        self.assertEqual(len(blocks), 4)
gpl-3.0