code (string, 3-1.05M chars) | repo_name (string, 5-104 chars) | path (string, 4-251 chars) | language (1 class) | license (15 classes) | size (int64, 3-1.05M)
---|---|---|---|---|---
from setuptools import setup, find_packages
setup(name='BIOMD0000000322',
      version='20140916',
description='BIOMD0000000322 from BioModels',
url='http://www.ebi.ac.uk/biomodels-main/BIOMD0000000322',
maintainer='Stanley Gu',
      maintainer_email='stanleygu@gmail.com',
packages=find_packages(),
package_data={'': ['*.xml', 'README.md']},
) | biomodels/BIOMD0000000322 | setup.py | Python | cc0-1.0 | 377 |
# -*- coding: utf-8 -*-
# Generated by Django 1.9.4 on 2016-06-19 15:22
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
('caffe', '0005_auto_20160619_1552'),
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='Position',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=100)),
('caffe', models.ForeignKey(default=None, null=True, on_delete=django.db.models.deletion.CASCADE, to='caffe.Caffe')),
],
options={
'ordering': ('name',),
'default_permissions': ('add', 'change', 'delete', 'view'),
},
),
migrations.CreateModel(
name='WorkedHours',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('created_on', models.DateTimeField(auto_now_add=True)),
('updated_on', models.DateTimeField(auto_now=True)),
('start_time', models.TimeField()),
('end_time', models.TimeField()),
('date', models.DateField()),
('caffe', models.ForeignKey(default=None, null=True, on_delete=django.db.models.deletion.CASCADE, to='caffe.Caffe')),
('employee', models.ForeignKey(default=None, null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
('position', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='hours.Position')),
],
options={
'ordering': ('-date', '-end_time'),
'default_permissions': ('add', 'change', 'delete', 'view', 'change_all'),
},
),
migrations.AlterUniqueTogether(
name='position',
unique_together=set([('name', 'caffe')]),
),
]
| VirrageS/io-kawiarnie | caffe/hours/migrations/0001_initial.py | Python | mit | 2,243 |
#! /usr/bin/env python
import sys
import os
sys.path.append(os.path.join('..', '..'))
import unittest
from ibe.commons import gen_public_key
class Test(unittest.TestCase):
def test_gen_public_key(self):
pkey1 = gen_public_key('a@a.com', 'pkg public key 1')
pkey2 = gen_public_key('b@b.com', 'pkg public key 1')
pkey3 = gen_public_key('a@a.com', 'pkg public key 2')
self.assertNotEqual(pkey1, pkey2)
self.assertNotEqual(pkey2, pkey3)
self.assertNotEqual(pkey1, pkey3)
pkey11 = gen_public_key('a@a.com', 'pkg public key 1')
self.assertEqual(pkey1, pkey11)
if __name__ == '__main__':
unittest.main()
| lucasperin/IBE | ibe/test/test.py | Python | mit | 673 |
from django.contrib import admin
'''from tester.models import Club,Member,Signup,Event
class admin_club(admin.ModelAdmin):
list_display=["club_name"]
class admin_event(admin.ModelAdmin):
list_display=["event_name"]
class admin_student(admin.ModelAdmin):
list_display=["usn","name"]
class admin_member(admin.ModelAdmin):
list_display=["club_id","usn"]
admin.site.register(Club,admin_club)
admin.site.register(Member,admin_member)
admin.site.register(Signup,admin_student)
admin.site.register(Event,admin_event)
'''
| anirudhagar13/PES-Portal | pes_portal/club/admin.py | Python | apache-2.0 | 548 |
#!/usr/bin/python3
from gi.repository import Gio, GObject
from SettingsWidgets import *
from TreeListWidgets import List
import collections
import json
import operator
CAN_BACKEND.append("List")
JSON_SETTINGS_PROPERTIES_MAP = {
"description" : "label",
"min" : "mini",
"max" : "maxi",
"step" : "step",
"units" : "units",
"show-value" : "show_value",
"select-dir" : "dir_select",
"height" : "height",
"tooltip" : "tooltip",
"possible" : "possible",
"expand-width" : "expand_width",
"columns" : "columns"
}
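# For illustration (hypothetical values): a JSON property block such as
#   {"description": "Icon size", "min": 16, "max": 48, "step": 2}
# is translated by json_settings_factory below into the widget keyword
# arguments {"label": "Icon size", "mini": 16, "maxi": 48, "step": 2}.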
OPERATIONS = ['<=', '>=', '<', '>', '!=', '=']
OPERATIONS_MAP = {'<': operator.lt, '<=': operator.le, '>': operator.gt, '>=': operator.ge, '!=': operator.ne, '=': operator.eq}
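# JSONSettingsRevealer below splits dependency keys on these operators:
# e.g. (illustrative) a key of "icon-size>24" reveals its children only
# while the "icon-size" setting is greater than 24, while a plain
# "!use-custom" key inverts a boolean setting.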
class JSONSettingsHandler(object):
def __init__(self, filepath, notify_callback=None):
super(JSONSettingsHandler, self).__init__()
self.resume_timeout = None
self.notify_callback = notify_callback
self.filepath = filepath
self.file_obj = Gio.File.new_for_path(self.filepath)
self.file_monitor = self.file_obj.monitor_file(Gio.FileMonitorFlags.SEND_MOVED, None)
self.file_monitor.connect("changed", self.check_settings)
self.bindings = {}
self.listeners = {}
self.deps = {}
self.settings = self.get_settings()
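        # The backing file is assumed to be a JSON object keyed by setting
        # name, each entry carrying at least a "value" (and a "default" for
        # reset_to_defaults), e.g. (illustrative):
        #   {"icon-size": {"default": 22, "value": 24}}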
def bind(self, key, obj, prop, direction, map_get=None, map_set=None):
if direction & (Gio.SettingsBindFlags.SET | Gio.SettingsBindFlags.GET) == 0:
direction |= Gio.SettingsBindFlags.SET | Gio.SettingsBindFlags.GET
binding_info = {"obj": obj, "prop": prop, "dir": direction, "map_get": map_get, "map_set": map_set}
if key not in self.bindings:
self.bindings[key] = []
self.bindings[key].append(binding_info)
if direction & Gio.SettingsBindFlags.GET != 0:
self.set_object_value(binding_info, self.get_value(key))
if direction & Gio.SettingsBindFlags.SET != 0:
binding_info["oid"] = obj.connect("notify::"+prop, self.object_value_changed, key)
def listen(self, key, callback):
if key not in self.listeners:
self.listeners[key] = []
self.listeners[key].append(callback)
def get_value(self, key):
return self.get_property(key, "value")
def set_value(self, key, value):
if value != self.settings[key]["value"]:
self.settings[key]["value"] = value
self.save_settings()
if self.notify_callback:
self.notify_callback(self, key, value)
if key in self.bindings:
for info in self.bindings[key]:
self.set_object_value(info, value)
if key in self.listeners:
for callback in self.listeners[key]:
callback(key, value)
def get_property(self, key, prop):
props = self.settings[key]
return props[prop]
def has_property(self, key, prop):
return prop in self.settings[key]
def has_key(self, key):
return key in self.settings
def object_value_changed(self, obj, value, key):
for info in self.bindings[key]:
if obj == info["obj"]:
value = info["obj"].get_property(info["prop"])
if "map_set" in info and info["map_set"] != None:
value = info["map_set"](value)
for info in self.bindings[key]:
if obj != info["obj"]:
self.set_object_value(info, value)
self.set_value(key, value)
if key in self.listeners:
for callback in self.listeners[key]:
callback(key, value)
def set_object_value(self, info, value):
if info["dir"] & Gio.SettingsBindFlags.GET == 0:
return
with info["obj"].freeze_notify():
if "map_get" in info and info["map_get"] != None:
value = info["map_get"](value)
if value != info["obj"].get_property(info["prop"]) and value is not None:
info["obj"].set_property(info["prop"], value)
def check_settings(self, *args):
old_settings = self.settings
self.settings = self.get_settings()
for key in self.bindings:
new_value = self.settings[key]["value"]
if new_value != old_settings[key]["value"]:
for info in self.bindings[key]:
self.set_object_value(info, new_value)
for key, callback_list in self.listeners.items():
new_value = self.settings[key]["value"]
if new_value != old_settings[key]["value"]:
for callback in callback_list:
callback(key, new_value)
def get_settings(self):
file = open(self.filepath)
raw_data = file.read()
file.close()
try:
settings = json.loads(raw_data, encoding=None, object_pairs_hook=collections.OrderedDict)
except:
raise Exception("Failed to parse settings JSON data for file %s" % (self.filepath))
return settings
def save_settings(self):
self.pause_monitor()
if os.path.exists(self.filepath):
os.remove(self.filepath)
raw_data = json.dumps(self.settings, indent=4)
new_file = open(self.filepath, 'w+')
new_file.write(raw_data)
new_file.close()
self.resume_monitor()
def pause_monitor(self):
self.file_monitor.cancel()
self.handler = None
def resume_monitor(self):
if self.resume_timeout:
GLib.source_remove(self.resume_timeout)
self.resume_timeout = GLib.timeout_add(2000, self.do_resume)
def do_resume(self):
self.file_monitor = self.file_obj.monitor_file(Gio.FileMonitorFlags.SEND_MOVED, None)
self.handler = self.file_monitor.connect("changed", self.check_settings)
self.resume_timeout = None
return False
def reset_to_defaults(self):
for key in self.settings:
if "value" in self.settings[key]:
self.settings[key]["value"] = self.settings[key]["default"]
self.do_key_update(key)
self.save_settings()
def do_key_update(self, key):
if key in self.bindings:
for info in self.bindings[key]:
self.set_object_value(info, self.settings[key]["value"])
if key in self.listeners:
for callback in self.listeners[key]:
callback(key, self.settings[key]["value"])
def load_from_file(self, filepath):
file = open(filepath)
raw_data = file.read()
file.close()
try:
settings = json.loads(raw_data, encoding=None, object_pairs_hook=collections.OrderedDict)
except:
raise Exception("Failed to parse settings JSON data for file %s" % (self.filepath))
for key in self.settings:
if "value" not in self.settings[key]:
continue
if key in settings and "value" in self.settings[key]:
self.settings[key]["value"] = settings[key]["value"]
self.do_key_update(key)
else:
print("Skipping key %s: the key does not exist in %s or has no value" % (key, filepath))
self.save_settings()
def save_to_file(self, filepath):
if os.path.exists(filepath):
os.remove(filepath)
raw_data = json.dumps(self.settings, indent=4)
new_file = open(filepath, 'w+')
new_file.write(raw_data)
new_file.close()
class JSONSettingsRevealer(Gtk.Revealer):
def __init__(self, settings, key):
super(JSONSettingsRevealer, self).__init__()
self.settings = settings
self.key = None
self.op = None
self.value = None
for op in OPERATIONS:
if op in key:
self.op = op
self.key, self.value = key.split(op)
break
if self.key is None:
            if key[:1] == '!':
self.invert = True
self.key = key[1:]
else:
self.invert = False
self.key = key
self.box = Gtk.Box(orientation=Gtk.Orientation.VERTICAL, spacing=15)
Gtk.Revealer.add(self, self.box)
self.set_transition_type(Gtk.RevealerTransitionType.SLIDE_DOWN)
self.set_transition_duration(150)
self.settings.listen(self.key, self.key_changed)
self.key_changed(self.key, self.settings.get_value(self.key))
def add(self, widget):
self.box.pack_start(widget, False, True, 0)
def key_changed(self, key, value):
if self.op is not None:
val_type = type(value)
self.set_reveal_child(OPERATIONS_MAP[self.op](value, val_type(self.value)))
elif value != self.invert:
self.set_reveal_child(True)
else:
self.set_reveal_child(False)
class JSONSettingsBackend(object):
def attach(self):
if hasattr(self, "set_rounding") and self.settings.has_property(self.key, "round"):
self.set_rounding(self.settings.get_property(self.key, "round"))
if hasattr(self, "bind_object"):
bind_object = self.bind_object
else:
bind_object = self.content_widget
if self.bind_dir != None:
self.settings.bind(self.key, bind_object, self.bind_prop, self.bind_dir,
self.map_get if hasattr(self, "map_get") else None,
self.map_set if hasattr(self, "map_set") else None)
else:
self.settings.listen(self.key, self.on_setting_changed)
self.on_setting_changed()
self.connect_widget_handlers()
def set_value(self, value):
self.settings.set_value(self.key, value)
def get_value(self):
return self.settings.get_value(self.key)
def get_range(self):
min = self.settings.get_property(self.key, "min")
max = self.settings.get_property(self.key, "max")
return [min, max]
def on_setting_changed(self, *args):
raise NotImplementedError("SettingsWidget class must implement on_setting_changed().")
def connect_widget_handlers(self, *args):
if self.bind_dir == None:
raise NotImplementedError("SettingsWidget classes with no .bind_dir must implement connect_widget_handlers().")
def json_settings_factory(subclass):
class NewClass(globals()[subclass], JSONSettingsBackend):
def __init__(self, key, settings, properties):
self.key = key
self.settings = settings
kwargs = {}
for prop in properties:
if prop in JSON_SETTINGS_PROPERTIES_MAP:
kwargs[JSON_SETTINGS_PROPERTIES_MAP[prop]] = properties[prop]
elif prop == "options":
kwargs["options"] = []
for value, label in properties[prop].items():
kwargs["options"].append((label, value))
super(NewClass, self).__init__(**kwargs)
self.attach()
return NewClass
for widget in CAN_BACKEND:
globals()["JSONSettings"+widget] = json_settings_factory(widget)
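# With "List" appended to CAN_BACKEND above, this loop generates a
# JSONSettingsList class pairing the List widget with JSONSettingsBackend;
# any further names contributed to CAN_BACKEND by SettingsWidgets are
# wrapped the same way.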
| collinss/Cinnamon | files/usr/share/cinnamon/cinnamon-settings/bin/JsonSettingsWidgets.py | Python | gpl-2.0 | 11,380 |
# -*- coding: utf-8 -*-
"""
***************************************************************************
AlgorithmLocatorFilter.py
-------------------------
Date : May 2017
Copyright : (C) 2017 by Nyall Dawson
Email : nyall dot dawson at gmail dot com
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
__author__ = 'Nyall Dawson'
__date__ = 'May 2017'
__copyright__ = '(C) 2017, Nyall Dawson'
# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'
from qgis.core import (QgsApplication,
QgsProcessingAlgorithm,
QgsLocatorFilter,
QgsLocatorResult)
from processing.gui.MessageDialog import MessageDialog
from processing.gui.AlgorithmDialog import AlgorithmDialog
from qgis.utils import iface
class AlgorithmLocatorFilter(QgsLocatorFilter):
def __init__(self, parent=None):
super(AlgorithmLocatorFilter, self).__init__(parent)
def clone(self):
return AlgorithmLocatorFilter()
def name(self):
return 'processing_alg'
def displayName(self):
return self.tr('Processing Algorithms')
def priority(self):
return QgsLocatorFilter.Low
def prefix(self):
return 'a'
def flags(self):
return QgsLocatorFilter.FlagFast
def fetchResults(self, string, context, feedback):
# collect results in main thread, since this method is inexpensive and
# accessing the processing registry is not thread safe
for a in QgsApplication.processingRegistry().algorithms():
if a.flags() & QgsProcessingAlgorithm.FlagHideFromToolbox:
continue
if QgsLocatorFilter.stringMatches(a.displayName(), string) or [t for t in a.tags() if QgsLocatorFilter.stringMatches(t, string)]:
result = QgsLocatorResult()
result.filter = self
result.displayString = a.displayName()
result.icon = a.icon()
result.userData = a.id()
if string and QgsLocatorFilter.stringMatches(a.displayName(), string):
result.score = float(len(string)) / len(a.displayName())
else:
result.score = 0
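                # e.g. (illustrative) the query "buff" against an algorithm
                # named "Buffer" scores 4/6, so short names matching more
                # of the query rank higher.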
self.resultFetched.emit(result)
def triggerResult(self, result):
alg = QgsApplication.processingRegistry().createAlgorithmById(result.userData)
if alg:
ok, message = alg.canExecute()
if not ok:
dlg = MessageDialog()
dlg.setTitle(self.tr('Missing dependency'))
dlg.setMessage(message)
dlg.exec_()
return
dlg = alg.createCustomParametersWidget(None)
if not dlg:
dlg = AlgorithmDialog(alg)
canvas = iface.mapCanvas()
prevMapTool = canvas.mapTool()
dlg.show()
dlg.exec_()
if canvas.mapTool() != prevMapTool:
try:
canvas.mapTool().reset()
except:
pass
canvas.setMapTool(prevMapTool)
| stevenmizuno/QGIS | python/plugins/processing/gui/AlgorithmLocatorFilter.py | Python | gpl-2.0 | 3,785 |
# -*- coding: utf-8 -*-
# Copyright (C) 2013 Rocky Bernstein
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA.
''' Not a command. A stub class used by a command in its 'main' for
demonstrating how the command works.'''
import os, sys
from import_relative import import_relative
import_relative('lib', '...', 'trepan')
breakpoint = import_relative('breakpoint', '...lib', 'trepan')
default = import_relative('default', '...lib', 'trepan') # Default settings
class MockIO:
def readline(self, prompt='', add_to_history=False):
print prompt
return 'quit'
pass
class MockUserInterface:
def __init__(self):
self.io = MockIO()
return
def confirm(self, msg, default):
print '** %s' % msg
# Ignore the default.
return True
def errmsg(self, msg):
print '** %s' % msg
return
def finalize(self, last_wishes=None):
return
def msg(self, msg):
print msg
return
def msg_nocr(self, msg):
sys.stdout.write(msg)
return
pass
class MockProcessor:
def __init__(self, core):
self.core = core
self.debugger = core.debugger
self.continue_running = False
self.curframe = None
self.event2short = {}
self.frame = None
self.intf = core.debugger.intf
self.last_command = None
self.stack = []
return
def get_int(self, arg, min_value=0, default=1, cmdname=None,
at_most=None):
return None
def undefined_cmd(self, cmd):
self.intf[-1].errmsg('Undefined mock command: "%s' % cmd)
return
pass
# External Egg packages
import tracefilter
class MockDebuggerCore:
def __init__(self, debugger):
self.debugger = debugger
self.execution_status = 'Pre-execution'
self.filename_cache = {}
self.ignore_filter = tracefilter.TraceFilter([])
self.bpmgr = breakpoint.BreakpointManager()
self.processor = MockProcessor(self)
self.step_ignore = -1
self.stop_frame = None
self.last_lineno = None
self.last_filename = None
self.different_line = None
return
def set_next(self, frame, step_events=None):
pass
def stop(self): pass
def canonic(self, filename):
return filename
def canonic_filename(self, frame):
return frame.f_code.co_filename
def filename(self, name):
return name
def is_running(self):
return 'Running' == self.execution_status
def get_file_breaks(self, filename):
return []
pass
class MockDebugger:
def __init__(self):
self.intf = [MockUserInterface()]
self.core = MockDebuggerCore(self)
self.settings = default.DEBUGGER_SETTINGS
self.orig_sys_argv = None
self.program_sys_argv = []
return
def stop(self): pass
def restart_argv(self): return []
pass
def dbg_setup(d = None):
if d is None: d = MockDebugger()
bwproc = import_relative('main', os.path.pardir)
cp = bwproc.BWProcessor(d.core)
return d, cp
| pombreda/pydbgr | trepan/bwprocessor/command/mock.py | Python | gpl-3.0 | 3,931 |
import unittest
class TestAction(unittest.TestCase):
def test_execute_action(self):
from coopy.foundation import Action, RecordClock
from datetime import datetime
from coopy.utils import inject
class Dummy(object):
def __init__(self):
self.exec_count = 0
def business_method_noargs(self):
self.exec_count += 1
def business_method_args(self, arg):
self.exec_count += 2
def business_method_kwargs(self, keyword_arg="test"):
self.exec_count += 3
dummy = Dummy()
# force clock into dummy
inject(dummy, '_clock', RecordClock())
action = Action('caller_id',
'business_method_noargs',
datetime.now(),
(),
{})
action.execute_action(dummy)
self.assertEquals(1, dummy.exec_count)
action = Action('caller_id',
'business_method_args',
datetime.now(),
([1]),
{})
action.execute_action(dummy)
self.assertEquals(3, dummy.exec_count)
action = Action('caller_id',
'business_method_kwargs',
datetime.now(),
(),
{'keyword_arg' : 'test'})
action.execute_action(dummy)
self.assertEquals(6, dummy.exec_count)
class TestRecordClock(unittest.TestCase):
def test_record_clock(self):
from coopy.foundation import RecordClock
clock = RecordClock()
self.assertTrue(len(clock.results) == 0)
dt1 = clock.now()
self.assertEquals(dt1, clock.results[0])
dt2 = clock.now()
self.assertEquals(dt2, clock.results[1])
dt = clock.today()
self.assertEquals(dt, clock.results[2])
utcnow = clock.utcnow()
self.assertEquals(utcnow, clock.results[3])
class TestRestoreClock(unittest.TestCase):
def test_restore_clock(self):
from coopy.foundation import RestoreClock
from datetime import datetime, date
dt1 = datetime.now()
dt2 = date.today()
dt3 = datetime.utcnow()
clock = RestoreClock([dt1, dt2, dt3])
self.assertEquals(dt1, clock.now())
self.assertEquals(dt2, clock.today())
self.assertEquals(dt3, clock.utcnow())
if __name__ == "__main__":
unittest.main()
| felipecruz/coopy | tests/unit/test_foundation.py | Python | bsd-3-clause | 2,547 |
#!python
import sys
import io
import re
import urllib
import urllib2
import urlparse
import lxml.etree
def get_outlinks(url):
'''
url: the url to the page to crawl
'''
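    # Illustrative behaviour: fetching http://example.com/index.html whose
    # body contains <a href="/about"> yields ['http://example.com/about'];
    # relative hrefs are resolved against the page URL and only http://
    # links are kept.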
result = []
if url is None:
return result
html = None
resp = None
try:
url = url.strip()
resp = urllib2.urlopen(url)
if resp.code == 200:
html = resp.read()
except (urllib2.URLError, Exception) as e:
print "can't access {0}: {1}".format(url, e)
finally:
if resp is not None:
resp.close()
if html is None:
return result
html_parser = lxml.etree.HTMLParser()
try:
uhtml = html.decode('utf-8', 'ignore')
tree = lxml.etree.parse(io.StringIO(uhtml), html_parser)
anchors = tree.xpath('//a')
for anchor in anchors:
href = anchor.attrib.get('href', None)
if href is not None:
href = href.strip()
dest = urlparse.urljoin(url, href)
if dest.startswith('http://'):
result.append(dest)
except Exception as e:
print "can't parse {0}: {1}".format(url, e)
return result
def crawl(urls,
max_to_handle,
handle_url,
crawl_test = None,
handle_test = None):
handled = []
visited = set()
i = 0
p = 0
while len(handled) < max_to_handle and i < len(urls):
url = urls[i]
if url not in visited and crawl_test(url):
outlinks = get_outlinks(url)
visited.add(url)
urls.extend(outlinks)
if handle_test(url) and url not in handled:
handle_url(url, p + 1, max_to_handle)
handled.append(url)
p += 1
i += 1
return handled
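# Illustrative use (hypothetical seed and tests): visit pages passing
# crawl_test in list order, append their outlinks to the work list, and
# hand each page passing handle_test to handle_url until max_to_handle
# pages have been handled:
#
#   crawl(['http://www.amazon.com/'], 10, call_downloader_service,
#         crawl_test=lambda url: 'amazon.com' in url,
#         handle_test=lambda url: '/dp/' in url)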
def call_semantics_service(url, i, max_to_handle):
service_pattern = "http://ecology-service.cse.tamu.edu/BigSemanticsService/metadata.xml?url={0}"
qurl = urllib.quote(url)
surl = service_pattern.format(qurl)
resp = urllib2.urlopen(surl)
content = resp.read()
is_downloaded = content.find('DOWNLOAD_DONE') >= 0
is_typed = content.find('</amazon_product>') >= 0
if resp.code == 200 and is_downloaded and is_typed:
print "[{0}/{1}] service called on {2}".format(
i, max_to_handle, url)
else:
print "[{0}/{1}] error calling service: {2}: c={3}, d={4}, t={5}".format(
i, max_to_handle, surl, resp.code, is_downloaded, is_typed)
def call_downloader_service(url, i, max_to_handle):
agent = "Mozilla%2F5.0%20(Windows%20NT%206.2%3B%20Win64%3B%20x64)%20AppleWebKit%2F537.36%20(KHTML%2C%20like%20Gecko)%20Chrome%2F32.0.1667.0%20Safari%2F537.36"
service_pattern = "http://ecology-service.cse.tamu.edu/DownloaderPool/page/download.xml?url={0}&agent={1}"
qurl = urllib.quote(url)
resp = urllib2.urlopen(service_pattern.format(qurl, agent))
if resp.code == 200:
print "[{0}/{1}] successful downloading invocation on {2}".format(
i, max_to_handle, url)
else:
print "[{0}/{1}] downloading error code {2} for {3}".format(
i, max_to_handle, resp.code, url)
if __name__ == '__main__':
if len(sys.argv) < 3:
print "usage: {0} <url_lst_file> <max_to_handle>".format(sys.argv[0])
quit()
f = open(sys.argv[1])
urls = f.readlines()
n = int(sys.argv[2])
    crawl_test = lambda url: url.find('amazon.com') > 0
    p_prod = r'^http://www.amazon.com/([^/]+/)?dp/[^/]+'
    handle_test = lambda url: re.search(p_prod, url) is not None
    handled = crawl(urls, n, call_semantics_service, crawl_test, handle_test)
for url in handled:
print url
| ecologylab/BigSemanticsService | Scripts/utils/crawl.py | Python | apache-2.0 | 3,552 |
import numpy as np
import pytest
import pandas as pd
from pandas import (
MultiIndex,
Series,
)
import pandas._testing as tm
@pytest.mark.parametrize("operation, expected", [("min", "a"), ("max", "b")])
def test_reductions_series_strings(operation, expected):
# GH#31746
ser = Series(["a", "b"], dtype="string")
res_operation_serie = getattr(ser, operation)()
assert res_operation_serie == expected
@pytest.mark.parametrize("as_period", [True, False])
def test_mode_extension_dtype(as_period):
# GH#41927 preserve dt64tz dtype
ser = Series([pd.Timestamp(1979, 4, n) for n in range(1, 5)])
if as_period:
ser = ser.dt.to_period("D")
else:
ser = ser.dt.tz_localize("US/Central")
res = ser.mode()
assert res.dtype == ser.dtype
tm.assert_series_equal(res, ser)
def test_reductions_td64_with_nat():
# GH#8617
ser = Series([0, pd.NaT], dtype="m8[ns]")
exp = ser[0]
assert ser.median() == exp
assert ser.min() == exp
assert ser.max() == exp
@pytest.mark.parametrize("skipna", [True, False])
def test_td64_sum_empty(skipna):
# GH#37151
ser = Series([], dtype="timedelta64[ns]")
result = ser.sum(skipna=skipna)
assert isinstance(result, pd.Timedelta)
assert result == pd.Timedelta(0)
def test_td64_summation_overflow():
# GH#9442
ser = Series(pd.date_range("20130101", periods=100000, freq="H"))
ser[0] += pd.Timedelta("1s 1ms")
# mean
result = (ser - ser.min()).mean()
expected = pd.Timedelta((pd.TimedeltaIndex(ser - ser.min()).asi8 / len(ser)).sum())
# the computation is converted to float so
# might be some loss of precision
assert np.allclose(result.value / 1000, expected.value / 1000)
# sum
msg = "overflow in timedelta operation"
with pytest.raises(ValueError, match=msg):
(ser - ser.min()).sum()
s1 = ser[0:10000]
with pytest.raises(ValueError, match=msg):
(s1 - s1.min()).sum()
s2 = ser[0:1000]
(s2 - s2.min()).sum()
def test_prod_numpy16_bug():
ser = Series([1.0, 1.0, 1.0], index=range(3))
result = ser.prod()
assert not isinstance(result, Series)
def test_sum_with_level():
obj = Series([10.0], index=MultiIndex.from_tuples([(2, 3)]))
with tm.assert_produces_warning(FutureWarning):
result = obj.sum(level=0)
expected = Series([10.0], index=[2])
tm.assert_series_equal(result, expected)
@pytest.mark.parametrize("func", [np.any, np.all])
@pytest.mark.parametrize("kwargs", [{"keepdims": True}, {"out": object()}])
def test_validate_any_all_out_keepdims_raises(kwargs, func):
ser = Series([1, 2])
param = list(kwargs)[0]
name = func.__name__
msg = (
f"the '{param}' parameter is not "
"supported in the pandas "
rf"implementation of {name}\(\)"
)
with pytest.raises(ValueError, match=msg):
func(ser, **kwargs)
def test_validate_sum_initial():
ser = Series([1, 2])
msg = (
r"the 'initial' parameter is not "
r"supported in the pandas "
r"implementation of sum\(\)"
)
with pytest.raises(ValueError, match=msg):
np.sum(ser, initial=10)
def test_validate_median_initial():
ser = Series([1, 2])
msg = (
r"the 'overwrite_input' parameter is not "
r"supported in the pandas "
r"implementation of median\(\)"
)
with pytest.raises(ValueError, match=msg):
# It seems like np.median doesn't dispatch, so we use the
# method instead of the ufunc.
ser.median(overwrite_input=True)
def test_validate_stat_keepdims():
ser = Series([1, 2])
msg = (
r"the 'keepdims' parameter is not "
r"supported in the pandas "
r"implementation of sum\(\)"
)
with pytest.raises(ValueError, match=msg):
np.sum(ser, keepdims=True)
| pandas-dev/pandas | pandas/tests/series/test_reductions.py | Python | bsd-3-clause | 3,878 |
#! /usr/bin/python
# -*- coding: utf-8 -*-
"""
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Title: uploader
Author: David Leclerc
Version: 0.1
Date: 01.07.2017
License: GNU General Public License, Version 3
(http://www.gnu.org/licenses/gpl.html)
Overview: This is a script that uploads all reports to a server.
Notes: ...
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
"""
# LIBRARIES
import os
import pysftp
# USER LIBRARIES
import logger
import errors
import path
import reporter
# Define instances
Logger = logger.Logger("uploader")
# CLASSES
class Uploader(object):
def __init__(self):
"""
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
INIT
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
"""
# Define report
self.report = reporter.getSFTPReport()
def upload(self, sftp, path, ext = None):
"""
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
UPLOAD
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
"""
# Get all files from path
files = os.listdir(path)
# Get inside path
os.chdir(path)
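        # Illustrative: for a local tree exports/BG/2017.json and
        # ext='json', this uploads 2017.json into a matching 'BG' directory
        # on the server (created if absent), recursing into subdirectories
        # and stepping back up one level remotely and locally afterwards.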
# Upload files
for f in files:
# If file
if os.path.isfile(f):
# Verify extension
if "." + ext != os.path.splitext(f)[1]:
# Skip file
continue
# Info
Logger.debug("Uploading: '" + os.getcwd() + "/" + f + "'")
# Upload file
sftp.put(f, preserve_mtime = True)
# If directory
elif os.path.isdir(f):
# If directory does not exist
if f not in sftp.listdir():
# Info
Logger.debug("Making directory: '" + f + "'")
# Make directory
sftp.mkdir(f)
# Move in directory
sftp.cwd(f)
# Upload files in directory
self.upload(sftp, f, ext)
# Get back to original directory on server
sftp.cwd("..")
# Locally as well
os.chdir("..")
def run(self):
"""
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
RUN
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
"""
# Test if report is empty before proceding
if not self.report.isValid():
raise errors.InvalidSFTPReport
# Disable host key checking (FIXME)
cnopts = pysftp.CnOpts()
cnopts.hostkeys = None
        # Instantiate an SFTP object
sftp = pysftp.Connection(
host = self.report.get(["Host"]),
username = self.report.get(["Username"]),
private_key = path.REPORTS.path + self.report.get(["Key"]),
cnopts = cnopts)
# Move to directory
sftp.cwd(self.report.get(["Path"]))
# Upload files
self.upload(sftp, path.EXPORTS.path, "json")
# Close SFTP connection
sftp.close()
def main():
"""
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
MAIN
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
"""
# Define uploader
uploader = Uploader()
# Run it
uploader.run()
# Run this when script is called from terminal
if __name__ == "__main__":
main() | mm22dl/MeinKPS | uploader.py | Python | gpl-3.0 | 3,745 |
# Copyright 2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import copy
import datetime
import uuid
import mock
from oslo_config import cfg
import oslo_utils.fixture
from oslo_utils import timeutils
import six
from testtools import matchers
from keystone import assignment
from keystone import auth
from keystone.common import authorization
from keystone import config
from keystone import exception
from keystone.models import token_model
from keystone.tests import unit
from keystone.tests.unit import default_fixtures
from keystone.tests.unit.ksfixtures import database
from keystone import token
from keystone.token import provider
from keystone import trust
CONF = cfg.CONF
TIME_FORMAT = '%Y-%m-%dT%H:%M:%S.%fZ'
DEFAULT_DOMAIN_ID = CONF.identity.default_domain_id
HOST_URL = 'http://keystone:5001'
def _build_user_auth(token=None, user_id=None, username=None,
password=None, tenant_id=None, tenant_name=None,
trust_id=None):
"""Build auth dictionary.
It will create an auth dictionary based on all the arguments
that it receives.
"""
auth_json = {}
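    # For example (illustrative values), username='FOO', password='foo2',
    # tenant_name='BAR' produces:
    #   {'passwordCredentials': {'username': 'FOO', 'password': 'foo2'},
    #    'tenantName': 'BAR'}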
if token is not None:
auth_json['token'] = token
if username or password:
auth_json['passwordCredentials'] = {}
if username is not None:
auth_json['passwordCredentials']['username'] = username
if user_id is not None:
auth_json['passwordCredentials']['userId'] = user_id
if password is not None:
auth_json['passwordCredentials']['password'] = password
if tenant_name is not None:
auth_json['tenantName'] = tenant_name
if tenant_id is not None:
auth_json['tenantId'] = tenant_id
if trust_id is not None:
auth_json['trust_id'] = trust_id
return auth_json
class AuthTest(unit.TestCase):
def setUp(self):
self.useFixture(database.Database())
super(AuthTest, self).setUp()
self.time_fixture = self.useFixture(oslo_utils.fixture.TimeFixture())
self.load_backends()
self.load_fixtures(default_fixtures)
self.context_with_remote_user = {'environment':
{'REMOTE_USER': 'FOO',
'AUTH_TYPE': 'Negotiate'}}
self.empty_context = {'environment': {}}
self.controller = token.controllers.Auth()
def assertEqualTokens(self, a, b, enforce_audit_ids=True):
"""Assert that two tokens are equal.
Compare two tokens except for their ids. This also truncates
the time in the comparison.
"""
def normalize(token):
token['access']['token']['id'] = 'dummy'
del token['access']['token']['expires']
del token['access']['token']['issued_at']
del token['access']['token']['audit_ids']
return token
self.assertCloseEnoughForGovernmentWork(
timeutils.parse_isotime(a['access']['token']['expires']),
timeutils.parse_isotime(b['access']['token']['expires']))
self.assertCloseEnoughForGovernmentWork(
timeutils.parse_isotime(a['access']['token']['issued_at']),
timeutils.parse_isotime(b['access']['token']['issued_at']))
if enforce_audit_ids:
self.assertIn(a['access']['token']['audit_ids'][0],
b['access']['token']['audit_ids'])
self.assertThat(len(a['access']['token']['audit_ids']),
matchers.LessThan(3))
self.assertThat(len(b['access']['token']['audit_ids']),
matchers.LessThan(3))
return self.assertDictEqual(normalize(a), normalize(b))
class AuthBadRequests(AuthTest):
def test_no_external_auth(self):
"""Verify that _authenticate_external() raises exception if N/A."""
self.assertRaises(
token.controllers.ExternalAuthNotApplicable,
self.controller._authenticate_external,
context={}, auth={})
def test_empty_remote_user(self):
"""Verify that _authenticate_external() raises exception if
REMOTE_USER is set as the empty string.
"""
context = {'environment': {'REMOTE_USER': ''}}
self.assertRaises(
token.controllers.ExternalAuthNotApplicable,
self.controller._authenticate_external,
context=context, auth={})
def test_no_token_in_auth(self):
"""Verify that _authenticate_token() raises exception if no token."""
self.assertRaises(
exception.ValidationError,
self.controller._authenticate_token,
None, {})
def test_no_credentials_in_auth(self):
"""Verify that _authenticate_local() raises exception if no creds."""
self.assertRaises(
exception.ValidationError,
self.controller._authenticate_local,
None, {})
def test_empty_username_and_userid_in_auth(self):
"""Verify that empty username and userID raises ValidationError."""
self.assertRaises(
exception.ValidationError,
self.controller._authenticate_local,
None, {'passwordCredentials': {'password': 'abc',
'userId': '', 'username': ''}})
def test_authenticate_blank_request_body(self):
"""Verify sending empty json dict raises the right exception."""
self.assertRaises(exception.ValidationError,
self.controller.authenticate,
{}, {})
def test_authenticate_blank_auth(self):
"""Verify sending blank 'auth' raises the right exception."""
body_dict = _build_user_auth()
self.assertRaises(exception.ValidationError,
self.controller.authenticate,
{}, body_dict)
def test_authenticate_invalid_auth_content(self):
"""Verify sending invalid 'auth' raises the right exception."""
self.assertRaises(exception.ValidationError,
self.controller.authenticate,
{}, {'auth': 'abcd'})
def test_authenticate_user_id_too_large(self):
"""Verify sending large 'userId' raises the right exception."""
body_dict = _build_user_auth(user_id='0' * 65, username='FOO',
password='foo2')
self.assertRaises(exception.ValidationSizeError,
self.controller.authenticate,
{}, body_dict)
def test_authenticate_username_too_large(self):
"""Verify sending large 'username' raises the right exception."""
body_dict = _build_user_auth(username='0' * 65, password='foo2')
self.assertRaises(exception.ValidationSizeError,
self.controller.authenticate,
{}, body_dict)
def test_authenticate_tenant_id_too_large(self):
"""Verify sending large 'tenantId' raises the right exception."""
body_dict = _build_user_auth(username='FOO', password='foo2',
tenant_id='0' * 65)
self.assertRaises(exception.ValidationSizeError,
self.controller.authenticate,
{}, body_dict)
def test_authenticate_tenant_name_too_large(self):
"""Verify sending large 'tenantName' raises the right exception."""
body_dict = _build_user_auth(username='FOO', password='foo2',
tenant_name='0' * 65)
self.assertRaises(exception.ValidationSizeError,
self.controller.authenticate,
{}, body_dict)
def test_authenticate_token_too_large(self):
"""Verify sending large 'token' raises the right exception."""
body_dict = _build_user_auth(token={'id': '0' * 8193})
self.assertRaises(exception.ValidationSizeError,
self.controller.authenticate,
{}, body_dict)
def test_authenticate_password_too_large(self):
"""Verify sending large 'password' raises the right exception."""
length = CONF.identity.max_password_length + 1
body_dict = _build_user_auth(username='FOO', password='0' * length)
self.assertRaises(exception.ValidationSizeError,
self.controller.authenticate,
{}, body_dict)
class AuthWithToken(AuthTest):
def test_unscoped_token(self):
"""Verify getting an unscoped token with password creds."""
body_dict = _build_user_auth(username='FOO',
password='foo2')
unscoped_token = self.controller.authenticate({}, body_dict)
self.assertNotIn('tenant', unscoped_token['access']['token'])
def test_auth_invalid_token(self):
"""Verify exception is raised if invalid token."""
body_dict = _build_user_auth(token={"id": uuid.uuid4().hex})
self.assertRaises(
exception.Unauthorized,
self.controller.authenticate,
{}, body_dict)
def test_auth_bad_formatted_token(self):
"""Verify exception is raised if invalid token."""
body_dict = _build_user_auth(token={})
self.assertRaises(
exception.ValidationError,
self.controller.authenticate,
{}, body_dict)
def test_auth_unscoped_token_no_project(self):
"""Verify getting an unscoped token with an unscoped token."""
body_dict = _build_user_auth(
username='FOO',
password='foo2')
unscoped_token = self.controller.authenticate({}, body_dict)
body_dict = _build_user_auth(
token=unscoped_token["access"]["token"])
unscoped_token_2 = self.controller.authenticate({}, body_dict)
self.assertEqualTokens(unscoped_token, unscoped_token_2)
def test_auth_unscoped_token_project(self):
"""Verify getting a token in a tenant with an unscoped token."""
# Add a role in so we can check we get this back
self.assignment_api.add_role_to_user_and_project(
self.user_foo['id'],
self.tenant_bar['id'],
self.role_member['id'])
# Get an unscoped token
body_dict = _build_user_auth(
username='FOO',
password='foo2')
unscoped_token = self.controller.authenticate({}, body_dict)
# Get a token on BAR tenant using the unscoped token
body_dict = _build_user_auth(
token=unscoped_token["access"]["token"],
tenant_name="BAR")
scoped_token = self.controller.authenticate({}, body_dict)
tenant = scoped_token["access"]["token"]["tenant"]
roles = scoped_token["access"]["metadata"]["roles"]
self.assertEqual(self.tenant_bar['id'], tenant["id"])
self.assertThat(roles, matchers.Contains(self.role_member['id']))
def test_auth_scoped_token_bad_project_with_debug(self):
"""Authenticating with an invalid project fails."""
# Bug 1379952 reports poor user feedback, even in debug mode,
# when the user accidentally passes a project name as an ID.
# This test intentionally does exactly that.
body_dict = _build_user_auth(
username=self.user_foo['name'],
password=self.user_foo['password'],
tenant_id=self.tenant_bar['name'])
# with debug enabled, this produces a friendly exception.
self.config_fixture.config(debug=True)
e = self.assertRaises(
exception.Unauthorized,
self.controller.authenticate,
{}, body_dict)
# explicitly verify that the error message shows that a *name* is
# found where an *ID* is expected
self.assertIn(
'Project ID not found: %s' % self.tenant_bar['name'],
six.text_type(e))
def test_auth_scoped_token_bad_project_without_debug(self):
"""Authenticating with an invalid project fails."""
# Bug 1379952 reports poor user feedback, even in debug mode,
# when the user accidentally passes a project name as an ID.
# This test intentionally does exactly that.
body_dict = _build_user_auth(
username=self.user_foo['name'],
password=self.user_foo['password'],
tenant_id=self.tenant_bar['name'])
# with debug disabled, authentication failure details are suppressed.
self.config_fixture.config(debug=False)
e = self.assertRaises(
exception.Unauthorized,
self.controller.authenticate,
{}, body_dict)
# explicitly verify that the error message details above have been
# suppressed.
self.assertNotIn(
'Project ID not found: %s' % self.tenant_bar['name'],
six.text_type(e))
def test_auth_token_project_group_role(self):
"""Verify getting a token in a tenant with group roles."""
# Add a v2 style role in so we can check we get this back
self.assignment_api.add_role_to_user_and_project(
self.user_foo['id'],
self.tenant_bar['id'],
self.role_member['id'])
# Now create a group role for this user as well
domain1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
self.resource_api.create_domain(domain1['id'], domain1)
new_group = {'domain_id': domain1['id'], 'name': uuid.uuid4().hex}
new_group = self.identity_api.create_group(new_group)
self.identity_api.add_user_to_group(self.user_foo['id'],
new_group['id'])
self.assignment_api.create_grant(
group_id=new_group['id'],
project_id=self.tenant_bar['id'],
role_id=self.role_admin['id'])
# Get a scoped token for the tenant
body_dict = _build_user_auth(
username='FOO',
password='foo2',
tenant_name="BAR")
scoped_token = self.controller.authenticate({}, body_dict)
tenant = scoped_token["access"]["token"]["tenant"]
roles = scoped_token["access"]["metadata"]["roles"]
self.assertEqual(self.tenant_bar['id'], tenant["id"])
self.assertIn(self.role_member['id'], roles)
self.assertIn(self.role_admin['id'], roles)
def test_belongs_to_no_tenant(self):
r = self.controller.authenticate(
{},
auth={
'passwordCredentials': {
'username': self.user_foo['name'],
'password': self.user_foo['password']
}
})
unscoped_token_id = r['access']['token']['id']
self.assertRaises(
exception.Unauthorized,
self.controller.validate_token,
dict(is_admin=True, query_string={'belongsTo': 'BAR'}),
token_id=unscoped_token_id)
def test_belongs_to(self):
body_dict = _build_user_auth(
username='FOO',
password='foo2',
tenant_name="BAR")
scoped_token = self.controller.authenticate({}, body_dict)
scoped_token_id = scoped_token['access']['token']['id']
self.assertRaises(
exception.Unauthorized,
self.controller.validate_token,
dict(is_admin=True, query_string={'belongsTo': 'me'}),
token_id=scoped_token_id)
self.assertRaises(
exception.Unauthorized,
self.controller.validate_token,
dict(is_admin=True, query_string={'belongsTo': 'BAR'}),
token_id=scoped_token_id)
def test_token_auth_with_binding(self):
self.config_fixture.config(group='token', bind=['kerberos'])
body_dict = _build_user_auth()
unscoped_token = self.controller.authenticate(
self.context_with_remote_user, body_dict)
# the token should have bind information in it
bind = unscoped_token['access']['token']['bind']
self.assertEqual('FOO', bind['kerberos'])
body_dict = _build_user_auth(
token=unscoped_token['access']['token'],
tenant_name='BAR')
# using unscoped token without remote user context fails
self.assertRaises(
exception.Unauthorized,
self.controller.authenticate,
self.empty_context, body_dict)
# using token with remote user context succeeds
scoped_token = self.controller.authenticate(
self.context_with_remote_user, body_dict)
# the bind information should be carried over from the original token
bind = scoped_token['access']['token']['bind']
self.assertEqual('FOO', bind['kerberos'])
def test_deleting_role_revokes_token(self):
role_controller = assignment.controllers.Role()
project1 = {'id': 'Project1', 'name': uuid.uuid4().hex,
'domain_id': DEFAULT_DOMAIN_ID}
self.resource_api.create_project(project1['id'], project1)
role_one = {'id': 'role_one', 'name': uuid.uuid4().hex}
self.role_api.create_role(role_one['id'], role_one)
self.assignment_api.add_role_to_user_and_project(
self.user_foo['id'], project1['id'], role_one['id'])
no_context = {}
# Get a scoped token for the tenant
body_dict = _build_user_auth(
username=self.user_foo['name'],
password=self.user_foo['password'],
tenant_name=project1['name'])
token = self.controller.authenticate(no_context, body_dict)
# Ensure it is valid
token_id = token['access']['token']['id']
self.controller.validate_token(
dict(is_admin=True, query_string={}),
token_id=token_id)
# Delete the role, which should invalidate the token
role_controller.delete_role(
dict(is_admin=True, query_string={}), role_one['id'])
# Check the token is now invalid
self.assertRaises(
exception.TokenNotFound,
self.controller.validate_token,
dict(is_admin=True, query_string={}),
token_id=token_id)
def test_deleting_role_assignment_does_not_revoke_unscoped_token(self):
no_context = {}
admin_context = dict(is_admin=True, query_string={})
project = {
'id': uuid.uuid4().hex,
'name': uuid.uuid4().hex,
'domain_id': DEFAULT_DOMAIN_ID}
self.resource_api.create_project(project['id'], project)
role = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
self.role_api.create_role(role['id'], role)
self.assignment_api.add_role_to_user_and_project(
self.user_foo['id'], project['id'], role['id'])
# Get an unscoped token.
token = self.controller.authenticate(no_context, _build_user_auth(
username=self.user_foo['name'],
password=self.user_foo['password']))
token_id = token['access']['token']['id']
# Ensure it is valid
self.controller.validate_token(admin_context, token_id=token_id)
# Delete the role assignment, which should not invalidate the token,
# because we're not consuming it with just an unscoped token.
self.assignment_api.remove_role_from_user_and_project(
self.user_foo['id'], project['id'], role['id'])
# Ensure it is still valid
self.controller.validate_token(admin_context, token_id=token_id)
def test_only_original_audit_id_is_kept(self):
context = {}
def get_audit_ids(token):
return token['access']['token']['audit_ids']
# get a token
body_dict = _build_user_auth(username='FOO', password='foo2')
unscoped_token = self.controller.authenticate(context, body_dict)
starting_audit_id = get_audit_ids(unscoped_token)[0]
self.assertIsNotNone(starting_audit_id)
# get another token to ensure the correct parent audit_id is set
body_dict = _build_user_auth(token=unscoped_token["access"]["token"])
unscoped_token_2 = self.controller.authenticate(context, body_dict)
audit_ids = get_audit_ids(unscoped_token_2)
self.assertThat(audit_ids, matchers.HasLength(2))
self.assertThat(audit_ids[-1], matchers.Equals(starting_audit_id))
# get another token from token 2 and ensure the correct parent
# audit_id is set
body_dict = _build_user_auth(token=unscoped_token_2["access"]["token"])
unscoped_token_3 = self.controller.authenticate(context, body_dict)
audit_ids = get_audit_ids(unscoped_token_3)
self.assertThat(audit_ids, matchers.HasLength(2))
self.assertThat(audit_ids[-1], matchers.Equals(starting_audit_id))
def test_revoke_by_audit_chain_id_original_token(self):
self.config_fixture.config(group='token', revoke_by_id=False)
context = {}
# get a token
body_dict = _build_user_auth(username='FOO', password='foo2')
unscoped_token = self.controller.authenticate(context, body_dict)
token_id = unscoped_token['access']['token']['id']
self.time_fixture.advance_time_seconds(1)
# get a second token
body_dict = _build_user_auth(token=unscoped_token["access"]["token"])
unscoped_token_2 = self.controller.authenticate(context, body_dict)
token_2_id = unscoped_token_2['access']['token']['id']
self.time_fixture.advance_time_seconds(1)
self.token_provider_api.revoke_token(token_id, revoke_chain=True)
self.assertRaises(exception.TokenNotFound,
self.token_provider_api.validate_v2_token,
token_id=token_id)
self.assertRaises(exception.TokenNotFound,
self.token_provider_api.validate_v2_token,
token_id=token_2_id)
def test_revoke_by_audit_chain_id_chained_token(self):
self.config_fixture.config(group='token', revoke_by_id=False)
context = {}
# get a token
body_dict = _build_user_auth(username='FOO', password='foo2')
unscoped_token = self.controller.authenticate(context, body_dict)
token_id = unscoped_token['access']['token']['id']
self.time_fixture.advance_time_seconds(1)
# get a second token
body_dict = _build_user_auth(token=unscoped_token["access"]["token"])
unscoped_token_2 = self.controller.authenticate(context, body_dict)
token_2_id = unscoped_token_2['access']['token']['id']
self.time_fixture.advance_time_seconds(1)
self.token_provider_api.revoke_token(token_2_id, revoke_chain=True)
self.assertRaises(exception.TokenNotFound,
self.token_provider_api.validate_v2_token,
token_id=token_id)
self.assertRaises(exception.TokenNotFound,
self.token_provider_api.validate_v2_token,
token_id=token_2_id)
def _mock_audit_info(self, parent_audit_id):
        # NOTE(morganfainberg): The token model and other cases that are
# extracting the audit id expect 'None' if the audit id doesn't
# exist. This ensures that the audit_id is None and the
# audit_chain_id will also return None.
return [None, None]
def test_revoke_with_no_audit_info(self):
self.config_fixture.config(group='token', revoke_by_id=False)
context = {}
with mock.patch.object(provider, 'audit_info', self._mock_audit_info):
# get a token
body_dict = _build_user_auth(username='FOO', password='foo2')
unscoped_token = self.controller.authenticate(context, body_dict)
token_id = unscoped_token['access']['token']['id']
self.time_fixture.advance_time_seconds(1)
# get a second token
body_dict = _build_user_auth(
token=unscoped_token['access']['token'])
unscoped_token_2 = self.controller.authenticate(context, body_dict)
token_2_id = unscoped_token_2['access']['token']['id']
self.time_fixture.advance_time_seconds(1)
self.token_provider_api.revoke_token(token_id, revoke_chain=True)
self.time_fixture.advance_time_seconds(1)
revoke_events = self.revoke_api.list_events()
self.assertThat(revoke_events, matchers.HasLength(1))
revoke_event = revoke_events[0].to_dict()
self.assertIn('expires_at', revoke_event)
self.assertEqual(unscoped_token_2['access']['token']['expires'],
revoke_event['expires_at'])
self.assertRaises(exception.TokenNotFound,
self.token_provider_api.validate_v2_token,
token_id=token_id)
self.assertRaises(exception.TokenNotFound,
self.token_provider_api.validate_v2_token,
token_id=token_2_id)
# get a new token, with no audit info
body_dict = _build_user_auth(username='FOO', password='foo2')
unscoped_token = self.controller.authenticate(context, body_dict)
token_id = unscoped_token['access']['token']['id']
self.time_fixture.advance_time_seconds(1)
# get a second token
body_dict = _build_user_auth(
token=unscoped_token['access']['token'])
unscoped_token_2 = self.controller.authenticate(context, body_dict)
token_2_id = unscoped_token_2['access']['token']['id']
self.time_fixture.advance_time_seconds(1)
# Revoke by audit_id, no audit_info means both parent and child
# token are revoked.
self.token_provider_api.revoke_token(token_id)
self.time_fixture.advance_time_seconds(1)
revoke_events = self.revoke_api.list_events()
self.assertThat(revoke_events, matchers.HasLength(2))
revoke_event = revoke_events[1].to_dict()
self.assertIn('expires_at', revoke_event)
self.assertEqual(unscoped_token_2['access']['token']['expires'],
revoke_event['expires_at'])
self.assertRaises(exception.TokenNotFound,
self.token_provider_api.validate_v2_token,
token_id=token_id)
self.assertRaises(exception.TokenNotFound,
self.token_provider_api.validate_v2_token,
token_id=token_2_id)
class AuthWithPasswordCredentials(AuthTest):
def test_auth_invalid_user(self):
"""Verify exception is raised if invalid user."""
body_dict = _build_user_auth(
username=uuid.uuid4().hex,
password=uuid.uuid4().hex)
self.assertRaises(
exception.Unauthorized,
self.controller.authenticate,
{}, body_dict)
def test_auth_valid_user_invalid_password(self):
"""Verify exception is raised if invalid password."""
body_dict = _build_user_auth(
username="FOO",
password=uuid.uuid4().hex)
self.assertRaises(
exception.Unauthorized,
self.controller.authenticate,
{}, body_dict)
def test_auth_empty_password(self):
"""Verify exception is raised if empty password."""
body_dict = _build_user_auth(
username="FOO",
password="")
self.assertRaises(
exception.Unauthorized,
self.controller.authenticate,
{}, body_dict)
def test_auth_no_password(self):
"""Verify exception is raised if empty password."""
body_dict = _build_user_auth(username="FOO")
self.assertRaises(
exception.ValidationError,
self.controller.authenticate,
{}, body_dict)
def test_authenticate_blank_password_credentials(self):
"""Sending empty dict as passwordCredentials raises a 400 error."""
body_dict = {'passwordCredentials': {}, 'tenantName': 'demo'}
self.assertRaises(exception.ValidationError,
self.controller.authenticate,
{}, body_dict)
def test_authenticate_no_username(self):
"""Verify skipping username raises the right exception."""
body_dict = _build_user_auth(password="pass",
tenant_name="demo")
self.assertRaises(exception.ValidationError,
self.controller.authenticate,
{}, body_dict)
def test_bind_without_remote_user(self):
self.config_fixture.config(group='token', bind=['kerberos'])
body_dict = _build_user_auth(username='FOO', password='foo2',
tenant_name='BAR')
token = self.controller.authenticate({}, body_dict)
self.assertNotIn('bind', token['access']['token'])
def test_change_default_domain_id(self):
# If the default_domain_id config option is not the default then the
# user in auth data is from the new default domain.
# 1) Create a new domain.
new_domain_id = uuid.uuid4().hex
new_domain = {
'description': uuid.uuid4().hex,
'enabled': True,
'id': new_domain_id,
'name': uuid.uuid4().hex,
}
self.resource_api.create_domain(new_domain_id, new_domain)
# 2) Create user "foo" in new domain with different password than
# default-domain foo.
new_user_password = uuid.uuid4().hex
new_user = {
'name': self.user_foo['name'],
'domain_id': new_domain_id,
'password': new_user_password,
'email': 'foo@bar2.com',
}
new_user = self.identity_api.create_user(new_user)
# 3) Update the default_domain_id config option to the new domain
self.config_fixture.config(group='identity',
default_domain_id=new_domain_id)
# 4) Authenticate as "foo" using the password in the new domain.
body_dict = _build_user_auth(
username=self.user_foo['name'],
password=new_user_password)
# The test is successful if this doesn't raise, so no need to assert.
self.controller.authenticate({}, body_dict)
class AuthWithRemoteUser(AuthTest):
def test_unscoped_remote_authn(self):
"""Verify getting an unscoped token with external authn."""
body_dict = _build_user_auth(
username='FOO',
password='foo2')
local_token = self.controller.authenticate(
{}, body_dict)
body_dict = _build_user_auth()
remote_token = self.controller.authenticate(
self.context_with_remote_user, body_dict)
self.assertEqualTokens(local_token, remote_token,
enforce_audit_ids=False)
def test_unscoped_remote_authn_jsonless(self):
"""Verify that external auth with invalid request fails."""
self.assertRaises(
exception.ValidationError,
self.controller.authenticate,
{'REMOTE_USER': 'FOO'},
None)
def test_scoped_remote_authn(self):
"""Verify getting a token with external authn."""
body_dict = _build_user_auth(
username='FOO',
password='foo2',
tenant_name='BAR')
local_token = self.controller.authenticate(
{}, body_dict)
body_dict = _build_user_auth(
tenant_name='BAR')
remote_token = self.controller.authenticate(
self.context_with_remote_user, body_dict)
self.assertEqualTokens(local_token, remote_token,
enforce_audit_ids=False)
def test_scoped_nometa_remote_authn(self):
"""Verify getting a token with external authn and no metadata."""
body_dict = _build_user_auth(
username='TWO',
password='two2',
tenant_name='BAZ')
local_token = self.controller.authenticate(
{}, body_dict)
body_dict = _build_user_auth(tenant_name='BAZ')
remote_token = self.controller.authenticate(
{'environment': {'REMOTE_USER': 'TWO'}}, body_dict)
self.assertEqualTokens(local_token, remote_token,
enforce_audit_ids=False)
def test_scoped_remote_authn_invalid_user(self):
"""Verify that external auth with invalid user fails."""
body_dict = _build_user_auth(tenant_name="BAR")
self.assertRaises(
exception.Unauthorized,
self.controller.authenticate,
{'environment': {'REMOTE_USER': uuid.uuid4().hex}},
body_dict)
def test_bind_with_kerberos(self):
self.config_fixture.config(group='token', bind=['kerberos'])
body_dict = _build_user_auth(tenant_name="BAR")
token = self.controller.authenticate(self.context_with_remote_user,
body_dict)
self.assertEqual('FOO', token['access']['token']['bind']['kerberos'])
def test_bind_without_config_opt(self):
self.config_fixture.config(group='token', bind=['x509'])
body_dict = _build_user_auth(tenant_name='BAR')
token = self.controller.authenticate(self.context_with_remote_user,
body_dict)
self.assertNotIn('bind', token['access']['token'])
class AuthWithTrust(AuthTest):
def setUp(self):
super(AuthWithTrust, self).setUp()
self.trust_controller = trust.controllers.TrustV3()
self.auth_v3_controller = auth.controllers.Auth()
self.trustor = self.user_foo
self.trustee = self.user_two
self.assigned_roles = [self.role_member['id'],
self.role_browser['id']]
for assigned_role in self.assigned_roles:
self.assignment_api.add_role_to_user_and_project(
self.trustor['id'], self.tenant_bar['id'], assigned_role)
self.sample_data = {'trustor_user_id': self.trustor['id'],
'trustee_user_id': self.trustee['id'],
'project_id': self.tenant_bar['id'],
'impersonation': True,
'roles': [{'id': self.role_browser['id']},
{'name': self.role_member['name']}]}
def config_overrides(self):
super(AuthWithTrust, self).config_overrides()
self.config_fixture.config(group='trust', enabled=True)
def _create_auth_context(self, token_id):
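        # Build a request context carrying the auth context derived from a validated token.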
token_ref = token_model.KeystoneToken(
token_id=token_id,
token_data=self.token_provider_api.validate_token(token_id))
auth_context = authorization.token_to_auth_context(token_ref)
return {'environment': {authorization.AUTH_CONTEXT_ENV: auth_context},
'token_id': token_id,
'host_url': HOST_URL}
def create_trust(self, trust_data, trustor_name, expires_at=None,
impersonation=True):
username = trustor_name
password = 'foo2'
unscoped_token = self.get_unscoped_token(username, password)
context = self._create_auth_context(
unscoped_token['access']['token']['id'])
trust_data_copy = copy.deepcopy(trust_data)
trust_data_copy['expires_at'] = expires_at
trust_data_copy['impersonation'] = impersonation
return self.trust_controller.create_trust(
context, trust=trust_data_copy)['trust']
def get_unscoped_token(self, username, password='foo2'):
body_dict = _build_user_auth(username=username, password=password)
return self.controller.authenticate({}, body_dict)
def build_v2_token_request(self, username, password, trust,
tenant_id=None):
if not tenant_id:
tenant_id = self.tenant_bar['id']
unscoped_token = self.get_unscoped_token(username, password)
unscoped_token_id = unscoped_token['access']['token']['id']
request_body = _build_user_auth(token={'id': unscoped_token_id},
trust_id=trust['id'],
tenant_id=tenant_id)
return request_body
def test_create_trust_bad_data_fails(self):
unscoped_token = self.get_unscoped_token(self.trustor['name'])
context = self._create_auth_context(
unscoped_token['access']['token']['id'])
bad_sample_data = {'trustor_user_id': self.trustor['id'],
'project_id': self.tenant_bar['id'],
'roles': [{'id': self.role_browser['id']}]}
self.assertRaises(exception.ValidationError,
self.trust_controller.create_trust,
context, trust=bad_sample_data)
def test_create_trust_no_roles(self):
unscoped_token = self.get_unscoped_token(self.trustor['name'])
context = {'token_id': unscoped_token['access']['token']['id']}
self.sample_data['roles'] = []
self.assertRaises(exception.Forbidden,
self.trust_controller.create_trust,
context, trust=self.sample_data)
def test_create_trust(self):
expires_at = (timeutils.utcnow() +
datetime.timedelta(minutes=10)).strftime(TIME_FORMAT)
new_trust = self.create_trust(self.sample_data, self.trustor['name'],
expires_at=expires_at)
self.assertEqual(self.trustor['id'], new_trust['trustor_user_id'])
self.assertEqual(self.trustee['id'], new_trust['trustee_user_id'])
role_ids = [self.role_browser['id'], self.role_member['id']]
self.assertTrue(timeutils.parse_strtime(new_trust['expires_at'],
fmt=TIME_FORMAT))
self.assertIn('%s/v3/OS-TRUST/' % HOST_URL,
new_trust['links']['self'])
self.assertIn('%s/v3/OS-TRUST/' % HOST_URL,
new_trust['roles_links']['self'])
for role in new_trust['roles']:
self.assertIn(role['id'], role_ids)
def test_create_trust_expires_bad(self):
self.assertRaises(exception.ValidationTimeStampError,
self.create_trust, self.sample_data,
self.trustor['name'], expires_at="bad")
self.assertRaises(exception.ValidationTimeStampError,
self.create_trust, self.sample_data,
self.trustor['name'], expires_at="")
self.assertRaises(exception.ValidationTimeStampError,
self.create_trust, self.sample_data,
self.trustor['name'], expires_at="Z")
def test_create_trust_expires_older_than_now(self):
self.assertRaises(exception.ValidationExpirationError,
self.create_trust, self.sample_data,
self.trustor['name'],
expires_at="2010-06-04T08:44:31.999999Z")
def test_create_trust_without_project_id(self):
"""Verify that trust can be created without project id and
token can be generated with that trust.
"""
unscoped_token = self.get_unscoped_token(self.trustor['name'])
context = self._create_auth_context(
unscoped_token['access']['token']['id'])
self.sample_data['project_id'] = None
self.sample_data['roles'] = []
new_trust = self.trust_controller.create_trust(
context, trust=self.sample_data)['trust']
self.assertEqual(self.trustor['id'], new_trust['trustor_user_id'])
self.assertEqual(self.trustee['id'], new_trust['trustee_user_id'])
self.assertIs(new_trust['impersonation'], True)
auth_response = self.fetch_v2_token_from_trust(new_trust)
token_user = auth_response['access']['user']
self.assertEqual(token_user['id'], new_trust['trustor_user_id'])
def test_get_trust(self):
unscoped_token = self.get_unscoped_token(self.trustor['name'])
context = self._create_auth_context(
unscoped_token['access']['token']['id'])
new_trust = self.trust_controller.create_trust(
context, trust=self.sample_data)['trust']
trust = self.trust_controller.get_trust(context,
new_trust['id'])['trust']
self.assertEqual(self.trustor['id'], trust['trustor_user_id'])
self.assertEqual(self.trustee['id'], trust['trustee_user_id'])
role_ids = [self.role_browser['id'], self.role_member['id']]
for role in new_trust['roles']:
self.assertIn(role['id'], role_ids)
def test_get_trust_without_auth_context(self):
"""Verify that a trust cannot be retrieved when the auth context is
missing.
"""
unscoped_token = self.get_unscoped_token(self.trustor['name'])
context = self._create_auth_context(
unscoped_token['access']['token']['id'])
new_trust = self.trust_controller.create_trust(
context, trust=self.sample_data)['trust']
# Delete the auth context before calling get_trust().
del context['environment'][authorization.AUTH_CONTEXT_ENV]
self.assertRaises(exception.Forbidden,
self.trust_controller.get_trust, context,
new_trust['id'])
def test_create_trust_no_impersonation(self):
new_trust = self.create_trust(self.sample_data, self.trustor['name'],
expires_at=None, impersonation=False)
self.assertEqual(self.trustor['id'], new_trust['trustor_user_id'])
self.assertEqual(self.trustee['id'], new_trust['trustee_user_id'])
self.assertIs(new_trust['impersonation'], False)
auth_response = self.fetch_v2_token_from_trust(new_trust)
token_user = auth_response['access']['user']
self.assertEqual(token_user['id'], new_trust['trustee_user_id'])
# TODO(ayoung): Endpoints
def test_create_trust_impersonation(self):
new_trust = self.create_trust(self.sample_data, self.trustor['name'])
self.assertEqual(self.trustor['id'], new_trust['trustor_user_id'])
self.assertEqual(self.trustee['id'], new_trust['trustee_user_id'])
self.assertIs(new_trust['impersonation'], True)
auth_response = self.fetch_v2_token_from_trust(new_trust)
token_user = auth_response['access']['user']
self.assertEqual(token_user['id'], new_trust['trustor_user_id'])
def test_token_from_trust_wrong_user_fails(self):
new_trust = self.create_trust(self.sample_data, self.trustor['name'])
request_body = self.build_v2_token_request('FOO', 'foo2', new_trust)
self.assertRaises(exception.Forbidden, self.controller.authenticate,
{}, request_body)
def test_token_from_trust_wrong_project_fails(self):
for assigned_role in self.assigned_roles:
self.assignment_api.add_role_to_user_and_project(
self.trustor['id'], self.tenant_baz['id'], assigned_role)
new_trust = self.create_trust(self.sample_data, self.trustor['name'])
request_body = self.build_v2_token_request('TWO', 'two2', new_trust,
self.tenant_baz['id'])
self.assertRaises(exception.Forbidden, self.controller.authenticate,
{}, request_body)
def fetch_v2_token_from_trust(self, trust):
request_body = self.build_v2_token_request('TWO', 'two2', trust)
auth_response = self.controller.authenticate({}, request_body)
return auth_response
def fetch_v3_token_from_trust(self, trust, trustee):
v3_password_data = {
'identity': {
"methods": ["password"],
"password": {
"user": {
"id": trustee["id"],
"password": trustee["password"]
}
}
},
'scope': {
'project': {
'id': self.tenant_baz['id']
}
}
}
auth_response = (self.auth_v3_controller.authenticate_for_token
({'environment': {},
'query_string': {}},
v3_password_data))
token = auth_response.headers['X-Subject-Token']
v3_req_with_trust = {
"identity": {
"methods": ["token"],
"token": {"id": token}},
"scope": {
"OS-TRUST:trust": {"id": trust['id']}}}
token_auth_response = (self.auth_v3_controller.authenticate_for_token
({'environment': {},
'query_string': {}},
v3_req_with_trust))
return token_auth_response
def test_create_v3_token_from_trust(self):
new_trust = self.create_trust(self.sample_data, self.trustor['name'])
auth_response = self.fetch_v3_token_from_trust(new_trust, self.trustee)
trust_token_user = auth_response.json['token']['user']
self.assertEqual(self.trustor['id'], trust_token_user['id'])
trust_token_trust = auth_response.json['token']['OS-TRUST:trust']
self.assertEqual(trust_token_trust['id'], new_trust['id'])
self.assertEqual(self.trustor['id'],
trust_token_trust['trustor_user']['id'])
self.assertEqual(self.trustee['id'],
trust_token_trust['trustee_user']['id'])
trust_token_roles = auth_response.json['token']['roles']
self.assertEqual(2, len(trust_token_roles))
def test_v3_trust_token_get_token_fails(self):
new_trust = self.create_trust(self.sample_data, self.trustor['name'])
auth_response = self.fetch_v3_token_from_trust(new_trust, self.trustee)
trust_token = auth_response.headers['X-Subject-Token']
v3_token_data = {'identity': {
'methods': ['token'],
'token': {'id': trust_token}
}}
self.assertRaises(
exception.Forbidden,
self.auth_v3_controller.authenticate_for_token,
{'environment': {},
'query_string': {}}, v3_token_data)
def test_token_from_trust(self):
new_trust = self.create_trust(self.sample_data, self.trustor['name'])
auth_response = self.fetch_v2_token_from_trust(new_trust)
self.assertIsNotNone(auth_response)
self.assertEqual(2,
len(auth_response['access']['metadata']['roles']),
"user_foo has three roles, but the token should"
" only get the two roles specified in the trust.")
def assert_token_count_for_trust(self, trust, expected_value):
tokens = self.token_provider_api._persistence._list_tokens(
self.trustee['id'], trust_id=trust['id'])
token_count = len(tokens)
self.assertEqual(expected_value, token_count)
def test_delete_tokens_for_user_invalidates_tokens_from_trust(self):
new_trust = self.create_trust(self.sample_data, self.trustor['name'])
self.assert_token_count_for_trust(new_trust, 0)
self.fetch_v2_token_from_trust(new_trust)
self.assert_token_count_for_trust(new_trust, 1)
self.token_provider_api._persistence.delete_tokens_for_user(
self.trustee['id'])
self.assert_token_count_for_trust(new_trust, 0)
def test_token_from_trust_cant_get_another_token(self):
new_trust = self.create_trust(self.sample_data, self.trustor['name'])
auth_response = self.fetch_v2_token_from_trust(new_trust)
trust_token_id = auth_response['access']['token']['id']
request_body = _build_user_auth(token={'id': trust_token_id},
tenant_id=self.tenant_bar['id'])
self.assertRaises(
exception.Forbidden,
self.controller.authenticate, {}, request_body)
def test_delete_trust_revokes_token(self):
unscoped_token = self.get_unscoped_token(self.trustor['name'])
new_trust = self.create_trust(self.sample_data, self.trustor['name'])
context = self._create_auth_context(
unscoped_token['access']['token']['id'])
self.fetch_v2_token_from_trust(new_trust)
trust_id = new_trust['id']
tokens = self.token_provider_api._persistence._list_tokens(
self.trustor['id'],
trust_id=trust_id)
self.assertEqual(1, len(tokens))
self.trust_controller.delete_trust(context, trust_id=trust_id)
tokens = self.token_provider_api._persistence._list_tokens(
self.trustor['id'],
trust_id=trust_id)
self.assertEqual(0, len(tokens))
def test_token_from_trust_with_no_role_fails(self):
new_trust = self.create_trust(self.sample_data, self.trustor['name'])
for assigned_role in self.assigned_roles:
self.assignment_api.remove_role_from_user_and_project(
self.trustor['id'], self.tenant_bar['id'], assigned_role)
request_body = self.build_v2_token_request('TWO', 'two2', new_trust)
self.assertRaises(
exception.Forbidden,
self.controller.authenticate, {}, request_body)
def test_expired_trust_get_token_fails(self):
expires_at = (timeutils.utcnow() +
datetime.timedelta(minutes=5)).strftime(TIME_FORMAT)
time_expired = timeutils.utcnow() + datetime.timedelta(minutes=10)
new_trust = self.create_trust(self.sample_data, self.trustor['name'],
expires_at)
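        # Freeze utcnow past the trust's expiry so authentication sees an expired trust.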
with mock.patch.object(timeutils, 'utcnow') as mock_now:
mock_now.return_value = time_expired
request_body = self.build_v2_token_request('TWO', 'two2',
new_trust)
self.assertRaises(
exception.Forbidden,
self.controller.authenticate, {}, request_body)
def test_token_from_trust_with_wrong_role_fails(self):
new_trust = self.create_trust(self.sample_data, self.trustor['name'])
self.assignment_api.add_role_to_user_and_project(
self.trustor['id'],
self.tenant_bar['id'],
self.role_other['id'])
for assigned_role in self.assigned_roles:
self.assignment_api.remove_role_from_user_and_project(
self.trustor['id'], self.tenant_bar['id'], assigned_role)
request_body = self.build_v2_token_request('TWO', 'two2', new_trust)
self.assertRaises(
exception.Forbidden,
self.controller.authenticate, {}, request_body)
def test_do_not_consume_remaining_uses_when_get_token_fails(self):
trust_data = copy.deepcopy(self.sample_data)
trust_data['remaining_uses'] = 3
new_trust = self.create_trust(trust_data, self.trustor['name'])
for assigned_role in self.assigned_roles:
self.assignment_api.remove_role_from_user_and_project(
self.trustor['id'], self.tenant_bar['id'], assigned_role)
request_body = self.build_v2_token_request('TWO', 'two2', new_trust)
self.assertRaises(exception.Forbidden,
self.controller.authenticate, {}, request_body)
unscoped_token = self.get_unscoped_token(self.trustor['name'])
context = self._create_auth_context(
unscoped_token['access']['token']['id'])
trust = self.trust_controller.get_trust(context,
new_trust['id'])['trust']
self.assertEqual(3, trust['remaining_uses'])
def test_v2_trust_token_contains_trustor_user_id_and_impersonation(self):
new_trust = self.create_trust(self.sample_data, self.trustor['name'])
auth_response = self.fetch_v2_token_from_trust(new_trust)
self.assertEqual(new_trust['trustee_user_id'],
auth_response['access']['trust']['trustee_user_id'])
self.assertEqual(new_trust['trustor_user_id'],
auth_response['access']['trust']['trustor_user_id'])
self.assertEqual(new_trust['impersonation'],
auth_response['access']['trust']['impersonation'])
self.assertEqual(new_trust['id'],
auth_response['access']['trust']['id'])
validate_response = self.controller.validate_token(
context=dict(is_admin=True, query_string={}),
token_id=auth_response['access']['token']['id'])
self.assertEqual(
new_trust['trustee_user_id'],
validate_response['access']['trust']['trustee_user_id'])
self.assertEqual(
new_trust['trustor_user_id'],
validate_response['access']['trust']['trustor_user_id'])
self.assertEqual(
new_trust['impersonation'],
validate_response['access']['trust']['impersonation'])
self.assertEqual(
new_trust['id'],
validate_response['access']['trust']['id'])
def disable_user(self, user):
user['enabled'] = False
self.identity_api.update_user(user['id'], user)
def test_trust_get_token_fails_if_trustor_disabled(self):
new_trust = self.create_trust(self.sample_data, self.trustor['name'])
request_body = self.build_v2_token_request(self.trustee['name'],
self.trustee['password'],
new_trust)
self.disable_user(self.trustor)
self.assertRaises(
exception.Forbidden,
self.controller.authenticate, {}, request_body)
def test_trust_get_token_fails_if_trustee_disabled(self):
new_trust = self.create_trust(self.sample_data, self.trustor['name'])
request_body = self.build_v2_token_request(self.trustee['name'],
self.trustee['password'],
new_trust)
self.disable_user(self.trustee)
self.assertRaises(
exception.Unauthorized,
self.controller.authenticate, {}, request_body)
class TokenExpirationTest(AuthTest):
@mock.patch.object(timeutils, 'utcnow')
def _maintain_token_expiration(self, mock_utcnow):
"""Token expiration should be maintained after re-auth & validation."""
now = datetime.datetime.utcnow()
mock_utcnow.return_value = now
r = self.controller.authenticate(
{},
auth={
'passwordCredentials': {
'username': self.user_foo['name'],
'password': self.user_foo['password']
}
})
unscoped_token_id = r['access']['token']['id']
original_expiration = r['access']['token']['expires']
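        # Advance the clock; re-validation and re-scoping must not extend the original expiry.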
mock_utcnow.return_value = now + datetime.timedelta(seconds=1)
r = self.controller.validate_token(
dict(is_admin=True, query_string={}),
token_id=unscoped_token_id)
self.assertEqual(original_expiration, r['access']['token']['expires'])
mock_utcnow.return_value = now + datetime.timedelta(seconds=2)
r = self.controller.authenticate(
{},
auth={
'token': {
'id': unscoped_token_id,
},
'tenantId': self.tenant_bar['id'],
})
scoped_token_id = r['access']['token']['id']
self.assertEqual(original_expiration, r['access']['token']['expires'])
mock_utcnow.return_value = now + datetime.timedelta(seconds=3)
r = self.controller.validate_token(
dict(is_admin=True, query_string={}),
token_id=scoped_token_id)
self.assertEqual(original_expiration, r['access']['token']['expires'])
def test_maintain_uuid_token_expiration(self):
self.config_fixture.config(group='token', provider='uuid')
self._maintain_token_expiration()
class AuthCatalog(unit.SQLDriverOverrides, AuthTest):
"""Tests for the catalog provided in the auth response."""
def config_files(self):
config_files = super(AuthCatalog, self).config_files()
# We need to use a backend that supports disabled endpoints, like the
# SQL backend.
config_files.append(unit.dirs.tests_conf('backend_sql.conf'))
return config_files
def _create_endpoints(self):
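        # Build an enabled endpoint plus a disabled endpoint and an endpoint on
        # a disabled service; only the enabled one should appear in catalogs.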
def create_region(**kwargs):
ref = {'id': uuid.uuid4().hex}
ref.update(kwargs)
self.catalog_api.create_region(ref)
return ref
def create_endpoint(service_id, region, **kwargs):
id_ = uuid.uuid4().hex
ref = {
'id': id_,
'interface': 'public',
'region_id': region,
'service_id': service_id,
'url': 'http://localhost/%s' % uuid.uuid4().hex,
}
ref.update(kwargs)
self.catalog_api.create_endpoint(id_, ref)
return ref
# Create a service for use with the endpoints.
def create_service(**kwargs):
id_ = uuid.uuid4().hex
ref = {
'id': id_,
'name': uuid.uuid4().hex,
'type': uuid.uuid4().hex,
}
ref.update(kwargs)
self.catalog_api.create_service(id_, ref)
return ref
enabled_service_ref = create_service(enabled=True)
disabled_service_ref = create_service(enabled=False)
region = create_region()
# Create endpoints
enabled_endpoint_ref = create_endpoint(
enabled_service_ref['id'], region['id'])
create_endpoint(
enabled_service_ref['id'], region['id'], enabled=False,
interface='internal')
create_endpoint(
disabled_service_ref['id'], region['id'])
return enabled_endpoint_ref
def test_auth_catalog_disabled_endpoint(self):
"""On authenticate, get a catalog that excludes disabled endpoints."""
endpoint_ref = self._create_endpoints()
# Authenticate
body_dict = _build_user_auth(
username='FOO',
password='foo2',
tenant_name="BAR")
token = self.controller.authenticate({}, body_dict)
# Check the catalog
self.assertEqual(1, len(token['access']['serviceCatalog']))
endpoint = token['access']['serviceCatalog'][0]['endpoints'][0]
self.assertEqual(
1, len(token['access']['serviceCatalog'][0]['endpoints']))
exp_endpoint = {
'id': endpoint_ref['id'],
'publicURL': endpoint_ref['url'],
'region': endpoint_ref['region_id'],
}
self.assertEqual(exp_endpoint, endpoint)
def test_validate_catalog_disabled_endpoint(self):
"""On validate, get back a catalog that excludes disabled endpoints."""
endpoint_ref = self._create_endpoints()
# Authenticate
body_dict = _build_user_auth(
username='FOO',
password='foo2',
tenant_name="BAR")
token = self.controller.authenticate({}, body_dict)
# Validate
token_id = token['access']['token']['id']
validate_ref = self.controller.validate_token(
dict(is_admin=True, query_string={}),
token_id=token_id)
# Check the catalog
self.assertEqual(1, len(token['access']['serviceCatalog']))
endpoint = validate_ref['access']['serviceCatalog'][0]['endpoints'][0]
self.assertEqual(
1, len(token['access']['serviceCatalog'][0]['endpoints']))
exp_endpoint = {
'id': endpoint_ref['id'],
'publicURL': endpoint_ref['url'],
'region': endpoint_ref['region_id'],
}
self.assertEqual(exp_endpoint, endpoint)
class NonDefaultAuthTest(unit.TestCase):
def test_add_non_default_auth_method(self):
self.config_fixture.config(group='auth',
methods=['password', 'token', 'custom'])
config.setup_authentication()
self.assertTrue(hasattr(CONF.auth, 'custom'))
| takeshineshiro/keystone | keystone/tests/unit/test_auth.py | Python | apache-2.0 | 61,769 |
# -*- coding: utf-8 -*-
"""
/***************************************************************************
Visibility analysis for QGIS (**to be installed in ViewshedAnalysis folder**)
-------------------
begin : 2018-03-18
copyright : (C) 2018 by Z. Cuckovic
homepage : https://zoran-cuckovic.github.io/QGIS-visibility-analysis/
***************************************************************************/
/***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************/
"""
__author__ = 'Z. Cuckovic'
__date__ = '2018-03-18'
__copyright__ = '(C) 2018 by Z. Cuckovic'
# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'
import os
import sys
import inspect
from qgis.core import QgsProcessingAlgorithm, QgsApplication
from .visibility_provider import VisibilityProvider
## this was causing problems ??
##cmd_folder = os.path.split(inspect.getfile(inspect.currentframe()))[0]
##
##if cmd_folder not in sys.path:
## sys.path.insert(0, cmd_folder)
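# Plugin entry point: QGIS calls initGui() on load and unload() on removal,
# which register and remove the Processing provider respectively.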
class VisibilityPlugin:
def __init__(self, iface):
self.iface = iface
self.provider = VisibilityProvider()
def initGui(self):
self.initProcessing()
def unload(self):
QgsApplication.processingRegistry().removeProvider(self.provider)
def initProcessing(self):
QgsApplication.processingRegistry().addProvider(self.provider)
| zoran-cuckovic/QGIS-visibility-analysis | visibility_plugin.py | Python | gpl-3.0 | 2,064 |
# python
from datetime import datetime
import time
# sql
from sqlalchemy import create_engine
from sqlalchemy.sql.expression import ClauseElement, desc
from sqlalchemy.orm import relationship, backref, scoped_session, sessionmaker
from sqlalchemy import (Column, Integer, String, Table,
DateTime, Boolean, Float, ForeignKey)
from sqlalchemy.ext.declarative import declarative_base
# this project
import settings
engine = create_engine('sqlite:///database.sqlite3', echo=False)
Base = declarative_base()
db = scoped_session(sessionmaker(bind=engine))
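# many-to-many link table between jobs and the nodes they run on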
association_table = Table('association', Base.metadata,
Column('job_id', Integer, ForeignKey('jobs.id')),
Column('node_id', Integer, ForeignKey('nodes.id')))
__all__ = ['Job', 'Node', 'Cluster', 'Snapshot', 'Queue',
'get_or_create', 'create_all', 'add_jobs_from_dict',
'add_nodes_from_dict', 'engine', 'db']
class Job(Base):
__tablename__ = 'jobs'
id = Column(Integer, primary_key=True)
job_id = Column(Integer, nullable=False)
name = Column(String(200), nullable=False)
user = Column(String(200), nullable=False)
processors = Column(Integer)
priority = Column(Integer)
status = Column(String(200), nullable=False)
n_nodes = Column(Integer, nullable=False)
error = Column(String(200), nullable=True)
# three many-> one
queue_id = Column(Integer, ForeignKey('queues.id'))
queue = relationship("Queue", backref='jobs', order_by=id)
cluster_id = Column(Integer, ForeignKey('clusters.id'))
cluster = relationship("Cluster", backref='jobs', order_by=id)
snapshot_id = Column(Integer, ForeignKey('snapshots.id'))
snapshot = relationship("Snapshot", backref='jobs', order_by=id)
def __repr__(self):
return '< Job: %s name=%s user=%s, queue=%s, snapshot=%s>' % (self.job_id,
self.name, self.user, self.queue.name, self.snapshot_id)
def to_dict(self):
raise NotImplementedError
class Node(Base):
__tablename__ = 'nodes'
id = Column(Integer, primary_key=True)
name = Column(String(200), nullable=False)
state = Column(String(200), nullable=False)
load = Column(Float)
n_procs = Column(Integer)
n_running = Column(Integer)
# many nodes -> one job
jobs = relationship("Job", secondary=association_table,
backref="nodes")
cluster_id = Column(Integer, ForeignKey('clusters.id'))
cluster = relationship("Cluster", backref='nodes', order_by=id)
# many nodes -> one snapshot
snapshot_id = Column(Integer, ForeignKey('snapshots.id'))
snapshot = relationship("Snapshot", backref='nodes', order_by=id)
def __repr__(self):
return '< Node:%s -- state=%s, n_procs=%s, n_running=%s, snapshot=%s >' % \
(self.name, self.state, self.n_procs, self.n_running,
self.snapshot_id)
def to_dict(self):
return {'cluster': self.cluster.name,
'load': self.load,
'n_procs': self.n_procs,
'n_running': self.n_running,
'name': self.name,
'state': self.state}
class Queue(Base):
__tablename__ = 'queues'
id = Column(Integer, primary_key=True)
name = Column(String(200), nullable=False)
# many queues -> one node
node_id = Column(Integer, ForeignKey('nodes.id'))
node = relationship('Node', backref='queues', order_by=id)
# many queues -> one cluster
cluster_id = Column(Integer, ForeignKey('clusters.id'))
cluster = relationship("Cluster", backref='queues', order_by=id)
def __repr__(self):
return '< Queue: %s >' % (self.name, )
class Cluster(Base):
__tablename__ = 'clusters'
id = Column(Integer, primary_key=True)
name = Column(String(200), nullable=False)
def __repr__(self):
return '< Cluster name=%s >' % (self.name,)
class Snapshot(Base):
__tablename__ = 'snapshots'
id = Column(Integer, primary_key=True)
# unix timestamp
timestamp = Column(Float)
def __init__(self):
self.timestamp = time.time()
@property
def time(self):
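        # snapshot timestamp as a timezone-aware datetime in the configured timezone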
return datetime.fromtimestamp(self.timestamp, settings.timezone)
def __repr__(self):
return '< Snapshot: time=%s >' % (self.time.strftime('%c'),)
def create_all():
Base.metadata.create_all(engine)
def get_or_create(session, model, **kwargs):
#http://stackoverflow.com/questions/2546207/does-sqlalchemy-have-an-equivalent-of-djangos-get-or-create
instance = session.query(model).filter_by(**kwargs).first()
if instance:
return instance, False
else:
params = dict((k, v) for k, v in kwargs.iteritems() if not isinstance(v, ClauseElement))
instance = model(**params)
session.add(instance)
return instance, True
def add_jobs_from_dict(db, jobslist, snapshot, cluster):
"add data sent by the daemons to the database"
for j in jobslist:
keys = ['status', 'priority', 'processors', 'nodes',
'user', 'error', 'name', 'job_id', 'n_nodes']
sanitized = dict(((k, v) for k, v in j.iteritems() if k in keys))
job = Job(**sanitized)
job.cluster = cluster
job.snapshot = snapshot
# add the queue that this job is on
q, _ = get_or_create(db, Queue, name=j['queue'], cluster=cluster)
job.queue = q
db.add(job)
def add_nodes_from_dict(db, nodeslist, snapshot, cluster):
"add data sent by the daemons to the database"
n_nodes = db.query(Node).count()
for n in nodeslist:
keys = ['name', 'state', 'load', 'n_procs', 'n_running']
sanitized = dict(((k, v) for k, v in n.iteritems() if k in keys))
node = Node(**sanitized)
node.cluster = cluster
node.snapshot = snapshot
# connect the node to the jobs
for job_id in n['job_ids']:
node.jobs.append(db.query(Job).filter_by(job_id=job_id, cluster=cluster,
snapshot=snapshot).first())
# register what queues this node acts on
for queue_name in n['queues']:
q, _ = get_or_create(db, Queue, name=queue_name, cluster=cluster)
node.queues.append(q)
db.add(node)
db.flush()
print 'added %s nodes' % (db.query(Node).count() - n_nodes)
def _testbuild():
"testing code"
    import json
    with open('etc/dump.json') as f:
        report = json.load(f)
    # Snapshot() takes no arguments; __init__ stamps it with the current time.
    snapshot = Snapshot()
cluster = Cluster(name='test')
add_jobs_from_dict(db, report['jobs'], snapshot, cluster)
add_nodes_from_dict(db, report['nodes'], snapshot, cluster)
db.commit()
if __name__ == '__main__':
"testing code"
import analytics
create_all()
#_testbuild()
print analytics.procs_by_user(db, db.query(Cluster).filter_by(name='test').first())
| rmcgibbo/webstat | webstat/models.py | Python | gpl-3.0 | 6,995 |
# Copyright (c) 2012 - 2014 EMC Corporation, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Fibre Channel Driver for EMC VNX array based on CLI.
"""
from cinder.openstack.common import log as logging
from cinder.volume import driver
from cinder.volume.drivers.emc import emc_vnx_cli
from cinder.zonemanager.utils import AddFCZone
from cinder.zonemanager.utils import RemoveFCZone
LOG = logging.getLogger(__name__)
class EMCCLIFCDriver(driver.FibreChannelDriver):
"""EMC FC Driver for VNX using CLI.
Version history:
1.0.0 - Initial driver
2.0.0 - Thick/thin provisioning, robust enhancement
3.0.0 - Array-based Backend Support, FC Basic Support,
Target Port Selection for MPIO,
Initiator Auto Registration,
Storage Group Auto Deletion,
Multiple Authentication Type Support,
Storage-Assisted Volume Migration,
SP Toggle for HA
3.0.1 - Security File Support
4.0.0 - Advance LUN Features (Compression Support,
Deduplication Support, FAST VP Support,
FAST Cache Support), Storage-assisted Retype,
External Volume Management, Read-only Volume,
FC Auto Zoning
4.1.0 - Consistency group support
"""
def __init__(self, *args, **kwargs):
super(EMCCLIFCDriver, self).__init__(*args, **kwargs)
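        # all array interaction is delegated to the shared VNX CLI helper object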
self.cli = emc_vnx_cli.getEMCVnxCli(
'FC',
configuration=self.configuration)
self.VERSION = self.cli.VERSION
def check_for_setup_error(self):
pass
def create_volume(self, volume):
"""Creates a volume."""
return self.cli.create_volume(volume)
def create_volume_from_snapshot(self, volume, snapshot):
"""Creates a volume from a snapshot."""
return self.cli.create_volume_from_snapshot(volume, snapshot)
def create_cloned_volume(self, volume, src_vref):
"""Creates a cloned volume."""
return self.cli.create_cloned_volume(volume, src_vref)
def extend_volume(self, volume, new_size):
"""Extend a volume."""
self.cli.extend_volume(volume, new_size)
def delete_volume(self, volume):
"""Deletes a volume."""
self.cli.delete_volume(volume)
def migrate_volume(self, ctxt, volume, host):
"""Migrate volume via EMC migration functionality."""
return self.cli.migrate_volume(ctxt, volume, host)
def retype(self, ctxt, volume, new_type, diff, host):
"""Convert the volume to be of the new type."""
return self.cli.retype(ctxt, volume, new_type, diff, host)
def create_snapshot(self, snapshot):
"""Creates a snapshot."""
self.cli.create_snapshot(snapshot)
def delete_snapshot(self, snapshot):
"""Deletes a snapshot."""
self.cli.delete_snapshot(snapshot)
def ensure_export(self, context, volume):
"""Driver entry point to get the export info for an existing volume."""
pass
def create_export(self, context, volume):
"""Driver entry point to get the export info for a new volume."""
pass
def remove_export(self, context, volume):
"""Driver entry point to remove an export for a volume."""
pass
def check_for_export(self, context, volume_id):
"""Make sure volume is exported."""
pass
@AddFCZone
def initialize_connection(self, volume, connector):
"""Initializes the connection and returns connection info.
Assign any created volume to a compute node/host so that it can be
used from that host.
The driver returns a driver_volume_type of 'fibre_channel'.
The target_wwn can be a single entry or a list of wwns that
correspond to the list of remote wwn(s) that will export the volume.
The initiator_target_map is a map that represents the remote wwn(s)
        and a list of wwns which are visible to the remote wwn(s).
Example return values:
{
'driver_volume_type': 'fibre_channel'
'data': {
'target_discovered': True,
'target_lun': 1,
'target_wwn': '1234567890123',
'access_mode': 'rw'
'initiator_target_map': {
'1122334455667788': ['1234567890123']
}
}
}
or
{
'driver_volume_type': 'fibre_channel'
'data': {
'target_discovered': True,
'target_lun': 1,
'target_wwn': ['1234567890123', '0987654321321'],
'access_mode': 'rw'
'initiator_target_map': {
'1122334455667788': ['1234567890123',
'0987654321321']
}
}
}
"""
conn_info = self.cli.initialize_connection(volume,
connector)
conn_info = self.cli.adjust_fc_conn_info(conn_info, connector)
LOG.debug("Exit initialize_connection"
" - Returning FC connection info: %(conn_info)s."
% {'conn_info': conn_info})
return conn_info
@RemoveFCZone
def terminate_connection(self, volume, connector, **kwargs):
"""Disallow connection from connector."""
remove_zone = self.cli.terminate_connection(volume, connector)
conn_info = {'driver_volume_type': 'fibre_channel',
'data': {}}
conn_info = self.cli.adjust_fc_conn_info(conn_info, connector,
remove_zone)
LOG.debug("Exit terminate_connection"
" - Returning FC connection info: %(conn_info)s."
% {'conn_info': conn_info})
return conn_info
def get_volume_stats(self, refresh=False):
"""Get volume stats.
If 'refresh' is True, run update the stats first.
"""
if refresh:
self.update_volume_stats()
return self._stats
def update_volume_stats(self):
"""Retrieve stats info from volume group."""
LOG.debug("Updating volume stats.")
data = self.cli.update_volume_stats()
backend_name = self.configuration.safe_get('volume_backend_name')
data['volume_backend_name'] = backend_name or 'EMCCLIFCDriver'
data['storage_protocol'] = 'FC'
self._stats = data
def manage_existing(self, volume, existing_ref):
"""Manage an existing lun in the array.
The lun should be in a manageable pool backend, otherwise
error would return.
        Rename the backend storage object so that it matches the
        volume['name'], which is how drivers traditionally map between a
        cinder volume and the associated backend storage object.
existing_ref:{
'id':lun_id
}
"""
LOG.debug("Reference lun id %s." % existing_ref['id'])
self.cli.manage_existing(volume, existing_ref)
def manage_existing_get_size(self, volume, existing_ref):
"""Return size of volume to be managed by manage_existing.
"""
return self.cli.manage_existing_get_size(volume, existing_ref)
def create_consistencygroup(self, context, group):
"""Creates a consistencygroup."""
return self.cli.create_consistencygroup(context, group)
def delete_consistencygroup(self, context, group):
"""Deletes a consistency group."""
return self.cli.delete_consistencygroup(
self, context, group)
def create_cgsnapshot(self, context, cgsnapshot):
"""Creates a cgsnapshot."""
return self.cli.create_cgsnapshot(
self, context, cgsnapshot)
def delete_cgsnapshot(self, context, cgsnapshot):
"""Deletes a cgsnapshot."""
return self.cli.delete_cgsnapshot(self, context, cgsnapshot) | hybrid-storage-dev/cinder-fs-111t-hybrid-cherry | volume/drivers/emc/emc_cli_fc.py | Python | apache-2.0 | 8,684 |
# -*- coding: utf-8 -*-
# Generated by Django 1.11.5 on 2017-09-15 23:07
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='User',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('last_login', models.DateTimeField(blank=True, null=True, verbose_name='last login')),
('full_name', models.CharField(max_length=250)),
('email', models.EmailField(max_length=254)),
('is_admin', models.BooleanField(default=False)),
('password', models.CharField(max_length=250)),
],
options={
'abstract': False,
},
),
migrations.CreateModel(
name='Employee',
fields=[
('user_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='users.User')),
],
options={
'abstract': False,
},
bases=('users.user',),
),
]
| tassolom/twq-app | api/users/migrations/0001_initial.py | Python | mit | 1,356 |
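# In-memory representation of a SAS+ planning task; each class's output()
# writes the translator's textual exchange format to a stream.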
class SASTask:
def __init__(self, variables, init, goal, operators, axioms, metric):
self.variables = variables
self.init = init
self.goal = goal
self.operators = operators
self.axioms = axioms
self.metric = metric
def output(self, stream):
print >> stream, "begin_metric"
print >> stream, int(self.metric)
print >> stream, "end_metric"
self.variables.output(stream)
self.init.output(stream)
self.goal.output(stream)
print >> stream, len(self.operators)
for op in self.operators:
op.output(stream)
print >> stream, len(self.axioms)
for axiom in self.axioms:
axiom.output(stream)
class SASVariables:
def __init__(self, ranges, axiom_layers):
self.ranges = ranges
self.axiom_layers = axiom_layers
def dump(self):
for var, (rang, axiom_layer) in enumerate(zip(self.ranges, self.axiom_layers)):
if axiom_layer != -1:
axiom_str = " [axiom layer %d]" % axiom_layer
else:
axiom_str = ""
print "v%d in {%s}%s" % (var, range(rang), axiom_str)
def output(self, stream):
print >> stream, "begin_variables"
print >> stream, len(self.ranges)
for var, (rang, axiom_layer) in enumerate(zip(self.ranges, self.axiom_layers)):
print >> stream, "var%d %d %d" % (var, rang, axiom_layer)
print >> stream, "end_variables"
class SASInit:
def __init__(self, values):
self.values = values
def dump(self):
for var, val in enumerate(self.values):
if val != -1:
print "v%d: %d" % (var, val)
def output(self, stream):
print >> stream, "begin_state"
for val in self.values:
print >> stream, val
print >> stream, "end_state"
class SASGoal:
def __init__(self, pairs):
self.pairs = sorted(pairs)
def dump(self):
for var, val in self.pairs:
print "v%d: %d" % (var, val)
def output(self, stream):
print >> stream, "begin_goal"
print >> stream, len(self.pairs)
for var, val in self.pairs:
print >> stream, var, val
print >> stream, "end_goal"
class SASOperator:
def __init__(self, name, prevail, pre_post, cost):
self.name = name
self.prevail = sorted(prevail)
self.pre_post = sorted(pre_post)
self.cost = cost
def dump(self):
print self.name
print "Prevail:"
for var, val in self.prevail:
print " v%d: %d" % (var, val)
print "Pre/Post:"
for var, pre, post, cond in self.pre_post:
if cond:
cond_str = " [%s]" % ", ".join(["%d: %d" % tuple(c) for c in cond])
else:
cond_str = ""
print " v%d: %d -> %d%s" % (var, pre, post, cond_str)
def output(self, stream):
print >> stream, "begin_operator"
print >> stream, self.name[1:-1]
print >> stream, len(self.prevail)
for var, val in self.prevail:
print >> stream, var, val
print >> stream, len(self.pre_post)
for var, pre, post, cond in self.pre_post:
print >> stream, len(cond),
for cvar, cval in cond:
print >> stream, cvar, cval,
print >> stream, var, pre, post
print >> stream, self.cost
print >> stream, "end_operator"
class SASAxiom:
def __init__(self, condition, effect):
self.condition = condition
self.effect = effect
assert self.effect[1] in (0, 1)
for _, val in condition:
assert val >= 0, condition
def dump(self):
print "Condition:"
for var, val in self.condition:
print " v%d: %d" % (var, val)
print "Effect:"
var, val = self.effect
print " v%d: %d" % (var, val)
def output(self, stream):
print >> stream, "begin_rule"
print >> stream, len(self.condition)
for var, val in self.condition:
print >> stream, var, val
var, val = self.effect
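        # effect line: affected variable, its complementary (default) value, then the derived value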
print >> stream, var, 1 - val, val
print >> stream, "end_rule"
| dpattiso/igraph | lama/translate/sas_tasks.py | Python | gpl-2.0 | 3,804 |
from django.shortcuts import render
def index(request):
return render(request, 'index.html', {
})
| robrocker7/h1z1map | server/common/views.py | Python | apache-2.0 | 111 |
import os
import sqlite3
from flask import Flask, request, session, g, redirect, url_for, abort, \
render_template, flash
# create our little application :)
app = Flask(__name__)
app.config.from_object(__name__)
# Load default config and override config from an environment variable
app.config.update(dict(
DATABASE=os.path.join(app.root_path, 'flaskr.db'),
DEBUG=True,
SECRET_KEY='development key',
USERNAME='admin',
PASSWORD='default'
))
app.config.from_envvar('FLASKR_SETTINGS', silent=True)
def connect_db():
"""Connects to the specific database."""
rv = sqlite3.connect(app.config['DATABASE'])
rv.row_factory = sqlite3.Row
return rv
def init_db():
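    """Creates the database tables by executing the bundled schema file."""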
with app.app_context():
db = get_db()
with app.open_resource('schema.sql', mode='r') as f:
db.cursor().executescript(f.read())
db.commit()
def get_db():
"""Opens a new database connection if there is none yet for the
current application context.
"""
if not hasattr(g, 'sqlite_db'):
g.sqlite_db = connect_db()
return g.sqlite_db
@app.teardown_appcontext
def close_db(error):
"""Closes the database again at the end of the request."""
if hasattr(g, 'sqlite_db'):
g.sqlite_db.close()
@app.route('/')
def show_entries():
db = get_db()
cur = db.execute('select title, text from entries order by id desc')
entries = cur.fetchall()
return render_template('show_entries.html', entries=entries)
@app.route('/add', methods=['POST'])
def add_entry():
if not session.get('logged_in'):
abort(401)
db = get_db()
db.execute('insert into entries (title, text) values (?, ?)',
[request.form['title'], request.form['text']])
db.commit()
flash('New entry was successfully posted')
return redirect(url_for('show_entries'))
@app.route('/login', methods=['GET', 'POST'])
def login():
error = None
if request.method == 'POST':
if request.form['username'] != app.config['USERNAME']:
error = 'Invalid username'
elif request.form['password'] != app.config['PASSWORD']:
error = 'Invalid password'
else:
session['logged_in'] = True
flash('You were logged in')
return redirect(url_for('show_entries'))
return render_template('login.html', error=error)
@app.route('/logout')
def logout():
session.pop('logged_in', None)
flash('You were logged out')
return redirect(url_for('show_entries'))
if __name__ == '__main__':
app.run() | steelium/wardkeeper | wardkeeper/flaskr.py | Python | mit | 2,571 |
from django.shortcuts import render, redirect, get_object_or_404
from miniURL.models import Redirection
from miniURL.forms import RedirectionForm
from django.core.paginator import Paginator, EmptyPage
# Create your views here.
#def home(request):
# urls = Redirection.objects.all().order_by('-number_access')
# return render(request, 'miniURL/accueil-url.html', {'urls': urls})
# Changed to support pagination
def home(request, page=1):
urls = Redirection.objects.all().order_by('-number_access')
paginator = Paginator(urls, 5)
try:
urls = paginator.page(page)
except EmptyPage:
urls = paginator.page(paginator.num_pages)
return render(request, 'miniURL/accueil-url.html', locals())
def nouvelle_url(request):
sauvegarde = False
if request.method == "POST":
form = RedirectionForm(request.POST)
if form.is_valid():
#redirection = Redirection()
#redirection.real_url = form.cleaned_data["real_url"]
#redirection.pseudo = form.cleaned_data["pseudo"]
            #redirection.small_url = generer(5) # this can also be done in the model by overriding save()
#redirection.save()
form.save()
sauvegarde = True
return redirect(home)
else:
form = RedirectionForm()
return render(request, 'miniURL/nouv-url.html', locals())
def redirection(request, mini_URL):
url = get_object_or_404(Redirection, small_url=mini_URL)
url.number_access += 1
url.save()
return redirect(url.real_url)
#############################################################
## Trying out generic class-based views for data manipulation
# These views come with default template names.
from django.views.generic import CreateView, UpdateView, DeleteView
from django.core.urlresolvers import reverse_lazy
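# reverse_lazy defers URL resolution until the URLconf has loaded, which is
# needed for success_url attributes evaluated at class-definition time.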
class URLCreate(CreateView):
model = Redirection
template_name = 'miniURL/nouv-url.html'
form_class = RedirectionForm
success_url = reverse_lazy(home)
class URLUpdate(UpdateView):
model = Redirection
template_name = 'miniURL/nouv-url.html'
form_class = RedirectionForm
success_url = reverse_lazy(home)
    # Change how the object is looked up
def get_object(self, queryset=None):
small_url = self.kwargs.get('small_url', None)
return get_object_or_404(Redirection, small_url=small_url)
    # Change what happens on successful validation.
    """ I don't know yet how messages work
def form_valid(self, form):
self.object = form.save()
        # Send a message to the user
        messages.success(self.request, "Your profile was updated successfully.")
return HttpResponseRedirect(self.get_success_url())
"""
class URLDelete(DeleteView):
model = Redirection
context_object_name = "mini_url"
template_name = 'miniURL/supp-url.html'
success_url = reverse_lazy(home)
    # Change how the object is looked up
def get_object(self, queryset=None):
small_url = self.kwargs.get('small_url', None)
return get_object_or_404(Redirection, small_url=small_url)
| guillaume-havard/testdjango | sitetest/miniURL/views.py | Python | mit | 3,277 |
import numpy as np
from numpy import linalg as la
import time
import subprocess
from streaming import Oja
from streaming import BlockOrthogonal
import MovieLens
if __name__ == "__main__":
#oja=Oja(Ceta=1e-3,k=1,stream=MovieLens.UserStream())
boi1=BlockOrthogonal(k=1,stream=MovieLens.UserStream(sparse=False, file='/var/datasets/ml-10M100K/ratingsTab21.dat'))
boi2=BlockOrthogonal(k=1,stream=MovieLens.UserStream(sparse=False, file='/var/datasets/ml-10M100K/ratingsTab22.dat'))
t0 = time.time()
oneDone=False
twoDone=False
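    # Interleave updates from the two streams until both are exhausted.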
while not (oneDone and twoDone):
try:
boi1.next()
except StopIteration:
oneDone=True
try:
boi2.next()
except StopIteration:
twoDone=True
t1 = time.time()
total = t1-t0
print "Total time: ", total
print np.dot(boi1.getEstimate().T,boi2.getEstimate())
| mitliagkas/pyliakmon | main.py | Python | mit | 931 |
from stard.services import BaseService
class Service(BaseService):
def init_service(self):
self.children = {self.service('child')}
| DexterLB/stard | src/stard/test_samples/father.py | Python | mit | 144 |
from dnfpy.core.constantMap import ConstantMap
import unittest
import numpy as np
from rsdnfMap import RsdnfMap
import dnfpy.view.staticViewMatplotlib as view
class RsdnfMapTestSequence(unittest.TestCase):
def setUp(self):
self.size = 51
self.activation = np.zeros((self.size,self.size),np.bool_)
self.uut = RsdnfMap("uut",self.size,routerType="sequence",activation=self.activation)
self.uut.reset()
def testSubsequenceInit(self):
proba = self.uut.getArg('proba')
self.assertEqual(proba,1)
randomSequence2 = self.uut.getRandomSequence()
self.assertEqual(np.sum(randomSequence2),self.size**2*4)
def testSubsequence(self):
proba = 0.8
size = self.size
randomSequence1 = (np.random.random((size,size,4)) <= proba).astype(np.intc)
self.uut.setRandomSequence(randomSequence1)
randomSequence2 = self.uut.getRandomSequence()
self.assertTrue(np.array_equal(randomSequence1,randomSequence2))
def testComputeP1(self):
randomSequence = self.uut.getRandomSequence()
self.activation[self.size//2,self.size//2] = 1
for i in range(100):
self.uut.compute()
data = self.uut.getData()
view.plotArray(data)
view.show()
self.assertEqual(data[self.size//2+1,self.size//2+1],20)
def testComputeActivationNspike1(self):
self.uut.setParams(nspike=1)
self.activation[self.size//2,self.size//2] = 1
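        # a single spike from the centre cell should reach every other cell exactly once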
for i in range(101):
self.uut.compute()
data = self.uut.getData()
self.assertEqual(np.sum(data),self.size**2-1)
def testComputeActivationNspike10(self):
self.uut.setParams(nspike=10)
self.activation[self.size//2,self.size//2] = 1
for i in range(140):
self.uut.compute()
data = self.uut.getData()
self.assertEqual(np.sum(data),10*(self.size**2)-10)
def testComputeReset(self):
self.uut.setParams(nspike=1)
self.activation[self.size//2,self.size//2] = 1
self.uut.setParams(proba=1.0)
for i in range(5):
self.uut.compute()
data = self.uut.getData()
self.assertEqual(data[self.size//2+4,self.size//2],1)
self.assertEqual(data[self.size//2+5,self.size//2],0)
self.uut.resetData()
for i in range(5):
self.uut.compute()
data = self.uut.getData()
self.assertEqual(data[self.size//2+4,self.size//2],1)
self.assertEqual(data[self.size//2+5,self.size//2],0)
def testMultiActivation(self):
self.uut.setParams(nspike=9)
self.activation[self.size//2,self.size//2] = 1
self.activation[self.size//2,self.size//2+1] = 1
self.activation[self.size//2+1,self.size//2+1] = 1
self.activation[self.size//2+1,self.size//2] = 1
self.uut.compute()
self.activation[...] = 0
for i in range(30):
self.uut.compute()
data = self.uut.getData()
def testReset(self):
self.uut.setParams(nspike=1)
self.activation[self.size//2,self.size//2] = 1
self.uut.compute()
for i in range(20):
self.uut.compute()
data = self.uut.getData()
self.uut.reset()
data2 = self.uut.getData()
self.assertEqual(np.sum(data2),0)
def testComputeP2(self):
self.activation[self.size//2,self.size//2] = 1
self.uut.setParams(proba=0.99)
for i in range(100):
self.uut.compute()
data = self.uut.getData()
    def tes_ComputePrecision(self):  # TODO
self.activation[self.size//2,self.size//2] = 1
self.uut.setParams(proba=0.99)
self.uut.setParams(precision=1)
for i in range(100):
self.uut.compute()
data = self.uut.getData()
view.plotArray(data)
view.show()
if __name__ == "__main__":
unittest.main()
| bchappet/dnfpy | src/test_dnfpy/cellular/rsdnfMapTest2.py | Python | gpl-2.0 | 4,424 |
from paypalrestsdk import BillingAgreement, ResourceNotFound
import logging
BILLING_AGREEMENT_ID = "I-HT38K76XPMGJ"
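# Look up the agreement, suspend it, then reactivate it, printing the state after each step.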
try:
billing_agreement = BillingAgreement.find(BILLING_AGREEMENT_ID)
print("Billing Agreement [%s] has state %s" % (billing_agreement.id, billing_agreement.state))
suspend_note = {
"note": "Suspending the agreement"
}
if billing_agreement.suspend(suspend_note):
# Would expect state has changed to Suspended
billing_agreement = BillingAgreement.find(BILLING_AGREEMENT_ID)
print("Billing Agreement [%s] has state %s" % (billing_agreement.id, billing_agreement.state))
reactivate_note = {
"note": "Reactivating the agreement"
}
if billing_agreement.reactivate(reactivate_note):
# Would expect state has changed to Active
billing_agreement = BillingAgreement.find(BILLING_AGREEMENT_ID)
print("Billing Agreement [%s] has state %s" % (billing_agreement.id, billing_agreement.state))
else:
print(billing_agreement.error)
else:
print(billing_agreement.error)
except ResourceNotFound as error:
print("Billing Agreement Not Found")
| stafur/pyTRUST | paypal-rest-api-sdk-python/samples/subscription/billing_agreements/suspend_and_re_activate.py | Python | apache-2.0 | 1,184 |
# SPDX-License-Identifier: Apache-2.0
# Copyright Contributors to the Rez Project
"""
Run the Rez GUI application.
"""
def setup_parser(parser, completions=False):
parser.add_argument(
"--diff", nargs=2, metavar=("RXT1", "RXT2"),
help="open in diff mode with the given contexts")
FILE_action = parser.add_argument(
"FILE", type=str, nargs='*',
help="context files")
if completions:
from rez.cli._complete_util import FilesCompleter
FILE_action.completer = FilesCompleter()
def command(opts, parser=None, extra_arg_groups=None):
from rezgui.app import run
run(opts, parser)
| nerdvegas/rez | src/rez/cli/gui.py | Python | apache-2.0 | 648 |
import discord
from asyncio import TimeoutError
from database.model import NHLTeams, NHLPlayers
from datetime import datetime
from rapidfuzz import process
from utilities.format import format_list
from utilities.request import fetch
class Game:
__slots__ = (
'status',
'home_id',
'home_name',
'home_score',
'home_wins',
'home_losses',
'home_overtime',
'away_id',
'away_name',
'away_score',
'away_wins',
'away_losses',
'away_overtime',
'venue_id',
'venue_name',
)
def __init__(self, data):
self.status = data.get('status').get('abstractGameState')
self.home_id = data.get('teams').get('home').get('team').get('id')
self.home_name = data.get('teams').get('home').get('team').get('name')
self.home_score = data.get('teams').get('home').get('score')
self.home_wins = data.get('teams').get('home').get('leagueRecord').get('wins')
self.home_losses = data.get('teams').get('home').get('leagueRecord').get('losses')
self.home_overtime = data.get('teams').get('home').get('leagueRecord').get('ot')
self.away_id = data.get('teams').get('away').get('team').get('id')
self.away_name = data.get('teams').get('away').get('team').get('name')
self.away_score = data.get('teams').get('away').get('score')
self.away_wins = data.get('teams').get('away').get('leagueRecord').get('wins')
self.away_losses = data.get('teams').get('away').get('leagueRecord').get('losses')
self.away_overtime = data.get('teams').get('away').get('leagueRecord').get('ot')
self.venue_id = data.get('venue').get('id')
self.venue_name = data.get('venue').get('name')
@property
def home_team_record(self):
return f"{self.home_wins}-{self.home_losses}-{self.home_overtime}"
@property
def away_team_record(self):
return f"{self.away_wins}-{self.away_losses}-{self.away_overtime}"
@property
def winner(self):
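        # only games in the 'Final' state have a winner; anything else returns None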
if self.status != 'Final':
return None
if self.home_score > self.away_score:
return self.home_name
if self.away_score > self.home_score:
return self.away_name
class GoalieSingleSeason:
__slots__ = (
'time_on_ice',
'overtime',
'shutouts',
'ties',
'wins',
'losses',
'saves',
'power_play_saves',
'short_handed_saves',
'even_saves',
'short_handed_shots',
'even_shots',
'power_play_shots',
'save_percentage',
'goal_against_average',
'games',
'games_started',
'shots_against',
'goals_against',
'time_on_ice_per_game',
'power_play_save_percentage',
'short_handed_save_percentage',
'even_strength_save_percentage'
)
def __init__(self, data):
self.time_on_ice = data.get('timeOnIce')
self.overtime = data.get('ot')
self.shutouts = data.get('shutouts')
self.ties = data.get('ties')
self.wins = data.get('wins')
self.losses = data.get('losses')
self.saves = data.get('saves')
self.power_play_saves = data.get('powerPlaySaves')
self.short_handed_saves = data.get('shortHandedSaves')
self.even_saves = data.get('evenSaves')
self.short_handed_shots = data.get('shortHandedShots')
self.even_shots = data.get('evenShots')
self.power_play_shots = data.get('powerPlayShots')
self.save_percentage = data.get('savePercentage')
self.goal_against_average = data.get('goalAgainstAverage')
self.games = data.get('games')
self.games_started = data.get('gamesStarted')
self.shots_against = data.get('shotsAgainst')
self.goals_against = data.get('goalsAgainst')
self.time_on_ice_per_game = data.get('timeOnIcePerGame')
self.power_play_save_percentage = data.get('powerPlaySavePercentage')
self.short_handed_save_percentage = data.get('shortHandedSavePercentage')
self.even_strength_save_percentage = data.get('evenStrengthSavePercentage')
class Player:
__slots__ = (
'id',
'full_name',
'link',
'first_name',
'last_name',
'number',
'birthdate',
'age',
'city',
'province',
'country',
'nationality',
'height',
'weight',
'active',
'alternate_captain',
'captain',
'rookie',
'shooting_hand',
'team_id',
'team_name',
'team_link',
'position_code',
'position_name',
'position_type',
'position_abbreviation'
)
def __init__(self, data):
self.id = data.get('id')
self.full_name = data.get('fullName')
self.link = data.get('link')
self.first_name = data.get('firstName')
self.last_name = data.get('lastName')
self.number = data.get('primaryNumber')
self.birthdate = data.get('birthDate')
self.age = data.get('currentAge')
self.city = data.get('birthCity')
self.province = data.get('birthStateProvince')
self.country = data.get('birthCountry')
self.nationality = data.get('nationality')
self.height = data.get('height')
self.weight = data.get('weight')
self.active = data.get('active')
self.alternate_captain = data.get('alternateCaptain')
self.captain = data.get('captain')
self.rookie = data.get('rookie')
self.shooting_hand = data.get('shootsCatches')
self.team_id = data.get('currentTeam').get('id')
self.team_name = data.get('currentTeam').get('name')
self.team_link = data.get('currentTeam').get('link')
self.position_code = data.get('primaryPosition').get('code')
self.position_name = data.get('primaryPosition').get('name')
self.position_type = data.get('primaryPosition').get('type')
self.position_abbreviation = data.get('primaryPosition').get('abbreviation')
@property
def birthday(self):
return datetime.strptime(self.birthdate, '%Y-%m-%d').strftime('%B %d, %Y')
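# e.g. a birthdate of '1987-08-07' renders via .birthday as 'August 07, 1987'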
class PlayerSingleSeason:
__slots__ = (
'time_on_ice',
'assists',
'goals',
'pim',
'shots',
'games',
'hits',
'powerplay_goals',
'powerplay_points',
'powerplay_time_on_ice',
'even_time_on_ice',
'penalty_minutes',
'faceoff_percentage',
'shot_percentage',
'game_winning_goals',
'overtime_goals',
'shorthanded_goals',
'shorthanded_points',
'shorthanded_time_on_ice',
'blocked',
'plus_minus',
'points',
'shifts',
'time_on_ice_per_game',
'even_time_on_ice_per_game',
'short_handed_time_on_ice_per_game',
'power_play_time_on_ice_per_game'
)
def __init__(self, data):
self.time_on_ice = data.get('timeOnIce')
self.assists = data.get('assists')
self.goals = data.get('goals')
self.pim = data.get('pim')
self.shots = data.get('shots')
self.games = data.get('games')
self.hits = data.get('hits')
self.powerplay_goals = data.get('powerPlayGoals')
self.powerplay_points = data.get('powerPlayPoints')
self.powerplay_time_on_ice = data.get('powerPlayTimeOnIce')
self.even_time_on_ice = data.get('evenTimeOnIce')
self.penalty_minutes = data.get('penaltyMinutes')
self.faceoff_percentage = data.get('faceOffPct')
self.shot_percentage = data.get('shotPct')
self.game_winning_goals = data.get('gameWinningGoals')
self.overtime_goals = data.get('overTimeGoals')
self.shorthanded_goals = data.get('shortHandedGoals')
self.shorthanded_points = data.get('shortHandedPoints')
self.shorthanded_time_on_ice = data.get('shortHandedTimeOnIce')
self.blocked = data.get('blocked')
self.plus_minus = data.get('plusMinus')
self.points = data.get('points')
self.shifts = data.get('shifts')
self.time_on_ice_per_game = data.get('timeOnIcePerGame')
self.even_time_on_ice_per_game = data.get('evenTimeOnIcePerGame')
self.short_handed_time_on_ice_per_game = data.get('shortHandedTimeOnIcePerGame')
self.power_play_time_on_ice_per_game = data.get('powerPlayTimeOnIcePerGame')
class Team:
__slots__ = (
'id',
'name',
'link',
'venue_id',
'venue_name',
'venue_link',
'venue_city',
'timezone_id',
'timezone_offset',
'timezone_tz',
'abbreviation',
'team_name',
'location_name',
'first_year_of_play',
'division_id',
'division_name',
'division_name_short',
'division_link',
'division_abbreviation',
'conference_id',
'conference_name',
'conference_link',
'franchise_id',
'franchise_name',
'franchise_link',
'short_name',
'official_website',
'active'
)
def __init__(self, data):
self.id = data.get('id')
self.name = data.get('name')
self.link = data.get('link')
self.venue_id = data.get('venue').get('id')
self.venue_name = data.get('venue').get('name')
self.venue_link = data.get('venue').get('link')
self.venue_city = data.get('venue').get('city')
self.timezone_id = data.get('venue').get('timeZone').get('id')
self.timezone_offset = data.get('venue').get('timeZone').get('offset')
self.timezone_tz = data.get('venue').get('timeZone').get('tz')
self.abbreviation = data.get('abbreviation')
self.team_name = data.get('teamName')
self.location_name = data.get('locationName')
self.first_year_of_play = data.get('firstYearOfPlay')
self.division_id = data.get('division').get('id')
self.division_name = data.get('division').get('name')
self.division_name_short = data.get('division').get('nameShort')
self.division_link = data.get('division').get('link')
self.division_abbreviation = data.get('division').get('abbreviation')
self.conference_id = data.get('conference').get('id')
self.conference_name = data.get('conference').get('name')
self.conference_link = data.get('conference').get('link')
self.franchise_id = data.get('franchise').get('franchiseId')
self.franchise_name = data.get('franchise').get('teamName')
self.franchise_link = data.get('franchise').get('link')
self.short_name = data.get('shortName')
self.official_website = data.get('officialSiteUrl')
self.active = data.get('active')
async def get_player(session, player_id):
url = f"https://statsapi.web.nhl.com/api/v1/people/{player_id}"
return await fetch(session, url)
async def get_player_single_season(session, player_id):
url = f"https://statsapi.web.nhl.com/api/v1/people/{player_id}/stats?stats=statsSingleSeason"
return await fetch(session, url)
async def get_schedule(session, date):
url = f"https://statsapi.web.nhl.com/api/v1/schedule?date={date}"
return await fetch(session, url)
async def get_teams(session):
url = f"https://statsapi.web.nhl.com/api/v1/teams/"
return await fetch(session, url)
async def get_team_roster(session, team_id):
url = f"https://statsapi.web.nhl.com/api/v1/teams/{team_id}/roster"
return await fetch(session, url)
async def get_team_schedule(session, date, team_id):
url = f"https://statsapi.web.nhl.com/api/v1/schedule?date={date}&teamId={team_id}"
return await fetch(session, url)
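# Usage sketch for the fetch helpers above (assuming `fetch`, defined earlier
# in this module, performs the GET with the given aiohttp-style session and
# returns the decoded JSON):
#
#     async with aiohttp.ClientSession() as session:
#         schedule = await get_schedule(session, '2019-10-02')
#         games = schedule['dates'][0]['games']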
async def search_for_players(name: str):
players = dict(
await NHLPlayers
.select('player_id', 'full_name')
.gino
.all()
)
match = process.extract(
name,
players
)
return match
async def search_for_teams(name: str):
teams = dict(
await NHLTeams
.select('team_name', 'team_id')
.gino
.all()
)
match = process.extract(
name,
teams
)
return match
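# `process.extract` is assumed to be fuzzywuzzy/thefuzz-style fuzzy matching
# over the dict above; each result is a (value, score, key) tuple, which is
# how get_team_id/get_player_id below unpack it as (name, score, id).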
async def get_team_id(viking, ctx, name):
embed = discord.Embed(
inline=True,
colour=viking.color,
title='Team'
)
teams = await search_for_teams(name)
matches = [(name, id) for name, score, id in teams if score > 75]
if not matches:
return None
if len(matches) == 1:
for name, id in matches:
return id
if len(matches) > 1:
teams = {}
for index, match in enumerate(matches, 1):
name, id = match
teams[id] = f"{index}. {name}"
options = format_list(
teams.values(),
enumerate=True
)
embed.add_field(
inline=False,
name='Please type a number to select a team:',
value=options
)
await ctx.send(embed=embed)
def check(message):
if (message.author == ctx.author and
message.channel == ctx.channel):
try:
selection = int(message.content)
except Exception:
pass
else:
if selection >= 1 and selection <= len(matches):
return True
try:
message = await viking.wait_for(
'message',
check=check,
timeout=15
)
except TimeoutError:
raise
else:
team_index = int(message.content) - 1
_, id = matches[team_index]
return id
async def get_player_id(viking, ctx, name):
embed = discord.Embed(
inline=True,
colour=viking.color,
title='Profile'
)
players = await search_for_players(name)
matches = [(name, id) for name, score, id in players if score > 75]
if not matches:
return None
if len(matches) == 1:
for name, id in matches:
return id
if len(matches) > 1:
players = {}
for index, match in enumerate(matches, 1):
name, id = match
players[id] = f"{index}. {name}"
options = format_list(
players.values(),
enumerate=True
)
embed.add_field(
inline=False,
name='Please type a number to select a player:',
value=options
)
await ctx.send(embed=embed)
def check(message):
if (message.author == ctx.author and
message.channel == ctx.channel):
try:
selection = int(message.content)
except Exception:
pass
else:
if selection >= 1 and selection <= len(matches):
return True
try:
message = await viking.wait_for(
'message',
check=check,
timeout=15
)
except TimeoutError:
raise
else:
player_index = int(message.content) - 1
_, id = matches[player_index]
return id
| braycarlson/viking | utilities/nhl.py | Python | mit | 15,534 |
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright (c) 2011 University of Southern California
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
import __builtin__
import StringIO
from nova import test
from nova.virt.baremetal import tilera
class TileraBareMetalNodesTestCase(test.TestCase):
def setUp(self):
super(TileraBareMetalNodesTestCase, self).setUp()
self.board_info = "\n".join([
'# board_id ip_address mac_address 00:1A:CA:00:57:90 '
'00:1A:CA:00:58:98 00:1A:CA:00:58:50',
'6 10.0.2.7 00:1A:CA:00:58:5C 10 16218 917 476 1 tilera_hv 1 '
'{"vendor":"tilera","model":"TILEmpower","arch":"TILEPro64",'
'"features":["8x8Grid","32bVLIW","5.6MBCache","443BOPS","37TbMesh",'
'"700MHz-866MHz","4DDR2","2XAUIMAC/PHY","2GbEMAC"],'
'"topology":{"cores":"64"}}',
'7 10.0.2.8 00:1A:CA:00:58:A4 10 16218 917 476 1 tilera_hv 1 '
'{"vendor":"tilera","model":"TILEmpower","arch":"TILEPro64",'
'"features":["8x8Grid","32bVLIW","5.6MBCache","443BOPS","37TbMesh",'
'"700MHz-866MHz","4DDR2","2XAUIMAC/PHY","2GbEMAC"],'
'"topology":{"cores":"64"}}',
'8 10.0.2.9 00:1A:CA:00:58:1A 10 16218 917 476 1 tilera_hv 1 '
'{"vendor":"tilera","model":"TILEmpower","arch":"TILEPro64",'
'"features":["8x8Grid","32bVLIW","5.6MBCache","443BOPS","37TbMesh",'
'"700MHz-866MHz","4DDR2","2XAUIMAC/PHY","2GbEMAC"],'
'"topology":{"cores":"64"}}',
'9 10.0.2.10 00:1A:CA:00:58:38 10 16385 1000 0 0 tilera_hv 1 '
'{"vendor":"tilera","model":"TILEmpower","arch":"TILEPro64",'
'"features":["8x8Grid","32bVLIW","5.6MBCache","443BOPS","37TbMesh",'
'"700MHz-866MHz","4DDR2","2XAUIMAC/PHY","2GbEMAC"],'
'"topology":{"cores":"64"}}'])
def tearDown(self):
super(TileraBareMetalNodesTestCase, self).tearDown()
# Reset the singleton state
tilera.BareMetalNodes._instance = None
tilera.BareMetalNodes._is_init = False
def test_singleton(self):
"""Confirm that the object acts like a singleton.
In this case, we check that it only loads the config file once,
even though it has been instantiated multiple times"""
self.mox.StubOutWithMock(__builtin__, 'open')
open("/tftpboot/tilera_boards",
"r").AndReturn(StringIO.StringIO(self.board_info))
self.mox.ReplayAll()
nodes = tilera.BareMetalNodes("/tftpboot/tilera_boards")
nodes = tilera.BareMetalNodes("/tftpboot/tilera_boards")
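        # The mox record above expects open() to run exactly once, even though
        # BareMetalNodes was constructed twice -- that is the singleton check.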
def test_get_hw_info(self):
self.mox.StubOutWithMock(__builtin__, 'open')
open("/tftpboot/tilera_boards",
"r").AndReturn(StringIO.StringIO(self.board_info))
self.mox.ReplayAll()
nodes = tilera.BareMetalNodes()
self.assertEqual(nodes.get_hw_info('vcpus'), 10)
| tylertian/Openstack | openstack F/nova/nova/tests/baremetal/test_tilera.py | Python | apache-2.0 | 3,342 |
#!/usr/bin/python3
# Author: Damodar Rajbhandari
import numpy as np
import matplotlib.pyplot as plot
from random import randint
L = 10
"""
This represents, there are "L" (eg. 3) either in
one row or column. Hence,
Total sites = L*L
"""
for i in range(L):
for j in range(L):
        if randint(0, 1) == 1:
plot.scatter(i,j, color = 'red') # Dipole has spin up
else:
plot.scatter(i,j, color = 'black') # Dipole has spin down
plot.xlabel('x →')
plot.ylabel('y →')
plot.title('Initial configuration of our lattice')
plot.show()
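# A vectorized alternative (sketch) using the numpy import above -- build the
# whole random spin lattice at once instead of looping over sites:
#
#     spins = np.where(np.random.rand(L, L) < 0.5, -1, 1)
#     plot.imshow(spins, cmap='gray')
#     plot.show()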
| Damicristi/Ising-Model-in-2D | Tools/Initialize_lattice_plot.py | Python | gpl-3.0 | 547 |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""A library supporting use of the Google API Server.
This library helps you configure a set of ProtoRPC services to act as
Endpoints backends. In addition to translating ProtoRPC to Endpoints
compatible errors, it exposes a helper service that describes your services.
Usage:
1) Create an endpoints.api_server instead of a webapp.WSGIApplication.
2) Annotate your ProtoRPC Service class with @endpoints.api to give your
API a name, version, and short description
3) To return an error from Google API Server raise an endpoints.*Exception
The ServiceException classes specify the http status code returned.
For example:
raise endpoints.UnauthorizedException("Please log in as an admin user")
Sample usage:
- - - - app.yaml - - - -
handlers:
# Path to your API backend.
- url: /_ah/spi/.*
# For the legacy python runtime this would be "script: services.py"
script: services.app
- - - - services.py - - - -
import endpoints
import postservice
app = endpoints.api_server([postservice.PostService], debug=True)
- - - - postservice.py - - - -
@endpoints.api(name='guestbook', version='v0.2', description='Guestbook API')
class PostService(remote.Service):
...
@endpoints.method(GetNotesRequest, Notes, name='notes.list', path='notes',
http_method='GET')
def list(self, request):
raise endpoints.UnauthorizedException("Please log in as an admin user")
"""
import cgi
import cStringIO
import httplib
import os
from endpoints import api_backend_service
from endpoints import api_config
from endpoints import api_exceptions
from endpoints import protojson
from protorpc import messages
from protorpc import remote
from protorpc.wsgi import service as wsgi_service
package = 'google.appengine.endpoints'
__all__ = [
'api_server',
'EndpointsErrorMessage',
'package',
]
class _Remapped405Exception(api_exceptions.ServiceException):
"""Method Not Allowed (405) ends up being remapped to 501.
This is included here for compatibility with the Java implementation. The
Google Cloud Endpoints server remaps HTTP 405 to 501."""
http_status = httplib.METHOD_NOT_ALLOWED
class _Remapped408Exception(api_exceptions.ServiceException):
"""Request Timeout (408) ends up being remapped to 503.
This is included here for compatibility with the Java implementation. The
Google Cloud Endpoints server remaps HTTP 408 to 503."""
http_status = httplib.REQUEST_TIMEOUT
_ERROR_NAME_MAP = dict((httplib.responses[c.http_status], c) for c in [
api_exceptions.BadRequestException,
api_exceptions.UnauthorizedException,
api_exceptions.ForbiddenException,
api_exceptions.NotFoundException,
_Remapped405Exception,
_Remapped408Exception,
api_exceptions.ConflictException,
api_exceptions.GoneException,
api_exceptions.PreconditionFailedException,
api_exceptions.RequestEntityTooLargeException,
api_exceptions.InternalServerErrorException
])
_ALL_JSON_CONTENT_TYPES = frozenset(
[protojson.EndpointsProtoJson.CONTENT_TYPE] +
protojson.EndpointsProtoJson.ALTERNATIVE_CONTENT_TYPES)
class EndpointsErrorMessage(messages.Message):
"""Message for returning error back to Google Endpoints frontend.
Fields:
state: State of RPC, should be 'APPLICATION_ERROR'.
error_message: Error message associated with status.
"""
class State(messages.Enum):
"""Enumeration of possible RPC states.
Values:
OK: Completed successfully.
RUNNING: Still running, not complete.
REQUEST_ERROR: Request was malformed or incomplete.
SERVER_ERROR: Server experienced an unexpected error.
      NETWORK_ERROR: An error occurred on the network.
APPLICATION_ERROR: The application is indicating an error.
When in this state, RPC should also set application_error.
"""
OK = 0
RUNNING = 1
REQUEST_ERROR = 2
SERVER_ERROR = 3
NETWORK_ERROR = 4
APPLICATION_ERROR = 5
METHOD_NOT_FOUND_ERROR = 6
state = messages.EnumField(State, 1, required=True)
error_message = messages.StringField(2)
def _get_app_revision(environ=None):
"""Gets the app revision (minor app version) of the current app.
Args:
environ: A dictionary with a key CURRENT_VERSION_ID that maps to a version
string of the format <major>.<minor>.
Returns:
The app revision (minor version) of the current app, or None if one couldn't
be found.
"""
if environ is None:
environ = os.environ
if 'CURRENT_VERSION_ID' in environ:
return environ['CURRENT_VERSION_ID'].split('.')[1]
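# Example: with CURRENT_VERSION_ID == '1.385069581282190074' the revision
# returned is '385069581282190074' (the <minor> half of <major>.<minor>).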
class _ApiServer(object):
"""ProtoRPC wrapper, registers APIs and formats errors for Google API Server.
- - - - ProtoRPC error format - - - -
HTTP/1.0 400 Please log in as an admin user.
content-type: application/json
{
"state": "APPLICATION_ERROR",
"error_message": "Please log in as an admin user",
"error_name": "unauthorized",
}
- - - - Reformatted error format - - - -
HTTP/1.0 401 UNAUTHORIZED
content-type: application/json
{
"state": "APPLICATION_ERROR",
"error_message": "Please log in as an admin user"
}
"""
__SPI_PREFIX = '/_ah/spi/'
__BACKEND_SERVICE_ROOT = '%sBackendService' % __SPI_PREFIX
__SERVER_SOFTWARE = 'SERVER_SOFTWARE'
__IGNORE_RESTRICTION_PREFIXES = ('Development/', 'WSGIServer/', 'testutil/')
__HEADER_NAME_PEER = 'HTTP_X_APPENGINE_PEER'
__GOOGLE_PEER = 'apiserving'
__PROTOJSON = protojson.EndpointsProtoJson()
def __init__(self, api_services, **kwargs):
"""Initialize an _ApiServer instance.
The primary function of this method is to set up the WSGIApplication
instance for the service handlers described by the services passed in.
Additionally, it registers each API in ApiConfigRegistry for later use
in the BackendService.getApiConfigs() (API config enumeration service).
Args:
api_services: List of protorpc.remote.Service classes implementing the API
or a list of _ApiDecorator instances that decorate the service classes
for an API.
**kwargs: Passed through to protorpc.wsgi.service.service_handlers except:
protocols - ProtoRPC protocols are not supported, and are disallowed.
restricted - If True or unset, the API will only be allowed to serve to
Google's API serving infrastructure once deployed. Set to False to
allow other clients. Under dev_appserver, all clients are accepted.
NOTE! Under experimental launch, this is not a secure restriction and
other authentication mechanisms *must* be used to control access to
the API. The restriction is only intended to notify developers of
a possible upcoming feature to securely restrict access to the API.
Raises:
TypeError: if protocols are configured (this feature is not supported).
ApiConfigurationError: if there's a problem with the API config.
"""
for entry in api_services[:]:
if isinstance(entry, api_config._ApiDecorator):
api_services.remove(entry)
api_services.extend(entry.get_api_classes())
self.api_config_registry = api_backend_service.ApiConfigRegistry()
api_name_version_map = self.__create_name_version_map(api_services)
protorpc_services = self.__register_services(api_name_version_map,
self.api_config_registry)
backend_service = api_backend_service.BackendServiceImpl.new_factory(
self.api_config_registry, _get_app_revision())
protorpc_services.insert(0, (self.__BACKEND_SERVICE_ROOT, backend_service))
if 'protocols' in kwargs:
raise TypeError('__init__() got an unexpected keyword argument '
"'protocols'")
protocols = remote.Protocols()
protocols.add_protocol(self.__PROTOJSON, 'protojson')
remote.Protocols.set_default(protocols)
self.restricted = kwargs.pop('restricted', True)
self.service_app = wsgi_service.service_mappings(protorpc_services,
**kwargs)
@staticmethod
def __create_name_version_map(api_services):
"""Create a map from API name/version to Service class/factory.
This creates a map from an API name and version to a list of remote.Service
factories that implement that API.
Args:
api_services: A list of remote.Service-derived classes or factories
created with remote.Service.new_factory.
Returns:
A mapping from (api name, api version) to a list of service factories,
for service classes that implement that API.
Raises:
ApiConfigurationError: If a Service class appears more than once
in api_services.
"""
api_name_version_map = {}
for service_factory in api_services:
try:
service_class = service_factory.service_class
except AttributeError:
service_class = service_factory
service_factory = service_class.new_factory()
key = service_class.api_info.name, service_class.api_info.version
service_factories = api_name_version_map.setdefault(key, [])
if service_factory in service_factories:
raise api_config.ApiConfigurationError(
'Can\'t add the same class to an API twice: %s' %
service_factory.service_class.__name__)
service_factories.append(service_factory)
return api_name_version_map
@staticmethod
def __register_services(api_name_version_map, api_config_registry):
"""Register & return a list of each SPI URL and class that handles that URL.
This finds every service class in api_name_version_map, registers it with
the given ApiConfigRegistry, builds the SPI url for that class, and adds
the URL and its factory to a list that's returned.
Args:
api_name_version_map: A mapping from (api name, api version) to a list of
service factories, as returned by __create_name_version_map.
api_config_registry: The ApiConfigRegistry where service classes will
be registered.
Returns:
A list of (SPI URL, service_factory) for each service class in
api_name_version_map.
Raises:
ApiConfigurationError: If a Service class appears more than once
in api_name_version_map. This could happen if one class is used to
implement multiple APIs.
"""
generator = api_config.ApiConfigGenerator()
protorpc_services = []
for service_factories in api_name_version_map.itervalues():
service_classes = [service_factory.service_class
for service_factory in service_factories]
config_file = generator.pretty_print_config_to_json(service_classes)
api_config_registry.register_spi(config_file)
for service_factory in service_factories:
protorpc_class_name = service_factory.service_class.__name__
root = _ApiServer.__SPI_PREFIX + protorpc_class_name
if any(service_map[0] == root or service_map[1] == service_factory
for service_map in protorpc_services):
raise api_config.ApiConfigurationError(
'Can\'t reuse the same class in multiple APIs: %s' %
protorpc_class_name)
protorpc_services.append((root, service_factory))
return protorpc_services
def __is_request_restricted(self, environ):
"""Determine if access to SPI should be denied.
Access will always be allowed in dev_appserver and under unit tests, but
will only be allowed in production if the HTTP header HTTP_X_APPENGINE_PEER
is set to 'apiserving'. Google's Endpoints server sets this header by
default and App Engine may securely prevent outside callers from setting it
in the future to allow better protection of the API backend.
Args:
environ: WSGI environment dictionary.
Returns:
True if access should be denied, else False.
"""
if not self.restricted:
return False
server = environ.get(self.__SERVER_SOFTWARE, '')
for prefix in self.__IGNORE_RESTRICTION_PREFIXES:
if server.startswith(prefix):
return False
peer_name = environ.get(self.__HEADER_NAME_PEER, '')
return peer_name.lower() != self.__GOOGLE_PEER
def __is_json_error(self, status, headers):
"""Determine if response is an error.
Args:
status: HTTP status code.
headers: Dictionary of (lowercase) header name to value.
Returns:
True if the response was an error, else False.
"""
content_header = headers.get('content-type', '')
content_type, unused_params = cgi.parse_header(content_header)
return (status.startswith('400') and
content_type.lower() in _ALL_JSON_CONTENT_TYPES)
def __write_error(self, status_code, error_message=None):
"""Return the HTTP status line and body for a given error code and message.
Args:
status_code: HTTP status code to be returned.
error_message: Error message to be returned.
Returns:
Tuple (http_status, body):
http_status: HTTP status line, e.g. 200 OK.
body: Body of the HTTP request.
"""
if error_message is None:
error_message = httplib.responses[status_code]
status = '%d %s' % (status_code, httplib.responses[status_code])
message = EndpointsErrorMessage(
state=EndpointsErrorMessage.State.APPLICATION_ERROR,
error_message=error_message)
return status, self.__PROTOJSON.encode_message(message)
def protorpc_to_endpoints_error(self, status, body):
"""Convert a ProtoRPC error to the format expected by Google Endpoints.
If the body does not contain an ProtoRPC message in state APPLICATION_ERROR
the status and body will be returned unchanged.
Args:
status: HTTP status of the response from the backend
body: JSON-encoded error in format expected by Endpoints frontend.
Returns:
Tuple of (http status, body)
"""
try:
rpc_error = self.__PROTOJSON.decode_message(remote.RpcStatus, body)
except (ValueError, messages.ValidationError):
rpc_error = remote.RpcStatus()
if rpc_error.state == remote.RpcStatus.State.APPLICATION_ERROR:
error_class = _ERROR_NAME_MAP.get(rpc_error.error_name)
if error_class:
status, body = self.__write_error(error_class.http_status,
rpc_error.error_message)
return status, body
def __call__(self, environ, start_response):
"""Wrapper for Swarm server app.
Args:
environ: WSGI request environment.
start_response: WSGI start response function.
Returns:
Response from service_app or appropriately transformed error response.
"""
def StartResponse(status, headers, exc_info=None):
"""Save args, defer start_response until response body is parsed.
Create output buffer for body to be written into.
Note: this is not quite WSGI compliant: The body should come back as an
iterator returned from calling service_app() but instead, StartResponse
returns a writer that will be later called to output the body.
See google/appengine/ext/webapp/__init__.py::Response.wsgi_write()
write = start_response('%d %s' % self.__status, self.__wsgi_headers)
write(body)
Args:
status: Http status to be sent with this response
headers: Http headers to be sent with this response
exc_info: Exception info to be displayed for this response
Returns:
callable that takes as an argument the body content
"""
call_context['status'] = status
call_context['headers'] = headers
call_context['exc_info'] = exc_info
return body_buffer.write
if self.__is_request_restricted(environ):
status, body = self.__write_error(httplib.NOT_FOUND)
headers = [('Content-Type', 'text/plain')]
exception = None
else:
call_context = {}
body_buffer = cStringIO.StringIO()
body_iter = self.service_app(environ, StartResponse)
status = call_context['status']
headers = call_context['headers']
exception = call_context['exc_info']
body = body_buffer.getvalue()
if not body:
body = ''.join(body_iter)
headers_dict = dict([(k.lower(), v) for k, v in headers])
if self.__is_json_error(status, headers_dict):
status, body = self.protorpc_to_endpoints_error(status, body)
start_response(status, headers, exception)
return [body]
def api_server(api_services, **kwargs):
"""Create an api_server.
The primary function of this method is to set up the WSGIApplication
instance for the service handlers described by the services passed in.
Additionally, it registers each API in ApiConfigRegistry for later use
in the BackendService.getApiConfigs() (API config enumeration service).
Args:
api_services: List of protorpc.remote.Service classes implementing the API
or a list of _ApiDecorator instances that decorate the service classes
for an API.
**kwargs: Passed through to protorpc.wsgi.service.service_handlers except:
protocols - ProtoRPC protocols are not supported, and are disallowed.
restricted - If True or unset, the API will only be allowed to serve to
Google's API serving infrastructure once deployed. Set to False to
allow other clients. Under dev_appserver, all clients are accepted.
NOTE! Under experimental launch, this is not a secure restriction and
other authentication mechanisms *must* be used to control access to
the API. The restriction is only intended to notify developers of
a possible upcoming feature to securely restrict access to the API.
Returns:
A new WSGIApplication that serves the API backend and config registry.
Raises:
TypeError: if protocols are configured (this feature is not supported).
"""
if 'protocols' in kwargs:
raise TypeError("__init__() got an unexpected keyword argument 'protocols'")
return _ApiServer(api_services, **kwargs)
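# Minimal usage sketch (mirrors the module docstring above):
#
#     @endpoints.api(name='guestbook', version='v0.2')
#     class PostService(remote.Service):
#         ...
#
#     app = api_server([PostService], restricted=False)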
| ProfessionalIT/professionalit-webiste | sdk/google_appengine/lib/endpoints-1.0/endpoints/apiserving.py | Python | lgpl-3.0 | 18,737 |
# Copyright (C) 2009-2015 Contributors as noted in the AUTHORS file
#
# This file is part of Autopilot.
#
# Autopilot is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Autopilot is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Autopilot. If not, see <http://www.gnu.org/licenses/>.
import wx
import lib.guinatives.facade as facade
from lib.app.exceptions import NotFoundException
from lib.reporting.logger import Logger
from lib.guinatives.facade import has_win_implementation
class Found(Exception):
pass
def get_dialog_label():
return facade.get_window_text(facade.get_active_window()).decode("utf-8")
class GuiExplorer(object):
def find_win_by_name_or_label(self, name_or_label):
self.wxctrl = None
self.winctrl = None
try:
if name_or_label is None:
self.wxctrl = wx.GetApp().GetTopWindow()
raise Found("Top window")
for name in name_or_label.split("|"):
name = name.strip()
self.find_wx_win(name, name)
self.find_win_win(name)
except Found, ex:
Logger.add_debug(ex.message)
return
raise NotFoundException()
def find_wx_win(self, name=None, label=None):
self.find_wxwin_by_name(name)
self.find_wxwin_by_label(label)
def find_wxwin_by_name(self, name):
for wxwin in wx.GetTopLevelWindows():
if name is not None and hasattr(wxwin, "Name"):
found_msg = "wx Name (%s) found" % name
if wxwin.Name == name:
self.wxctrl = wxwin
raise Found(found_msg)
def find_wxwin_by_label(self, label):
for wxwin in wx.GetTopLevelWindows():
if label is not None and hasattr(wxwin, "Label"):
found_msg = "wx Label (%s) found" % label
if wxwin.Label == label:
self.wxctrl = wxwin
raise Found(found_msg)
def find_win_win(self, label):
self.find_winwin_by_label(label)
def find_winwin_by_label(self, label):
if label is not None:
found_msg = "win Label (%s) found" % label
hwnd = facade.get_active_window()
winlbl = facade.get_window_text(hwnd)
winlbl = winlbl.decode("utf-8")
if winlbl == label:
self.winctrl = hwnd
raise Found(found_msg)
def find_ctrl(self, parent, key, wx_classname=None):
self.wxctrl = None
self.winctrl = None
try:
pos = int(key)
wxid = pos
name = None
label = None
        except (TypeError, ValueError):
pos = None
wxid = None
name = key
label = key
self._find_ctrl(parent, name, label, wxid=wxid, pos=pos, wx_classname=wx_classname)
def is_wx_control(self):
return self.wxctrl is not None
def is_native_control(self):
return self.winctrl is not None
def get_ctrl(self):
if self.is_wx_control():
return self.wxctrl
else:
return self.winctrl
def get_wxid(self):
return self.wxctrl.GetId()
def get_hwnd(self):
return self.winctrl
def _find_ctrl(self, parent, name=None, label=None, wxid=None, pos=None, wx_classname=None):
try:
self.find_wx_ctrl(parent, name, label, wxid, pos, wx_classname)
self.find_win_ctrl(parent, name, label, wxid, pos, wx_classname)
except Found, ex:
Logger.add_debug(ex.message)
return
raise NotFoundException()
def find_wx_ctrl(self, parent, name=None, label=None, wxid=None, pos=None, wx_classname=None):
self.find_ctrl_by_name(parent, name)
self.find_ctrl_by_label(parent, label)
self.find_ctrl_by_wxid(parent, wxid)
self.find_ctrl_by_pos(parent, wx_classname, pos)
def find_win_ctrl(self, parent, name=None, label=None, wxid=None, pos=None, wx_classname=None):
if has_win_implementation():
self.find_win_ctrl_by_pos(parent, wx_classname, pos)
self.find_win_ctrl_by_label(parent, label)
def find_ctrl_by_name(self, parent, name):
if name is not None and hasattr(parent, "GetChildren"):
found_msg = "wx Name (%s) found" % name
for ctrl in parent.GetChildren():
if not hasattr(ctrl, "Name"):
continue
if ctrl.Name == name:
self.wxctrl = ctrl
raise Found(found_msg)
self.find_ctrl_by_name(ctrl, name)
def find_ctrl_by_label(self, parent, label):
if label is not None and hasattr(parent, "GetChildren"):
found_msg = "wx Label (%s) found" % label
for ctrl in parent.GetChildren():
if not hasattr(ctrl, "Label"):
continue
if ctrl.Label == label:
self.wxctrl = ctrl
raise Found(found_msg)
            # Try with ellipsis ("...") appended
elif ctrl.Label == label + "...":
self.wxctrl = ctrl
raise Found(found_msg)
# Try with accelerator
else:
for i in range(len(label)):
lbl = label[0:i] + "&" + label[i:]
if ctrl.Label == lbl:
self.wxctrl = ctrl
raise Found(found_msg)
def find_ctrl_by_wxid(self, parent, wxid):
if wxid is not None:
found_msg = "wx id found(%d)" % wxid
for ctrl in parent.GetChildren():
if not hasattr(ctrl, "GetId"):
continue
if ctrl.GetId() == wxid:
self.wxctrl = ctrl
raise Found(found_msg)
def find_ctrl_by_pos(self, parent, wx_classname, pos):
if pos is not None:
found_msg = "wx position found(%d)" % pos
inx = 0
for ctrl in parent.GetChildren():
if not hasattr(ctrl, "ClassName"):
continue
if ctrl.ClassName == wx_classname:
if inx == pos - 1:
self.wxctrl = ctrl
raise Found(found_msg)
else:
inx += 1
def find_win_ctrl_by_pos(self, parent, wx_classname, pos):
if wx_classname is not None and pos is not None:
found_msg = "win order (%d) found for class(%s)" % (pos, facade.wx_to_win_classname(wx_classname))
inx = 0
try:
hwnd = facade.get_active_window()
children = facade.get_children(hwnd)
except:
return
win_classname = facade.wx_to_win_classname(wx_classname)
for hwnd, class_name, _ in children:
if class_name == win_classname:
if inx == pos - 1:
self.winctrl = hwnd
raise Found(found_msg)
else:
inx += 1
def find_win_ctrl_by_label(self, parent, label):
if label is not None:
found_msg = "win Label (%s) found" % label
try:
hwnd = facade.get_active_window()
children = facade.get_children(hwnd)
except:
return
for hwnd, _, winlbl in children:
if winlbl == label:
self.winctrl = hwnd
raise Found(found_msg)
                # Try with ellipsis ("...") appended
elif winlbl == label + "...":
self.winctrl = hwnd
raise Found(found_msg)
# Try with accelerator
else:
for i in range(len(label)):
lbl = label[0:i] + "&" + label[i:]
if winlbl == lbl:
self.winctrl = hwnd
raise Found(found_msg)
def find_menu(self, args):
try:
win = wx.GetApp().GetTopWindow()
item_id = self._find_menu_item_id(args)
if item_id != wx.NOT_FOUND:
self.item_id = item_id
raise Found("Menu found")
except Found, ex:
Logger.add_debug(ex.message)
return
raise NotFoundException()
def _find_menu_item_id(self, args):
win = wx.GetApp().GetTopWindow()
labels = args
menu_bar = self._get_menu_bar(win)
menu = self._find_menu(win, labels[0])
if menu is None:
raise NotFoundException()
labels = labels[1:]
while len(labels) > 0:
item_id = self._get_menu_item_id(menu, labels[0])
if len(labels) > 1:
menu_item = menu_bar.FindItemById(item_id)
menu = menu_item.GetSubMenu()
labels = labels[1:]
return item_id
def _find_menu(self, win, label):
menu_bar = self._get_menu_bar(win)
labels = label.split("|")
for label in labels:
inx = menu_bar.FindMenu(label)
if inx != -1:
return menu_bar.GetMenu(inx)
return None
def _get_menu_bar(self, win):
menu_bar = win.GetMenuBar()
if menu_bar is None:
raise NotFoundException()
return menu_bar
def _get_menu_item_id(self, menu, label):
labels = label.split("|")
for label in labels:
valid_labels = self._get_valid_labels(label)
            for lbl in valid_labels:
                try:
                    item_id = menu.FindItem(lbl)
                except Exception:
                    continue
                if item_id != wx.NOT_FOUND:
                    return item_id
        raise NotFoundException()
def _get_valid_labels(self, label):
valid_labels = [label]
self._get_elipsis_label(label, valid_labels)
self._get_accelerator_labels(label, valid_labels)
return valid_labels
def _get_elipsis_label(self, label, alternative_labels):
if label.endswith("..."):
label = label[:-3]
alternative_labels.append(label + "...")
alternative_labels.append(label + u"\u2026")
def _get_accelerator_labels(self, label, alternative_labels):
for i in range(len(label)):
alternative_label = label[0:i] + "&" + label[i:]
alternative_labels.append(alternative_label)
return alternative_labels
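# Usage sketch: find a window, then a control on it, and act on whichever
# backend matched (wx widget or native win32 handle):
#
#     explorer = GuiExplorer()
#     explorer.find_win_by_name_or_label("Preferences")
#     explorer.find_ctrl(explorer.get_ctrl(), "OK", wx_classname="wxButton")
#     if explorer.is_wx_control():
#         wx_id = explorer.get_wxid()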
| rogerlindberg/autopilot | src/lib/guinatives/ctrlfinder.py | Python | gpl-3.0 | 11,309 |
"""
radish
~~~~~~
The root from red to green. BDD tooling for Python.
:copyright: (c) 2019 by Timo Furrer <tuxtimo@gmail.com>
:license: MIT, see LICENSE for more details.
"""
import sys
from pathlib import Path
import click
import colorful as cf
from radish.config import Config
from radish.terrain import world
from radish.parser.core import FeatureFileParser
from radish.errors import RadishError
from radish.logger import enable_radish_debug_mode_click_hook, logger
def expand_feature_files(ctx, param, feature_files):
"""Expand the given feature files
Expanding directories recursively for Feature Files
"""
expanded_feature_files = []
for feature_file_location in (Path(f) for f in feature_files):
if feature_file_location.is_dir():
expanded_feature_files.extend(
list(feature_file_location.glob("**/*.feature"))
)
else:
expanded_feature_files.append(feature_file_location)
return expanded_feature_files
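# Example: ("features", "extra.feature") expands to every *.feature file found
# recursively under features/ plus extra.feature itself.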
@click.command()
@click.version_option()
@click.help_option("--help", "-h")
@click.option(
"--debug",
"-d",
"enable_debug_mode",
is_flag=True,
help="Enable debug mode for radish itself",
callback=enable_radish_debug_mode_click_hook,
)
@click.option(
"--no-ansi",
"no_ansi",
is_flag=True,
help=(
"Turn off all ANSI sequences (colors, line rewrites) ."
"This option is mainly used by the formatters"
),
)
@click.option(
"--resolve-preconditions",
"resolve_preconditions",
is_flag=True,
help="Resolve @preconditions when parsing the Feature Files",
)
@click.argument(
"feature_files",
nargs=-1,
type=click.Path(exists=True),
callback=expand_feature_files,
)
def cli(**kwargs):
"""radish - The root from red to green. BDD tooling for Python.
Parse and Pretty Print the raw radish AST for the given Feature Files
Provide the Feature Files to run in FEATURE_FILES.
"""
config = Config(kwargs)
world.config = config
    # turn off ANSI colors if requested
if config.no_ansi:
cf.disable()
parser = FeatureFileParser(
ast_transformer=None, resolve_preconditions=config.resolve_preconditions
)
for feature_file in config.feature_files:
logger.info("Parsing Feature File %s", feature_file)
try:
feature_ast = parser.parse(feature_file)
if feature_ast:
print(feature_ast.pretty())
except RadishError as exc:
print("", flush=True)
print(
"An error occured while parsing the Feature File {}:".format(
feature_file
),
flush=True,
)
print(exc, flush=True)
sys.exit(1)
if __name__ == "__main__":
cli()
| radish-bdd/radish | src/radish/parser/__main__.py | Python | mit | 2,847 |
# -*- coding: utf-8 -*-
# This file is part of PrawoKultury, licensed under GNU Affero GPLv3 or later.
# Copyright © Fundacja Nowoczesna Polska. See NOTICE for more information.
#
import re
from datetime import datetime
from django.conf import settings
from django.contrib.sites.models import Site
from django.core.exceptions import ValidationError
from django.core.mail import mail_managers, send_mail
from django.db import models
from django.template import loader, Context
from django.utils.translation import ugettext_lazy as _, ugettext
from django_comments_xtd.models import XtdComment
from fnpdjango.utils.fields import TextileField
from fnpdjango.utils.models.translation import add_translatable, tQ
from migdal import app_settings
from migdal.fields import SlugNullField
class Category(models.Model):
taxonomy = models.CharField(_('taxonomy'), max_length=32, choices=app_settings.TAXONOMIES)
class Meta:
verbose_name = _('category')
verbose_name_plural = _('categories')
def __unicode__(self):
return self.title or u""
@models.permalink
def get_absolute_url(self):
return 'migdal_category', [self.slug]
add_translatable(Category, languages=app_settings.LANGUAGES, fields={
'title': models.CharField(max_length=64, unique=True, db_index=True),
'slug': models.SlugField(unique=True, db_index=True),
})
class PublishedEntryManager(models.Manager):
def get_queryset(self):
return super(PublishedEntryManager, self).get_queryset().filter(
tQ(published=True)
)
class Entry(models.Model):
type = models.CharField(
max_length=16,
choices=((t.db, t.slug) for t in app_settings.TYPES),
db_index=True)
date = models.DateTimeField(_('created at'), auto_now_add=True, db_index=True)
changed_at = models.DateTimeField(_('changed at'), auto_now=True, db_index=True)
author = models.CharField(_('author'), max_length=128)
author_email = models.EmailField(
_('author email'), max_length=128, null=True, blank=True,
help_text=_('Used only to display gravatar and send notifications.'))
image = models.ImageField(_('image'), upload_to='entry/image/', null=True, blank=True)
promo = models.BooleanField(_('promoted'), default=False)
in_stream = models.BooleanField(_('in stream'), default=True)
categories = models.ManyToManyField(Category, blank=True, verbose_name=_('categories'))
first_published_at = models.DateTimeField(_('published at'), null=True, blank=True)
canonical_url = models.URLField(_('canonical link'), null=True, blank=True)
objects = models.Manager()
published_objects = PublishedEntryManager()
class Meta:
verbose_name = _('entry')
verbose_name_plural = _('entries')
ordering = ['-date']
def __unicode__(self):
return self.title
def save(self, *args, **kwargs):
published_now = False
for lc, ln in app_settings.LANGUAGES:
if (getattr(self, "published_%s" % lc)
and getattr(self, "published_at_%s" % lc) is None):
now = datetime.now()
setattr(self, "published_at_%s" % lc, now)
if self.first_published_at is None:
self.first_published_at = now
published_now = True
super(Entry, self).save(*args, **kwargs)
if published_now and self.pk is not None:
self.notify_author_published()
def clean(self):
for lc, ln in app_settings.LANGUAGES:
if (getattr(self, "published_%s" % lc) and
not getattr(self, "slug_%s" % lc)):
raise ValidationError(
ugettext("Published entry should have a slug in relevant language (%s).") % lc)
@models.permalink
def get_absolute_url(self):
return 'migdal_entry_%s' % self.type, [self.slug]
def get_type(self):
return dict(app_settings.TYPES_DICT)[self.type]
def notify_author_published(self):
if not self.author_email:
return
site = Site.objects.get_current()
mail_text = loader.get_template('migdal/mail/published.txt').render(
Context({
'entry': self,
'site': site,
}))
send_mail(
ugettext(u'Your story has been published at %s.') % site.domain,
mail_text, settings.SERVER_EMAIL, [self.author_email]
)
def inline_html(self):
for att in self.attachment_set.all():
if att.file.name.endswith(".html"):
with open(att.file.path) as f:
yield f.read()
add_translatable(Entry, languages=app_settings.OPTIONAL_LANGUAGES, fields={
'needed': models.CharField(_('needed'), max_length=1, db_index=True, choices=(
('n', _('Unneeded')), ('w', _('Needed')), ('y', _('Done'))),
default='n'),
})
TEXTILE_HELP = _('Use <a href="https://txstyle.org/article/44/an-overview-of-the-textile-syntax">Textile</a> syntax.')
add_translatable(Entry, languages=app_settings.LANGUAGES, fields={
'slug': SlugNullField(unique=True, db_index=True, null=True, blank=True),
'title': models.CharField(_('title'), max_length=255, null=True, blank=True),
'lead': TextileField(
_('lead'), markup_type='textile_pl', null=True, blank=True, help_text=TEXTILE_HELP),
'body': TextileField(
_('body'), markup_type='textile_pl', null=True, blank=True, help_text=TEXTILE_HELP),
'place': models.CharField(_('place'), null=True, blank=True, max_length=256),
'time': models.CharField(_('time'), null=True, blank=True, max_length=256),
'published': models.BooleanField(_('published'), default=False),
'published_at': models.DateTimeField(_('published at'), null=True, blank=True),
})
class Attachment(models.Model):
file = models.FileField(_('file'), upload_to='entry/attach/')
entry = models.ForeignKey(Entry)
def url(self):
return self.file.url if self.file else ''
class Photo(models.Model):
image = models.ImageField(_('image'), upload_to='entry/photo/')
entry = models.ForeignKey(Entry)
def url(self):
return self.image.url if self.image else ''
def notify_new_comment(sender, instance, created, **kwargs):
if created and isinstance(instance.content_object, Entry) and instance.content_object.author_email:
site = Site.objects.get_current()
mail_text = loader.get_template('migdal/mail/new_comment.txt').render(
Context({
'comment': instance,
'site': site,
}))
send_mail(
ugettext(u'New comment under your story at %s.') % site.domain,
mail_text, settings.SERVER_EMAIL,
[instance.content_object.author_email]
)
models.signals.post_save.connect(notify_new_comment, sender=XtdComment)
def spamfilter(sender, comment, **kwargs):
"""Very simple spam filter. Just don't let any HTML links go through."""
if re.search(r"<a\s+href=", comment.comment):
fields = (
comment.user, comment.user_name, comment.user_email,
comment.user_url, comment.submit_date, comment.ip_address,
comment.followup, comment.comment)
mail_managers(
u"Spam filter report",
(u"""This comment was turned down as SPAM: \n""" +
"""\n%s""" * len(fields) +
"""\n\nYou don't have to do anything.""") % fields)
return False
return True
# comment_will_be_posted.connect(spamfilter)
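# Example: a comment body containing '<a href="...">' is rejected and reported
# to the site managers; plain-text URLs still pass the filter.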
| fnp/django-migdal | migdal/models.py | Python | agpl-3.0 | 7,640 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from functools import partial
from PyQt5 import QtWidgets, QtCore
from controller.gensec.dialogs.processes.information import Information
from view.dialogs.base_dialog import BaseDialog
from view.gensec.dialogs.processes.ui_posl import Ui_process
class POSL(BaseDialog, Ui_process):
def __init__(self, process_data=False, parent=None):
BaseDialog.__init__(self, parent)
self.setupUi(self)
self.id = 4
self.date_type = ''
self.comments = ''
self.channels_calculation = 0
self.time_per_channel_calculation = 0
self.information_dialog = None
self.push_button_accept.clicked.connect(self.accept)
self.push_button_cancel.clicked.connect(self.close)
self.push_button_information.clicked.connect(self.showInformationDialog)
self.before_stimulation.valueChanged.connect(partial(self.dataPointsValidator, 1))
self.during_stimulation.valueChanged.connect(partial(self.dataPointsValidator, 2))
self.after_stimulation.valueChanged.connect(partial(self.dataPointsValidator, 3))
self.time.valueChanged.connect(self.updateTimePerChannel)
self.time_measurement.currentIndexChanged.connect(self.updateTimePerChannel)
width = self.sizeHint().width()
height = self.sizeHint().height()
widget = QtWidgets.QDesktopWidget()
main_screen_size = widget.availableGeometry(widget.primaryScreen())
pos_x = (main_screen_size.width() / 2) - (width / 2)
pos_y = (main_screen_size.height() / 2) - (height / 2)
self.setGeometry(QtCore.QRect(pos_x, pos_y, width, height))
self.fill(process_data)
def fill(self, process_data):
        if process_data:
self.stabilization.setValue(process_data["stabilization"])
self.heating_rate.setValue(process_data["heating_rate"])
self.final_temperature.setValue(process_data["final_temp"])
self.time.setValue(self.convertTime(process_data["time"], process_data["time_unit"]))
self.optical_power.setValue(process_data["start_optical_power"])
self.before_stimulation.setValue(process_data["datapoints1"])
self.during_stimulation.setValue(process_data["datapoints2"])
self.after_stimulation.setValue(process_data["datapoints3"])
self.number_of_scan.setValue(process_data["number_scan"])
time_measurements = {
'ms': 0,
's': 1,
'us': 2
}
self.time_measurement.setCurrentIndex(time_measurements[process_data["time_unit"]])
light_source = {
'Blue': 0,
'IR': 1,
'AUX': 2,
}
self.ligth_source.setCurrentIndex(light_source[process_data["light_source"]])
self.time_per_channel_calculation = process_data["timePerChannel"]
self.channels_calculation = process_data["channels"]
self.date_type = process_data["date_type"]
self.comments = process_data["comments"]
self.updateTimePerChannel()
self.dataPointsValidator(None)
def showInformationDialog(self):
self.information_dialog = Information(self.date_type, self.comments, self)
self.information_dialog.accepted.connect(self.informationAccepted)
self.information_dialog.exec_()
def informationAccepted(self):
self.date_type, self.comments = self.information_dialog.getData()
self.information_dialog.close()
    def convertTime(self, time, time_measurement):
        # Stored times are in seconds; scale to the unit shown in the dialog.
        if time_measurement == 'ms':
            return float(time) / 0.001
        elif time_measurement == 's':
            return float(time)
        elif time_measurement == 'us':
            return float(time) / 0.000001
    def getTime(self):
        # Convert the widget value back to seconds for storage.
        time = self.time.value()
        if self.time_measurement.currentIndex() == 0:    # ms
            time *= 0.001
        elif self.time_measurement.currentIndex() == 1:  # s
            pass
        elif self.time_measurement.currentIndex() == 2:  # us
            # toString() keeps tiny values out of exponent notation, so this
            # branch returns a string (callers apply float() where needed).
            time = self.toString(time * 0.000001)
        return time
    def toString(self, f):
        # str() of e.g. 0.000005 gives '5e-06'; adding 1 first preserves the
        # plain decimal digits, then the leading '1' is swapped back to '0'.
        if int(f) < 1:
            s = str(f + 1)
            temp = s.split('.')
            temp[0] = '0'
            s = temp[0] + '.' + temp[1]
        else:
            s = str(f)
        return s
def updateTimePerChannel(self):
try:
self.time_per_channel_calculation = self.time.value() / self.channels_calculation
        except ZeroDivisionError:
pass
time_measurement = str(self.time_measurement.currentText())
self.time_per_channel.setText(str(round(self.time_per_channel_calculation, 2)) + ' ' + time_measurement)
def dataPointsValidator(self, button):
before = self.before_stimulation.value()
during = self.during_stimulation.value()
after = self.after_stimulation.value()
if (before + during + after) > 512:
if button == 1:
self.before_stimulation.setValue(before - 1)
elif button == 2:
self.during_stimulation.setValue(during - 1)
else:
self.after_stimulation.setValue(after - 1)
else:
self.channels_calculation = before + during + after
self.channels.setText(str(self.channels_calculation))
self.updateTimePerChannel()
def getData(self):
data = "POSL, " + \
str(self.ligth_source.currentText()) + ", " + \
str(self.optical_power.value()) + "%"
all_ = {
"id": self.id,
"light_source": str(self.ligth_source.currentText()),
"start_optical_power": self.optical_power.value(),
"number_scan": self.number_of_scan.value(),
"time": self.getTime(),
"time_unit": str(self.time_measurement.currentText()),
"datapoints1": self.before_stimulation.value(),
"datapoints2": self.during_stimulation.value(),
"datapoints3": self.after_stimulation.value(),
"final_temp": self.final_temperature.value(),
"time_final_temp": self.toString(float(self.getTime()) + self.stabilization.value()),
"heating_rate": self.heating_rate.value(),
"stabilization": self.stabilization.value(),
"date_type": self.date_type,
"comments": self.comments,
"channels": self.channels_calculation,
"timePerChannel": self.time_per_channel_calculation
}
return data, all_
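# getData() returns a (summary, settings) pair -- e.g. ("POSL, Blue, 90%",
# {...}) -- where the short summary labels the process and the dict carries
# every process parameter.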
| carlos-ferras/Sequence-ToolKit | controller/gensec/dialogs/processes/posl.py | Python | gpl-3.0 | 6,732 |
#############################################################################
##
## Copyright (C) 2014 Digia Plc and/or its subsidiary(-ies).
## Contact: http://www.qt-project.org/legal
##
## This file is part of Qt Creator.
##
## Commercial License Usage
## Licensees holding valid commercial Qt licenses may use this file in
## accordance with the commercial license agreement provided with the
## Software or, alternatively, in accordance with the terms contained in
## a written agreement between you and Digia. For licensing terms and
## conditions see http://qt.digia.com/licensing. For further information
## use the contact form at http://qt.digia.com/contact-us.
##
## GNU Lesser General Public License Usage
## Alternatively, this file may be used under the terms of the GNU Lesser
## General Public License version 2.1 as published by the Free Software
## Foundation and appearing in the file LICENSE.LGPL included in the
## packaging of this file. Please review the following information to
## ensure the GNU Lesser General Public License version 2.1 requirements
## will be met: http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
##
## In addition, as a special exception, Digia gives you certain additional
## rights. These rights are described in the Digia Qt LGPL Exception
## version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
##
#############################################################################
source("../shared/qmls.py")
source("../../shared/suites_qtta.py")
# go to proper line, make backup, type needed text
def __beginTestSuggestions__(editorArea, lineText, textToType):
# make source code backup to clipboard
type(editorArea, "<Ctrl+a>")
type(editorArea, "<Ctrl+c>")
# place cursor to proper position and start typing
if not placeCursorToLine(editorArea, lineText):
return False
type(editorArea, "<Return>")
type(editorArea, textToType)
return True
# verify whether the suggestions make sense for the typed textToType
def verifySuggestions(textToType):
popup = findObject(":popupFrame_Proposal_QListView")
model = popup.model()
for text in dumpItems(model):
test.verify(textToType.lower() in text.lower(),
"Checking whether suggestion '%s' makes sense for typed '%s'"
% (text, textToType))
# restore source code from clipboard backup
def __endTestSuggestions__(editorArea):
type(editorArea, "<Ctrl+a>")
type(editorArea, "<Ctrl+v>")
def testSuggestionsAuto(lineText, textToType, expectedText, keyToUseSuggestion):
# get editor
editorArea = waitForObject(":Qt Creator_QmlJSEditor::QmlJSTextEditorWidget")
# go to proper line, make backup, type needed text
if not __beginTestSuggestions__(editorArea, lineText, textToType):
return False
# check if suggestions are shown
if not test.verify(checkIfObjectExists(":popupFrame_Proposal_QListView"),
"Verifying if suggestions in automatic mode are shown."):
__endTestSuggestions__(editorArea)
return False
# verify proposed suggestions
verifySuggestions(textToType)
# test if suggestion can be selected with keyToUseSuggestion
type(findObject(":popupFrame_Proposal_QListView"), keyToUseSuggestion)
# get text which was written by usage of suggestion
typedText = str(lineUnderCursor(editorArea)).strip()
# verify if expected text is written
test.compare(typedText, expectedText,
"Verifying automatic suggestions usage with: " + keyToUseSuggestion + ", for text: " + textToType)
__endTestSuggestions__(editorArea)
return True
def testSuggestionsManual(lineText, textToType, expectedText):
# get editor
editorArea = waitForObject(":Qt Creator_QmlJSEditor::QmlJSTextEditorWidget")
# go to proper line, make backup, type needed text
if not __beginTestSuggestions__(editorArea, lineText, textToType):
return False
    # the popup must not appear automatically - in manual mode suggestions are only shown on request
if not test.verify(checkIfObjectExists(":popupFrame_Proposal_QListView", False),
"Verifying if suggestions in manual mode are not automatically shown"):
__endTestSuggestions__(editorArea)
return False
# test if suggestion can be invoked manually
if platform.system() == "Darwin":
type(editorArea, "<Meta+Space>")
else:
type(editorArea, "<Ctrl+Space>")
# check if suggestions are shown
if not test.verify(checkIfObjectExists(":popupFrame_Proposal_QListView"),
"Verifying if suggestions in manual mode are shown manually"):
__endTestSuggestions__(editorArea)
return False
# verify proposed suggestions
verifySuggestions(textToType)
# test if suggestion can be used
type(findObject(":popupFrame_Proposal_QListView"), "<Return>")
# get text which was written by usage of suggestion
typedText = str(lineUnderCursor(editorArea)).strip()
# verify if expected text is written
test.compare(typedText, expectedText,
"Verifying manual suggestions usage for text: " + textToType)
__endTestSuggestions__(editorArea)
return True
def saveAndExit():
invokeMenuItem("File", "Save All")
invokeMenuItem("File", "Exit")
def main():
if not startQtCreatorWithNewAppAtQMLEditor(tempDir(), "SampleApp"):
return
# test "color: " suggestion usage with Enter key
if not testSuggestionsAuto("Text {", "col", "color:", "<Return>"):
saveAndExit()
return
# test "color: " suggestion usage with Tab key
if not testSuggestionsAuto("Text {", "col", "color:", "<Tab>"):
saveAndExit()
return
# test "textChanged: " suggestion - automatic insert, because only one suggestion available
shortcutToSuggestions = "<Ctrl+Space>"
if platform.system() == "Darwin":
shortcutToSuggestions = "<Meta+Space>"
if not testSuggestionsAuto("Text {","textChan", "textChanged:", shortcutToSuggestions):
saveAndExit()
return
# change settings to manual insertion of suggestions
changeAutocompleteToManual()
# test manual suggestions
testSuggestionsManual("Text {", "col", "color:")
# exit qt creator
saveAndExit()
| maui-packages/qt-creator | tests/system/suite_QMLS/tst_QMLS01/test.py | Python | lgpl-2.1 | 6,329 |
#!/usr/bin/env python
## \file set_ffd_design_var.py
# \brief Python script for automatically generating a list of FFD variables.
# \author T. Economon, F. Palacios
# \version 4.0.0 "Cardinal"
#
#
# SU2 Lead Developers: Dr. Francisco Palacios (Francisco.D.Palacios@boeing.com).
# Dr. Thomas D. Economon (economon@stanford.edu).
#
# SU2 Developers: Prof. Juan J. Alonso's group at Stanford University.
# Prof. Piero Colonna's group at Delft University of Technology.
# Prof. Nicolas R. Gauger's group at Kaiserslautern University of Technology.
# Prof. Alberto Guardone's group at Polytechnic University of Milan.
# Prof. Rafael Palacios' group at Imperial College London.
#
# Copyright (C) 2012-2015 SU2, the open-source CFD code.
#
# SU2 is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# SU2 is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with SU2. If not, see <http://www.gnu.org/licenses/>.
import os, time
from optparse import OptionParser
from numpy import *
parser = OptionParser()
parser.add_option("-i", "--iDegree", dest="iDegree", default=4,
help="i degree of the FFD box", metavar="IDEGREE")
parser.add_option("-j", "--jDegree", dest="jDegree", default=4,
help="j degree of the FFD box", metavar="JDEGREE")
parser.add_option("-k", "--kDegree", dest="kDegree", default=1,
help="k degree of the FFD box", metavar="KDEGREE")
parser.add_option("-b", "--ffdid", dest="ffd_id", default=0,
help="ID of the FFD box", metavar="FFD_ID")
parser.add_option("-m", "--marker", dest="marker",
help="marker name of the design surface", metavar="MARKER")
parser.add_option("-s", "--scale", dest="scale", default=1.0,
help="scale factor for the bump functions", metavar="SCALE")
parser.add_option("-d", "--dimension", dest="dimension", default=3.0,
help="dimension of the problem", metavar="DIMENSION")
(options, args)=parser.parse_args()
# Process options
options.iOrder = int(options.iDegree) + 1
options.jOrder = int(options.jDegree) + 1
options.kOrder = int(options.kDegree) + 1
options.ffd_id = str(options.ffd_id)
options.marker = str(options.marker)
options.scale = float(options.scale)
options.dim = int(options.dimension)
if options.dim == 3:
print " "
print "FFD_CONTROL_POINT"
iVariable = 0
dvList = "DEFINITION_DV= "
for kIndex in range(options.kOrder):
for jIndex in range(options.jOrder):
for iIndex in range(options.iOrder):
iVariable = iVariable + 1
dvList = dvList + "( 7, " + str(options.scale) + " | " + options.marker + " | "
dvList = dvList + options.ffd_id + ", " + str(iIndex) + ", " + str(jIndex) + ", " + str(kIndex) + ", 0.0, 0.0, 1.0 )"
if iVariable < (options.iOrder*(options.jOrder)*options.kOrder):
dvList = dvList + "; "
print dvList
print " "
print "FFD_CAMBER & FFD_THICKNESS"
iVariable = 0
dvList = "DEFINITION_DV= "
for jIndex in range(options.jOrder):
for iIndex in range(options.iOrder):
iVariable = iVariable + 1
dvList = dvList + "( 11, " + str(options.scale) + " | " + options.marker + " | "
dvList = dvList + options.ffd_id + ", " + str(iIndex) + ", " + str(jIndex) + " )"
dvList = dvList + "; "
iVariable = 0
for jIndex in range(options.jOrder):
for iIndex in range(options.iOrder):
iVariable = iVariable + 1
dvList = dvList + "( 12, " + str(options.scale) + " | " + options.marker + " | "
dvList = dvList + options.ffd_id + ", " + str(iIndex) + ", " + str(jIndex) + " )"
if iVariable < (options.iOrder*(options.jOrder)):
dvList = dvList + "; "
print dvList
if options.dim == 2:
    print " "
    print "FFD_CONTROL_POINT"
    iVariable = 0
dvList = "DEFINITION_DV= "
for jIndex in range(options.jOrder):
for iIndex in range(options.iOrder):
iVariable = iVariable + 1
dvList = dvList + "( 15, " + str(options.scale) + " | " + options.marker + " | "
dvList = dvList + options.ffd_id + ", " + str(iIndex) + ", " + str(jIndex) + ", 0.0, 1.0 )"
if iVariable < (options.iOrder*options.jOrder):
dvList = dvList + "; "
print dvList
print " "
print "FFD_CAMBER & FFD_THICKNESS"
iVariable = 0
dvList = "DEFINITION_DV= "
for iIndex in range(options.iOrder):
iVariable = iVariable + 1
dvList = dvList + "( 16, " + str(options.scale) + " | " + options.marker + " | "
dvList = dvList + options.ffd_id + ", " + str(iIndex) + " )"
dvList = dvList + "; "
iVariable = 0
for iIndex in range(options.iOrder):
iVariable = iVariable + 1
dvList = dvList + "( 17, " + str(options.scale) + " | " + options.marker + " | "
dvList = dvList + options.ffd_id + ", " + str(iIndex) + " )"
if iVariable < (options.iOrder):
dvList = dvList + "; "
print dvList
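# Example invocation (the marker name 'WING' is hypothetical; with the default
# degrees 4/4/1 this prints DEFINITION_DV entries for a 5x5x2 FFD box):
#
#   $ python set_ffd_design_var.py -i 4 -j 4 -k 1 -b 0 -m WING -s 1.0 -d 3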
| huahbo/SU2 | SU2_PY/set_ffd_design_var.py | Python | lgpl-2.1 | 5,373 |
# -*- coding: utf-8 -*-
import pycurl
import re
import time
from module.common.json_layer import json_loads
from module.plugins.internal.ReCaptcha import ReCaptcha
from module.plugins.internal.SimpleHoster import SimpleHoster, create_getInfo
class RapiduNet(SimpleHoster):
__name__ = "RapiduNet"
__type__ = "hoster"
__version__ = "0.09"
__pattern__ = r'https?://(?:www\.)?rapidu\.net/(?P<ID>\d{10})'
__config__ = [("use_premium", "bool", "Use premium account if available", True)]
__description__ = """Rapidu.net hoster plugin"""
__license__ = "GPLv3"
__authors__ = [("prOq", "")]
COOKIES = [("rapidu.net", "rapidu_lang", "en")]
INFO_PATTERN = r'<h1 title="(?P<N>.*)">.*</h1>\s*<small>(?P<S>\d+(\.\d+)?)\s(?P<U>\w+)</small>'
OFFLINE_PATTERN = r'<h1>404'
ERROR_PATTERN = r'<div class="error">'
RECAPTCHA_KEY = r'6Ld12ewSAAAAAHoE6WVP_pSfCdJcBQScVweQh8Io'
def setup(self):
self.resumeDownload = True
self.multiDL = self.premium
def handleFree(self, pyfile):
self.req.http.lastURL = pyfile.url
self.req.http.c.setopt(pycurl.HTTPHEADER, ["X-Requested-With: XMLHttpRequest"])
jsvars = self.getJsonResponse("https://rapidu.net/ajax.php",
get={'a': "getLoadTimeToDownload"},
post={'_go': ""},
decode=True)
        if str(jsvars['timeToDownload']) == "stop":
            # seconds remaining until the daily transfer limit resets at midnight
            t = (24 * 60 * 60) - (int(time.time()) % (24 * 60 * 60)) + time.altzone
            self.logInfo("You've reached your daily download transfer")
            self.retry(10, 10 if t < 1 else None, _("Try tomorrow again")) #@NOTE: check t in case of not synchronised clock
else:
self.wait(int(jsvars['timeToDownload']) - int(time.time()))
recaptcha = ReCaptcha(self)
response, challenge = recaptcha.challenge(self.RECAPTCHA_KEY)
jsvars = self.getJsonResponse("https://rapidu.net/ajax.php",
get={'a': "getCheckCaptcha"},
post={'_go' : "",
'captcha1': challenge,
'captcha2': response,
'fileId' : self.info['pattern']['ID']},
decode=True)
if jsvars['message'] == 'success':
self.link = jsvars['url']
def getJsonResponse(self, *args, **kwargs):
res = self.load(*args, **kwargs)
if not res.startswith('{'):
self.retry()
self.logDebug(res)
return json_loads(res)
getInfo = create_getInfo(RapiduNet)
| Zerknechterer/pyload | module/plugins/hoster/RapiduNet.py | Python | gpl-3.0 | 2,781 |
import os
import sys
import math
import time
import random
import bintrees
import blist
import btree
import BTrees.OOBTree
sys.path.extend(['..', '../..'])
import banyan
import _src
# each helper below only constructs the container; _run_test measures the
# construction time
def _bintrees(es):
    t = bintrees.FastRBTree([(e, None) for e in es])
def _blist(es):
t = blist.sortedset(es)
def _set(es):
t = set(es)
def _banyan(es, key_type, alg):
t = banyan.SortedSet(es, key_type = key_type, alg = alg)
def _run_test(fn, type_, num_items, num_its):
if type_ == int:
es = _src.random_ints(num_items)
elif type_ == str:
es = _src.random_strings(num_items, 8)
else:
assert False
start = time.time()
for i in range(num_its):
fn(es)
end = time.time()
diff = (end - start) / num_its
return diff
def run_tests(names, num_items, num_its, type_ = int):
fns = dict([
('btree', lambda es: btree.sorted_btree.bulkload(sorted(es), 128)),
('bintrees', lambda es: _bintrees(es)),
('blist', lambda es: _blist(es)),
('btrees', lambda es: BTrees.OOBTree.OOBTree([(e, 1) for e in es])),
('set', lambda es: _set(es)),
('banyan_red_black_tree', lambda es: _banyan(es, type_, banyan.RED_BLACK_TREE)),
('banyan_splay_tree', lambda es: _banyan(es, type_, banyan.SPLAY_TREE)),
('banyan_sorted_list', lambda es: _banyan(es, type_, banyan.SORTED_LIST)),
('banyan_red_black_tree_gen', lambda es: _banyan(es, None, banyan.RED_BLACK_TREE)),
('banyan_splay_tree_gen', lambda es: _banyan(es, None, banyan.SPLAY_TREE)),
('banyan_sorted_list_gen', lambda es: _banyan(es, None, banyan.SORTED_LIST))])
t = dict([])
for name in names:
t[name] = _run_test(fns[name], type_, num_items, num_its)
return t
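if __name__ == '__main__':
    # Minimal usage sketch: time the construction of a few containers over
    # 10000 random ints, averaged across 5 iterations (the names must match
    # keys of the fns dict inside run_tests).
    print(run_tests(['set', 'blist', 'banyan_red_black_tree'], 10000, 5))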
| pyannote/pyannote-banyan | performance_tests/_set_create.py | Python | bsd-3-clause | 1,814 |
from urllib.request import urlopen
import urllib.error
import twurl
import json
import sqlite3
TWITTER_URL = 'https://api.twitter.com/1.1/friends/list.json'
conn = sqlite3.connect('spider.sqlite')
cur = conn.cursor()
cur.execute('''
CREATE TABLE IF NOT EXISTS Twitter
(name TEXT, retrieved INTEGER, friends INTEGER)''')
while True:
acct = input('Enter a Twitter account, or quit: ')
if ( acct == 'quit' ) : break
if ( len(acct) < 1 ) :
cur.execute('SELECT name FROM Twitter WHERE retrieved = 0 LIMIT 1')
try:
acct = cur.fetchone()[0]
except:
print('No unretrieved Twitter accounts found')
continue
url = twurl.augment(TWITTER_URL, {'screen_name': acct, 'count': '5'} )
print('Retrieving', url)
connection = urlopen(url)
data = connection.read().decode()
headers = dict(connection.getheaders())
print('Remaining', headers['x-rate-limit-remaining'])
js = json.loads(data)
    # print(json.dumps(js, indent=4))
cur.execute('UPDATE Twitter SET retrieved=1 WHERE name = ?', (acct, ) )
countnew = 0
countold = 0
for u in js['users'] :
friend = u['screen_name']
print(friend)
cur.execute('SELECT friends FROM Twitter WHERE name = ? LIMIT 1',
(friend, ) )
try:
count = cur.fetchone()[0]
cur.execute('UPDATE Twitter SET friends = ? WHERE name = ?',
(count+1, friend) )
countold = countold + 1
except:
cur.execute('''INSERT INTO Twitter (name, retrieved, friends)
VALUES ( ?, 0, 1 )''', ( friend, ) )
countnew = countnew + 1
print('New accounts=',countnew,' revisited=',countold)
conn.commit()
cur.close()
| mkhuthir/learnPython | Book_pythonlearn_com/twitter/twspider.py | Python | mit | 1,781 |
# coding: utf-8
# Copyright (c) Pymatgen Development Team.
# Distributed under the terms of the MIT License.
"""
This module implements classes for reading and generating LAMMPS input sets.
For the ease of management we divide LAMMPS input into 2 files:
1. Data file: All structure related settings such as the atomic positions,
    bonds, angles, dihedrals, corresponding parametrizations etc. are
    set in the data file.
2. Control/input file: This is the main input file that should be fed to the
    lammps binary. The main input file consists of the path to the
    afore-mentioned data file and the job control parameters such as
    the ensemble type (NVT, NPT etc.), max number of iterations etc.
"""
from __future__ import division, print_function, unicode_literals, absolute_import
import os
import six
from monty.json import MSONable, MontyDecoder
from pymatgen.io.lammps.data import LammpsData
from pymatgen.io.lammps.input import LammpsInput
__author__ = "Kiran Mathew"
__email__ = "kmathew@lbl.gov"
class LammpsInputSet(MSONable):
def __init__(self, name, lammps_input, lammps_data=None,
data_filename="in.data", user_lammps_settings=None):
"""
Implementation of LammpsInputSet that is initialized from a dict
settings. It is typically used by other LammpsInputSets for
initialization from json or yaml source files.
Args:
name (str): A name for the input set.
lammps_input (LammpsInput): The config dictionary to use.
lammps_data (LammpsData): LammpsData object
            data_filename (str): name of the lammps data file.
Note: this will override the value for 'data_file' key in lammps_input
user_lammps_settings (dict): User lammps settings. This allows a user
to override lammps settings, e.g., setting a different force field
or bond type.
"""
self.name = name
self.lines = []
self.lammps_input = lammps_input
self.lammps_data = lammps_data
self.data_filename = data_filename
self.lammps_input.settings["data_file"] = data_filename
self.user_lammps_settings = user_lammps_settings or {}
self.lammps_input.settings.update(self.user_lammps_settings)
def write_input(self, input_filename, data_filename=None):
"""
Get the string representation of the main input file and write it.
Also writes the data file if the lammps_data attribute is set.
Args:
input_filename (string): name of the input file
data_filename (string): override the data file name with this
"""
if data_filename:
data_filename = os.path.abspath(os.path.join(os.getcwd(), data_filename))
if data_filename and ("data_file" in self.lammps_input.settings):
self.lammps_input.settings["data_file"] = data_filename
self.data_filename = data_filename
self.lammps_input.write_file(input_filename)
# write the data file if present
if self.lammps_data:
self.lammps_data.write_file(filename=self.data_filename)
@classmethod
def from_file(cls, name, input_template, user_settings,
lammps_data=None, data_filename="in.data"):
"""
Returns LammpsInputSet from input file template and input data.
Args:
name (str)
input_template (string): path to the input template file.
user_settings (dict): User lammps settings, the keys must
correspond to the keys in the template.
lammps_data (string/LammpsData): path to the
data file or an appropriate object
            data_filename (string): name of the lammps data file.
Returns:
LammpsInputSet
"""
user_settings["data_file"] = data_filename
lammps_input = LammpsInput.from_file(input_template, user_settings)
if isinstance(lammps_data, six.string_types):
lammps_data = LammpsData.from_file(lammps_data)
return cls(name, lammps_input, lammps_data=lammps_data,
data_filename=data_filename)
def as_dict(self):
d = MSONable.as_dict(self)
if hasattr(self, "kwargs"):
d.update(**self.kwargs)
d["lammps_input"] = self.lammps_input.as_dict()
return d
@classmethod
def from_dict(cls, d):
decoded = {k: MontyDecoder().process_decoded(v) for k, v in d.items()
if k not in ["@module", "@class", "lammps_input"]}
decoded["lammps_input"] = LammpsInput.from_dict(d["lammps_input"])
return cls(**decoded)
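if __name__ == "__main__":
    # Minimal usage sketch. The template/data file names and the settings keys
    # below are hypothetical; the keys must match the placeholders in the
    # chosen input template.
    nvt_set = LammpsInputSet.from_file(
        "nvt_run", "in.nvt.template",
        user_settings={"temperature": 300, "run_steps": 10000},
        lammps_data="in.data", data_filename="in.data")
    nvt_set.write_input("in.nvt")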
| johnson1228/pymatgen | pymatgen/io/lammps/sets.py | Python | mit | 4,785 |
# Generated by Django 2.2.13 on 2020-06-26 20:53
from django.db import migrations, models
import django.db.models.deletion
# This migration will remove the default='H' kwarg from the `coordinate_acquisition_code` field
# definition on ActivitySubmission. This default 'H' value is used when creating a new
# ActivitySubmission and the coordinate_acquisition_code is not set. The logic below details how to
# fix (as best as possible) the well's correct value of coordinate_acquisition_code:
# LEGACY:
# - if LEGACY submission coordinate_acquisition_code is H then check well (only if this well has
# no other submissions) to see what the value of coordinate_acquisition_code is:
# - if LEGACY and well match (NOT NULL) = ok
# - if LEGACY = H (default H) and well is NULL then well should have been defaulted to H = ok
# CON
# - always default to H = ok
# ALT & DEC:
# - new submission (default to H = ok)
# - existing submission (well_tag_number exists in payload):
# - if new coords and coordinate_acquisition_code is H (possible default to H = ok)
# - if new coords and coordinate_acquisition_code is !H (set by sub = ok)
# - if no coords and coordinate_acquisition_code is H (default to H = bad?)
# - if no coords and coordinate_acquisition_code is !H (set by sub = ok)
# STAFF_EDIT:
# - if coordinate_acquisition_code is given then store (no default H = ok)
# - if coordinate_acquisition_code is _not_ given yes new coords (default to H = ok)
# - if coordinate_acquisition_code is _not_ given no new coords (default to H = bad)
# Shows the well submission history for wells that have had an 'H' coordinate_acquisition_code sub, filtering out all the default-H subs to view only the actual updates
WELLS_WITH_H_SUBMISSION_HISTORY = """
SELECT act.well_activity_code, act.well_tag_number, act.filing_number, act.coordinate_acquisition_code, fp.coordinate_acquisition_code AS user_set, act.create_user, act.create_date
FROM activity_submission act
LEFT JOIN fields_provided fp ON act.filing_number = fp.filing_number
WHERE
well_tag_number IN (
SELECT well_tag_number
FROM activity_submission
WHERE
coordinate_acquisition_code = 'H'
GROUP BY well_tag_number
)
AND fp.coordinate_acquisition_code = true
ORDER BY well_tag_number, create_date;
"""
# Finds the wells which have had a default-'H' submission, ordered by the number of times they have had a non-H submission, showing only those which have had more than 1 sub
WELLS_WITH_MORE_THEN_ONE_H_SUBMISSION = """
SELECT COUNT(act.well_tag_number) AS num, act.well_tag_number
FROM activity_submission act
LEFT JOIN fields_provided fp ON act.filing_number = fp.filing_number
WHERE
well_tag_number IN (
SELECT well_tag_number
FROM activity_submission
WHERE
coordinate_acquisition_code = 'H'
GROUP BY well_tag_number
)
AND fp.coordinate_acquisition_code = true
GROUP BY act.well_tag_number
HAVING COUNT(act.well_tag_number) > 1
ORDER BY num DESC, well_tag_number;
"""
# Shows the latest submission for each well that has a non-default-H submission
LATEST_REAL_STAFF_EDIT_OF_COORDINATE_ACQUISITION_CODE = """
SELECT act.well_activity_code, act.well_tag_number, act.filing_number, act.coordinate_acquisition_code, act.create_user, act.create_date
FROM well w
INNER JOIN activity_submission act ON act.well_tag_number = w.well_tag_number
WHERE
act.filing_number = (
SELECT MAX(act2.filing_number) AS recent_filing_number
FROM activity_submission act2
LEFT JOIN fields_provided fp ON act2.filing_number = fp.filing_number
WHERE
act2.well_tag_number = w.well_tag_number AND
act2.well_activity_code = 'STAFF_EDIT' AND
fp.coordinate_acquisition_code = true
)
AND w.coordinate_acquisition_code != act.coordinate_acquisition_code
ORDER BY well_tag_number;
""" # ~275 records
# Shows any ALT or DEC sub with an H coordinate_acquisition_code and no new geom
WELLS_WITH_DEFAULT_H_ALT_OR_DEC = """
SELECT act.well_activity_code, act.well_tag_number, act.filing_number, act.coordinate_acquisition_code, fp.coordinate_acquisition_code AS user_set, act.geom IS NOT NULL AS geom_changed, act.create_user, act.create_date
FROM activity_submission act
LEFT JOIN fields_provided fp ON act.filing_number = fp.filing_number
WHERE
well_tag_number IN (
SELECT well_tag_number
FROM activity_submission
WHERE
well_activity_code IN ('DEC', 'ALT') AND
coordinate_acquisition_code = 'H' AND
geom IS NULL
)
ORDER BY well_tag_number, create_date;
"""
# Counts wells that had an ALT or DEC sub with no new geom and coordinate_acquisition_code = 'H', i.e. the sub always set the default 'H'
NUM_WELLS_WITH_DEFAULT_H_ALT_OR_DEC = """
SELECT COUNT(well_tag_number), well_activity_code
FROM activity_submission
WHERE
well_activity_code IN ('DEC', 'ALT') AND
coordinate_acquisition_code = 'H' AND
geom IS NULL
GROUP BY well_activity_code;
"""
class Migration(migrations.Migration):
dependencies = [
('wells', '0116_not_null_well_class_and_intended_water_use_20200619_2053'),
]
operations = [
migrations.AlterField(
model_name='activitysubmission',
name='coordinate_acquisition_code',
field=models.ForeignKey(blank=True, db_column='coordinate_acquisition_code', null=True, on_delete=django.db.models.deletion.PROTECT, to='wells.CoordinateAcquisitionCode', verbose_name='Location Accuracy Code'),
),
]
| bcgov/gwells | app/backend/wells/migrations/0117_no_default_coord_code_20200626_2053.py | Python | apache-2.0 | 5,606 |
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright (c) 2012 NetApp, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Unit tests for the NetApp-specific NFS driver module (netapp_nfs)."""
from cinder import context
from cinder import exception
from cinder import test
from cinder.volume import configuration as conf
from cinder.volume.drivers.netapp import api
from cinder.volume.drivers.netapp import nfs as netapp_nfs
from cinder.volume.drivers import nfs
from lxml import etree
from mox import IgnoreArg
from mox import IsA
from mox import MockObject
import mox
import suds
import types
def create_configuration():
configuration = mox.MockObject(conf.Configuration)
configuration.append_config_values(mox.IgnoreArg())
return configuration
class FakeVolume(object):
def __init__(self, size=0):
self.size = size
self.id = hash(self)
self.name = None
def __getitem__(self, key):
return self.__dict__[key]
class FakeSnapshot(object):
def __init__(self, volume_size=0):
self.volume_name = None
self.name = None
self.volume_id = None
self.volume_size = volume_size
self.user_id = None
self.status = None
def __getitem__(self, key):
return self.__dict__[key]
class FakeResponce(object):
def __init__(self, status):
"""
:param status: Either 'failed' or 'passed'
"""
self.Status = status
if status == 'failed':
self.Reason = 'Sample error'
class NetappNfsDriverTestCase(test.TestCase):
"""Test case for NetApp specific NFS clone driver."""
def setUp(self):
self._mox = mox.Mox()
self._driver = netapp_nfs.NetAppNFSDriver(
configuration=create_configuration())
def tearDown(self):
self._mox.UnsetStubs()
def test_check_for_setup_error(self):
mox = self._mox
drv = self._driver
required_flags = ['netapp_wsdl_url',
'netapp_login',
'netapp_password',
'netapp_server_hostname',
'netapp_server_port']
# set required flags
for flag in required_flags:
setattr(drv.configuration, flag, None)
# check exception raises when flags are not set
self.assertRaises(exception.CinderException,
drv.check_for_setup_error)
# set required flags
for flag in required_flags:
setattr(drv.configuration, flag, 'val')
mox.StubOutWithMock(nfs.NfsDriver, 'check_for_setup_error')
nfs.NfsDriver.check_for_setup_error()
mox.ReplayAll()
drv.check_for_setup_error()
mox.VerifyAll()
# restore initial FLAGS
for flag in required_flags:
delattr(drv.configuration, flag)
def test_do_setup(self):
mox = self._mox
drv = self._driver
mox.StubOutWithMock(drv, 'check_for_setup_error')
mox.StubOutWithMock(drv, '_get_client')
drv.check_for_setup_error()
drv._get_client()
mox.ReplayAll()
drv.do_setup(IsA(context.RequestContext))
mox.VerifyAll()
def test_create_snapshot(self):
"""Test snapshot can be created and deleted."""
mox = self._mox
drv = self._driver
mox.StubOutWithMock(drv, '_clone_volume')
drv._clone_volume(IgnoreArg(), IgnoreArg(), IgnoreArg())
mox.ReplayAll()
drv.create_snapshot(FakeSnapshot())
mox.VerifyAll()
def test_create_volume_from_snapshot(self):
"""Tests volume creation from snapshot."""
drv = self._driver
mox = self._mox
volume = FakeVolume(1)
snapshot = FakeSnapshot(2)
self.assertRaises(exception.CinderException,
drv.create_volume_from_snapshot,
volume,
snapshot)
snapshot = FakeSnapshot(1)
location = '127.0.0.1:/nfs'
expected_result = {'provider_location': location}
mox.StubOutWithMock(drv, '_clone_volume')
mox.StubOutWithMock(drv, '_get_volume_location')
drv._clone_volume(IgnoreArg(), IgnoreArg(), IgnoreArg())
drv._get_volume_location(IgnoreArg()).AndReturn(location)
mox.ReplayAll()
loc = drv.create_volume_from_snapshot(volume, snapshot)
self.assertEquals(loc, expected_result)
mox.VerifyAll()
def _prepare_delete_snapshot_mock(self, snapshot_exists):
drv = self._driver
mox = self._mox
mox.StubOutWithMock(drv, '_get_provider_location')
mox.StubOutWithMock(drv, '_volume_not_present')
if snapshot_exists:
mox.StubOutWithMock(drv, '_execute')
mox.StubOutWithMock(drv, '_get_volume_path')
drv._get_provider_location(IgnoreArg())
drv._volume_not_present(IgnoreArg(),
IgnoreArg()).AndReturn(not snapshot_exists)
if snapshot_exists:
drv._get_volume_path(IgnoreArg(), IgnoreArg())
drv._execute('rm', None, run_as_root=True)
mox.ReplayAll()
return mox
def test_delete_existing_snapshot(self):
drv = self._driver
mox = self._prepare_delete_snapshot_mock(True)
drv.delete_snapshot(FakeSnapshot())
mox.VerifyAll()
def test_delete_missing_snapshot(self):
drv = self._driver
mox = self._prepare_delete_snapshot_mock(False)
drv.delete_snapshot(FakeSnapshot())
mox.VerifyAll()
def _prepare_clone_mock(self, status):
drv = self._driver
mox = self._mox
volume = FakeVolume()
setattr(volume, 'provider_location', '127.0.0.1:/nfs')
drv._client = MockObject(suds.client.Client)
drv._client.factory = MockObject(suds.client.Factory)
drv._client.service = MockObject(suds.client.ServiceSelector)
# ApiProxy() method is generated by ServiceSelector at runtime from the
# XML, so mocking is impossible.
setattr(drv._client.service,
'ApiProxy',
types.MethodType(lambda *args, **kwargs: FakeResponce(status),
suds.client.ServiceSelector))
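        # the lambda above stands in for the SOAP method generated at runtime
        # and simply returns a canned FakeResponce with the requested status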
mox.StubOutWithMock(drv, '_get_host_id')
mox.StubOutWithMock(drv, '_get_full_export_path')
drv._get_host_id(IgnoreArg()).AndReturn('10')
drv._get_full_export_path(IgnoreArg(), IgnoreArg()).AndReturn('/nfs')
return mox
def test_successfull_clone_volume(self):
drv = self._driver
mox = self._prepare_clone_mock('passed')
# set required flags
setattr(drv.configuration, 'synchronous_snapshot_create', False)
mox.ReplayAll()
volume_name = 'volume_name'
clone_name = 'clone_name'
volume_id = volume_name + str(hash(volume_name))
drv._clone_volume(volume_name, clone_name, volume_id)
mox.VerifyAll()
def test_failed_clone_volume(self):
drv = self._driver
mox = self._prepare_clone_mock('failed')
mox.ReplayAll()
volume_name = 'volume_name'
clone_name = 'clone_name'
volume_id = volume_name + str(hash(volume_name))
self.assertRaises(exception.CinderException,
drv._clone_volume,
volume_name, clone_name, volume_id)
mox.VerifyAll()
def test_cloned_volume_size_fail(self):
volume_clone_fail = FakeVolume(1)
volume_src = FakeVolume(2)
try:
self._driver.create_cloned_volume(volume_clone_fail,
volume_src)
raise AssertionError()
except exception.CinderException:
pass
class NetappCmodeNfsDriverTestCase(test.TestCase):
"""Test case for NetApp C Mode specific NFS clone driver"""
def setUp(self):
self._mox = mox.Mox()
self._custom_setup()
def _custom_setup(self):
self._driver = netapp_nfs.NetAppCmodeNfsDriver(
configuration=create_configuration())
def tearDown(self):
self._mox.UnsetStubs()
def test_check_for_setup_error(self):
mox = self._mox
drv = self._driver
required_flags = [
'netapp_wsdl_url',
'netapp_login',
'netapp_password',
'netapp_server_hostname',
'netapp_server_port']
# set required flags
for flag in required_flags:
setattr(drv.configuration, flag, None)
# check exception raises when flags are not set
self.assertRaises(exception.CinderException,
drv.check_for_setup_error)
# set required flags
for flag in required_flags:
setattr(drv.configuration, flag, 'val')
mox.ReplayAll()
drv.check_for_setup_error()
mox.VerifyAll()
# restore initial FLAGS
for flag in required_flags:
delattr(drv.configuration, flag)
def test_do_setup(self):
mox = self._mox
drv = self._driver
mox.StubOutWithMock(drv, 'check_for_setup_error')
mox.StubOutWithMock(drv, '_get_client')
drv.check_for_setup_error()
drv._get_client()
mox.ReplayAll()
drv.do_setup(IsA(context.RequestContext))
mox.VerifyAll()
def test_create_snapshot(self):
"""Test snapshot can be created and deleted"""
mox = self._mox
drv = self._driver
mox.StubOutWithMock(drv, '_clone_volume')
drv._clone_volume(IgnoreArg(), IgnoreArg(), IgnoreArg())
mox.ReplayAll()
drv.create_snapshot(FakeSnapshot())
mox.VerifyAll()
def test_create_volume_from_snapshot(self):
"""Tests volume creation from snapshot"""
drv = self._driver
mox = self._mox
volume = FakeVolume(1)
snapshot = FakeSnapshot(2)
self.assertRaises(exception.CinderException,
drv.create_volume_from_snapshot,
volume,
snapshot)
snapshot = FakeSnapshot(1)
location = '127.0.0.1:/nfs'
expected_result = {'provider_location': location}
mox.StubOutWithMock(drv, '_clone_volume')
mox.StubOutWithMock(drv, '_get_volume_location')
drv._clone_volume(IgnoreArg(), IgnoreArg(), IgnoreArg())
drv._get_volume_location(IgnoreArg()).AndReturn(location)
mox.ReplayAll()
loc = drv.create_volume_from_snapshot(volume, snapshot)
self.assertEquals(loc, expected_result)
mox.VerifyAll()
def _prepare_delete_snapshot_mock(self, snapshot_exists):
drv = self._driver
mox = self._mox
mox.StubOutWithMock(drv, '_get_provider_location')
mox.StubOutWithMock(drv, '_volume_not_present')
if snapshot_exists:
mox.StubOutWithMock(drv, '_execute')
mox.StubOutWithMock(drv, '_get_volume_path')
drv._get_provider_location(IgnoreArg())
drv._volume_not_present(IgnoreArg(), IgnoreArg())\
.AndReturn(not snapshot_exists)
if snapshot_exists:
drv._get_volume_path(IgnoreArg(), IgnoreArg())
drv._execute('rm', None, run_as_root=True)
mox.ReplayAll()
return mox
def test_delete_existing_snapshot(self):
drv = self._driver
mox = self._prepare_delete_snapshot_mock(True)
drv.delete_snapshot(FakeSnapshot())
mox.VerifyAll()
def test_delete_missing_snapshot(self):
drv = self._driver
mox = self._prepare_delete_snapshot_mock(False)
drv.delete_snapshot(FakeSnapshot())
mox.VerifyAll()
def _prepare_clone_mock(self, status):
drv = self._driver
mox = self._mox
volume = FakeVolume()
setattr(volume, 'provider_location', '127.0.0.1:/nfs')
drv._client = MockObject(suds.client.Client)
drv._client.factory = MockObject(suds.client.Factory)
drv._client.service = MockObject(suds.client.ServiceSelector)
        # CloneNasFile method is generated by ServiceSelector at runtime from
        # the XML, so mocking is impossible.
setattr(drv._client.service,
'CloneNasFile',
types.MethodType(lambda *args, **kwargs: FakeResponce(status),
suds.client.ServiceSelector))
mox.StubOutWithMock(drv, '_get_host_ip')
mox.StubOutWithMock(drv, '_get_export_path')
drv._get_host_ip(IgnoreArg()).AndReturn('127.0.0.1')
drv._get_export_path(IgnoreArg()).AndReturn('/nfs')
return mox
def test_clone_volume(self):
drv = self._driver
mox = self._prepare_clone_mock('passed')
mox.ReplayAll()
volume_name = 'volume_name'
clone_name = 'clone_name'
volume_id = volume_name + str(hash(volume_name))
drv._clone_volume(volume_name, clone_name, volume_id)
mox.VerifyAll()
def test_cloned_volume_size_fail(self):
volume_clone_fail = FakeVolume(1)
volume_src = FakeVolume(2)
try:
self._driver.create_cloned_volume(volume_clone_fail,
volume_src)
raise AssertionError()
except exception.CinderException:
pass
class NetappDirectCmodeNfsDriverTestCase(NetappCmodeNfsDriverTestCase):
"""Test direct NetApp C Mode driver"""
def _custom_setup(self):
self._driver = netapp_nfs.NetAppDirectCmodeNfsDriver(
configuration=create_configuration())
def test_check_for_setup_error(self):
mox = self._mox
drv = self._driver
required_flags = [
'netapp_transport_type',
'netapp_login',
'netapp_password',
'netapp_server_hostname',
'netapp_server_port']
# set required flags
for flag in required_flags:
setattr(drv.configuration, flag, None)
# check exception raises when flags are not set
self.assertRaises(exception.CinderException,
drv.check_for_setup_error)
# set required flags
for flag in required_flags:
setattr(drv.configuration, flag, 'val')
mox.ReplayAll()
drv.check_for_setup_error()
mox.VerifyAll()
# restore initial FLAGS
for flag in required_flags:
delattr(drv.configuration, flag)
def test_do_setup(self):
mox = self._mox
drv = self._driver
mox.StubOutWithMock(drv, 'check_for_setup_error')
mox.StubOutWithMock(drv, '_get_client')
mox.StubOutWithMock(drv, '_do_custom_setup')
drv.check_for_setup_error()
drv._get_client()
drv._do_custom_setup(IgnoreArg())
mox.ReplayAll()
drv.do_setup(IsA(context.RequestContext))
mox.VerifyAll()
def _prepare_clone_mock(self, status):
drv = self._driver
mox = self._mox
volume = FakeVolume()
setattr(volume, 'provider_location', '127.0.0.1:/nfs')
mox.StubOutWithMock(drv, '_get_host_ip')
mox.StubOutWithMock(drv, '_get_export_path')
mox.StubOutWithMock(drv, '_get_if_info_by_ip')
mox.StubOutWithMock(drv, '_get_vol_by_junc_vserver')
mox.StubOutWithMock(drv, '_clone_file')
drv._get_host_ip(IgnoreArg()).AndReturn('127.0.0.1')
drv._get_export_path(IgnoreArg()).AndReturn('/nfs')
drv._get_if_info_by_ip('127.0.0.1').AndReturn(
self._prepare_info_by_ip_response())
drv._get_vol_by_junc_vserver('openstack', '/nfs').AndReturn('nfsvol')
drv._clone_file('nfsvol', 'volume_name', 'clone_name',
'openstack')
return mox
def _prepare_info_by_ip_response(self):
res = """<attributes-list>
<net-interface-info>
<address>127.0.0.1</address>
<administrative-status>up</administrative-status>
<current-node>fas3170rre-cmode-01</current-node>
<current-port>e1b-1165</current-port>
<data-protocols>
<data-protocol>nfs</data-protocol>
</data-protocols>
<dns-domain-name>none</dns-domain-name>
<failover-group/>
<failover-policy>disabled</failover-policy>
<firewall-policy>data</firewall-policy>
<home-node>fas3170rre-cmode-01</home-node>
<home-port>e1b-1165</home-port>
<interface-name>nfs_data1</interface-name>
<is-auto-revert>false</is-auto-revert>
<is-home>true</is-home>
<netmask>255.255.255.0</netmask>
<netmask-length>24</netmask-length>
<operational-status>up</operational-status>
<role>data</role>
<routing-group-name>c10.63.165.0/24</routing-group-name>
<use-failover-group>disabled</use-failover-group>
<vserver>openstack</vserver>
</net-interface-info></attributes-list>"""
response_el = etree.XML(res)
return api.NaElement(response_el).get_children()
def test_clone_volume(self):
drv = self._driver
mox = self._prepare_clone_mock('pass')
mox.ReplayAll()
volume_name = 'volume_name'
clone_name = 'clone_name'
volume_id = volume_name + str(hash(volume_name))
drv._clone_volume(volume_name, clone_name, volume_id)
mox.VerifyAll()
class NetappDirect7modeNfsDriverTestCase(NetappDirectCmodeNfsDriverTestCase):
"""Test direct NetApp C Mode driver"""
def _custom_setup(self):
self._driver = netapp_nfs.NetAppDirect7modeNfsDriver(
configuration=create_configuration())
def test_check_for_setup_error(self):
mox = self._mox
drv = self._driver
required_flags = [
'netapp_transport_type',
'netapp_login',
'netapp_password',
'netapp_server_hostname',
'netapp_server_port']
# set required flags
for flag in required_flags:
setattr(drv.configuration, flag, None)
# check exception raises when flags are not set
self.assertRaises(exception.CinderException,
drv.check_for_setup_error)
# set required flags
for flag in required_flags:
setattr(drv.configuration, flag, 'val')
mox.ReplayAll()
drv.check_for_setup_error()
mox.VerifyAll()
# restore initial FLAGS
for flag in required_flags:
delattr(drv.configuration, flag)
def test_do_setup(self):
mox = self._mox
drv = self._driver
mox.StubOutWithMock(drv, 'check_for_setup_error')
mox.StubOutWithMock(drv, '_get_client')
mox.StubOutWithMock(drv, '_do_custom_setup')
drv.check_for_setup_error()
drv._get_client()
drv._do_custom_setup(IgnoreArg())
mox.ReplayAll()
drv.do_setup(IsA(context.RequestContext))
mox.VerifyAll()
def _prepare_clone_mock(self, status):
drv = self._driver
mox = self._mox
volume = FakeVolume()
setattr(volume, 'provider_location', '127.0.0.1:/nfs')
mox.StubOutWithMock(drv, '_get_export_path')
mox.StubOutWithMock(drv, '_get_actual_path_for_export')
mox.StubOutWithMock(drv, '_start_clone')
mox.StubOutWithMock(drv, '_wait_for_clone_finish')
if status == 'fail':
mox.StubOutWithMock(drv, '_clear_clone')
drv._get_export_path(IgnoreArg()).AndReturn('/nfs')
drv._get_actual_path_for_export(IgnoreArg()).AndReturn('/vol/vol1/nfs')
drv._start_clone(IgnoreArg(), IgnoreArg()).AndReturn(('1', '2'))
if status == 'fail':
drv._wait_for_clone_finish('1', '2').AndRaise(
api.NaApiError('error', 'error'))
drv._clear_clone('1')
else:
drv._wait_for_clone_finish('1', '2')
return mox
def test_clone_volume_clear(self):
drv = self._driver
mox = self._prepare_clone_mock('fail')
mox.ReplayAll()
volume_name = 'volume_name'
clone_name = 'clone_name'
volume_id = volume_name + str(hash(volume_name))
        try:
            drv._clone_volume(volume_name, clone_name, volume_id)
        except api.NaApiError:
            pass
mox.VerifyAll()
| tomasdubec/openstack-cinder | cinder/tests/test_netapp_nfs.py | Python | apache-2.0 | 21,262 |
class MutableValue:
"""
Used to avoid warnings (and in future errors) from aiohttp when the app context is modified.
"""
__slots__ = 'value',
def __init__(self, value=None):
self.value = value
def change(self, new_value):
self.value = new_value
def __len__(self):
return len(self.value)
def __repr__(self):
return repr(self.value)
def __str__(self):
return str(self.value)
def __bool__(self):
return bool(self.value)
def __eq__(self, other):
return MutableValue(self.value == other)
def __add__(self, other):
return self.value + other
def __getattr__(self, item):
return getattr(self.value, item)
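if __name__ == '__main__':
    # Minimal usage sketch: store a value once, then mutate it in place
    # (e.g. inside an already-frozen aiohttp app context) without rebinding
    # the key. The '/static/' value is purely illustrative.
    v = MutableValue('/static/')
    v.change('/assets/')
    assert str(v) == '/assets/'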
| samuelcolvin/aiohttp-devtools | aiohttp_devtools/runserver/utils.py | Python | mit | 732 |
# -*- coding: utf-8 -*-
#
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Unit tests."""
import mock
import pandas
import pytest
from google.api_core import exceptions
from google.auth.credentials import AnonymousCredentials
from google.cloud import automl_v1beta1
from google.cloud.automl_v1beta1.proto import data_types_pb2
PROJECT = "project"
REGION = "region"
LOCATION_PATH = "projects/{}/locations/{}".format(PROJECT, REGION)
class TestTablesClient(object):
def tables_client(
self, client_attrs={}, prediction_client_attrs={}, gcs_client_attrs={}
):
client_mock = mock.Mock(**client_attrs)
prediction_client_mock = mock.Mock(**prediction_client_attrs)
gcs_client_mock = mock.Mock(**gcs_client_attrs)
return automl_v1beta1.TablesClient(
client=client_mock,
prediction_client=prediction_client_mock,
gcs_client=gcs_client_mock,
project=PROJECT,
region=REGION,
)
def test_list_datasets_empty(self):
client = self.tables_client(
{
"list_datasets.return_value": [],
"location_path.return_value": LOCATION_PATH,
},
{},
)
ds = client.list_datasets()
client.auto_ml_client.location_path.assert_called_with(PROJECT, REGION)
client.auto_ml_client.list_datasets.assert_called_with(LOCATION_PATH)
assert ds == []
def test_list_datasets_not_empty(self):
datasets = ["some_dataset"]
client = self.tables_client(
{
"list_datasets.return_value": datasets,
"location_path.return_value": LOCATION_PATH,
},
{},
)
ds = client.list_datasets()
client.auto_ml_client.location_path.assert_called_with(PROJECT, REGION)
client.auto_ml_client.list_datasets.assert_called_with(LOCATION_PATH)
assert len(ds) == 1
assert ds[0] == "some_dataset"
def test_get_dataset_no_value(self):
        client = self.tables_client({}, {})
        with pytest.raises(ValueError):
            client.get_dataset()
client.auto_ml_client.get_dataset.assert_not_called()
def test_get_dataset_name(self):
dataset_actual = "dataset"
client = self.tables_client({"get_dataset.return_value": dataset_actual}, {})
dataset = client.get_dataset(dataset_name="my_dataset")
client.auto_ml_client.get_dataset.assert_called_with("my_dataset")
assert dataset == dataset_actual
def test_get_no_dataset(self):
client = self.tables_client(
{"get_dataset.side_effect": exceptions.NotFound("err")}, {}
)
with pytest.raises(exceptions.NotFound):
client.get_dataset(dataset_name="my_dataset")
client.auto_ml_client.get_dataset.assert_called_with("my_dataset")
def test_get_dataset_from_empty_list(self):
client = self.tables_client({"list_datasets.return_value": []}, {})
with pytest.raises(exceptions.NotFound):
client.get_dataset(dataset_display_name="my_dataset")
def test_get_dataset_from_list_not_found(self):
client = self.tables_client(
{"list_datasets.return_value": [mock.Mock(display_name="not_it")]}, {}
)
with pytest.raises(exceptions.NotFound):
client.get_dataset(dataset_display_name="my_dataset")
def test_get_dataset_from_list(self):
client = self.tables_client(
{
"list_datasets.return_value": [
mock.Mock(display_name="not_it"),
mock.Mock(display_name="my_dataset"),
]
},
{},
)
dataset = client.get_dataset(dataset_display_name="my_dataset")
assert dataset.display_name == "my_dataset"
def test_get_dataset_from_list_ambiguous(self):
client = self.tables_client(
{
"list_datasets.return_value": [
mock.Mock(display_name="my_dataset"),
mock.Mock(display_name="not_my_dataset"),
mock.Mock(display_name="my_dataset"),
]
},
{},
)
with pytest.raises(ValueError):
client.get_dataset(dataset_display_name="my_dataset")
def test_create_dataset(self):
client = self.tables_client(
{
"location_path.return_value": LOCATION_PATH,
"create_dataset.return_value": mock.Mock(display_name="name"),
},
{},
)
metadata = {"metadata": "values"}
dataset = client.create_dataset("name", metadata=metadata)
client.auto_ml_client.location_path.assert_called_with(PROJECT, REGION)
client.auto_ml_client.create_dataset.assert_called_with(
LOCATION_PATH, {"display_name": "name", "tables_dataset_metadata": metadata}
)
assert dataset.display_name == "name"
def test_delete_dataset(self):
dataset = mock.Mock()
dataset.configure_mock(name="name")
client = self.tables_client({"delete_dataset.return_value": None}, {})
client.delete_dataset(dataset=dataset)
client.auto_ml_client.delete_dataset.assert_called_with("name")
def test_delete_dataset_not_found(self):
client = self.tables_client({"list_datasets.return_value": []}, {})
client.delete_dataset(dataset_display_name="not_found")
client.auto_ml_client.delete_dataset.assert_not_called()
def test_delete_dataset_name(self):
client = self.tables_client({"delete_dataset.return_value": None}, {})
client.delete_dataset(dataset_name="name")
client.auto_ml_client.delete_dataset.assert_called_with("name")
def test_export_not_found(self):
client = self.tables_client({"list_datasets.return_value": []}, {})
with pytest.raises(exceptions.NotFound):
client.export_data(dataset_display_name="name", gcs_input_uris="uri")
client.auto_ml_client.export_data.assert_not_called()
def test_export_gcs_uri(self):
client = self.tables_client({"export_data.return_value": None}, {})
client.export_data(dataset_name="name", gcs_output_uri_prefix="uri")
client.auto_ml_client.export_data.assert_called_with(
"name", {"gcs_destination": {"output_uri_prefix": "uri"}}
)
def test_export_bq_uri(self):
client = self.tables_client({"export_data.return_value": None}, {})
client.export_data(dataset_name="name", bigquery_output_uri="uri")
client.auto_ml_client.export_data.assert_called_with(
"name", {"bigquery_destination": {"output_uri": "uri"}}
)
def test_import_not_found(self):
client = self.tables_client({"list_datasets.return_value": []}, {})
with pytest.raises(exceptions.NotFound):
client.import_data(dataset_display_name="name", gcs_input_uris="uri")
client.auto_ml_client.import_data.assert_not_called()
def test_import_pandas_dataframe(self):
client = self.tables_client(
gcs_client_attrs={
"bucket_name": "my_bucket",
"upload_pandas_dataframe.return_value": "uri",
}
)
dataframe = pandas.DataFrame({})
client.import_data(
project=PROJECT,
region=REGION,
dataset_name="name",
pandas_dataframe=dataframe,
)
client.gcs_client.ensure_bucket_exists.assert_called_with(PROJECT, REGION)
client.gcs_client.upload_pandas_dataframe.assert_called_with(dataframe)
client.auto_ml_client.import_data.assert_called_with(
"name", {"gcs_source": {"input_uris": ["uri"]}}
)
def test_import_pandas_dataframe_init_gcs(self):
client = automl_v1beta1.TablesClient(
client=mock.Mock(),
prediction_client=mock.Mock(),
project=PROJECT,
region=REGION,
credentials=AnonymousCredentials(),
)
dataframe = pandas.DataFrame({})
patch = mock.patch(
"google.cloud.automl_v1beta1.tables.tables_client.gcs_client.GcsClient",
bucket_name="my_bucket",
)
with patch as MockGcsClient:
mockInstance = MockGcsClient.return_value
mockInstance.upload_pandas_dataframe.return_value = "uri"
client.import_data(dataset_name="name", pandas_dataframe=dataframe)
assert client.gcs_client is mockInstance
client.gcs_client.ensure_bucket_exists.assert_called_with(PROJECT, REGION)
client.gcs_client.upload_pandas_dataframe.assert_called_with(dataframe)
client.auto_ml_client.import_data.assert_called_with(
"name", {"gcs_source": {"input_uris": ["uri"]}}
)
def test_import_gcs_uri(self):
client = self.tables_client({"import_data.return_value": None}, {})
client.import_data(dataset_name="name", gcs_input_uris="uri")
client.auto_ml_client.import_data.assert_called_with(
"name", {"gcs_source": {"input_uris": ["uri"]}}
)
def test_import_gcs_uris(self):
client = self.tables_client({"import_data.return_value": None}, {})
client.import_data(dataset_name="name", gcs_input_uris=["uri", "uri"])
client.auto_ml_client.import_data.assert_called_with(
"name", {"gcs_source": {"input_uris": ["uri", "uri"]}}
)
def test_import_bq_uri(self):
client = self.tables_client({"import_data.return_value": None}, {})
client.import_data(dataset_name="name", bigquery_input_uri="uri")
client.auto_ml_client.import_data.assert_called_with(
"name", {"bigquery_source": {"input_uri": "uri"}}
)
def test_list_table_specs(self):
client = self.tables_client({"list_table_specs.return_value": None}, {})
client.list_table_specs(dataset_name="name")
client.auto_ml_client.list_table_specs.assert_called_with("name")
def test_list_table_specs_not_found(self):
client = self.tables_client(
{"list_table_specs.side_effect": exceptions.NotFound("not found")}, {}
)
with pytest.raises(exceptions.NotFound):
client.list_table_specs(dataset_name="name")
client.auto_ml_client.list_table_specs.assert_called_with("name")
def test_get_table_spec(self):
client = self.tables_client({}, {})
client.get_table_spec("name")
client.auto_ml_client.get_table_spec.assert_called_with("name")
def test_get_column_spec(self):
client = self.tables_client({}, {})
client.get_column_spec("name")
client.auto_ml_client.get_column_spec.assert_called_with("name")
def test_list_column_specs(self):
table_spec_mock = mock.Mock()
# name is reserved in use of __init__, needs to be passed here
table_spec_mock.configure_mock(name="table")
client = self.tables_client(
{
"list_table_specs.return_value": [table_spec_mock],
"list_column_specs.return_value": [],
},
{},
)
client.list_column_specs(dataset_name="name")
client.auto_ml_client.list_table_specs.assert_called_with("name")
client.auto_ml_client.list_column_specs.assert_called_with("table")
def test_update_column_spec_not_found(self):
table_spec_mock = mock.Mock()
# name is reserved in use of __init__, needs to be passed here
table_spec_mock.configure_mock(name="table")
column_spec_mock = mock.Mock()
data_type_mock = mock.Mock(type_code="type_code")
column_spec_mock.configure_mock(
name="column", display_name="column", data_type=data_type_mock
)
client = self.tables_client(
{
"list_table_specs.return_value": [table_spec_mock],
"list_column_specs.return_value": [column_spec_mock],
},
{},
)
with pytest.raises(exceptions.NotFound):
client.update_column_spec(dataset_name="name", column_spec_name="column2")
client.auto_ml_client.list_table_specs.assert_called_with("name")
client.auto_ml_client.list_column_specs.assert_called_with("table")
client.auto_ml_client.update_column_spec.assert_not_called()
def test_update_column_spec_display_name_not_found(self):
table_spec_mock = mock.Mock()
# name is reserved in use of __init__, needs to be passed here
table_spec_mock.configure_mock(name="table")
column_spec_mock = mock.Mock()
data_type_mock = mock.Mock(type_code="type_code")
column_spec_mock.configure_mock(
name="column", display_name="column", data_type=data_type_mock
)
client = self.tables_client(
{
"list_table_specs.return_value": [table_spec_mock],
"list_column_specs.return_value": [column_spec_mock],
},
{},
)
with pytest.raises(exceptions.NotFound):
client.update_column_spec(
dataset_name="name", column_spec_display_name="column2"
)
client.auto_ml_client.list_table_specs.assert_called_with("name")
client.auto_ml_client.list_column_specs.assert_called_with("table")
client.auto_ml_client.update_column_spec.assert_not_called()
def test_update_column_spec_name_no_args(self):
table_spec_mock = mock.Mock()
# name is reserved in use of __init__, needs to be passed here
table_spec_mock.configure_mock(name="table")
column_spec_mock = mock.Mock()
data_type_mock = mock.Mock(type_code="type_code")
column_spec_mock.configure_mock(
name="column/2", display_name="column", data_type=data_type_mock
)
client = self.tables_client(
{
"list_table_specs.return_value": [table_spec_mock],
"list_column_specs.return_value": [column_spec_mock],
},
{},
)
client.update_column_spec(dataset_name="name", column_spec_name="column/2")
client.auto_ml_client.list_table_specs.assert_called_with("name")
client.auto_ml_client.list_column_specs.assert_called_with("table")
client.auto_ml_client.update_column_spec.assert_called_with(
{"name": "column/2", "data_type": {"type_code": "type_code"}}
)
def test_update_column_spec_no_args(self):
table_spec_mock = mock.Mock()
# name is reserved in use of __init__, needs to be passed here
table_spec_mock.configure_mock(name="table")
column_spec_mock = mock.Mock()
data_type_mock = mock.Mock(type_code="type_code")
column_spec_mock.configure_mock(
name="column", display_name="column", data_type=data_type_mock
)
client = self.tables_client(
{
"list_table_specs.return_value": [table_spec_mock],
"list_column_specs.return_value": [column_spec_mock],
},
{},
)
client.update_column_spec(
dataset_name="name", column_spec_display_name="column"
)
client.auto_ml_client.list_table_specs.assert_called_with("name")
client.auto_ml_client.list_column_specs.assert_called_with("table")
client.auto_ml_client.update_column_spec.assert_called_with(
{"name": "column", "data_type": {"type_code": "type_code"}}
)
def test_update_column_spec_nullable(self):
table_spec_mock = mock.Mock()
# name is reserved in use of __init__, needs to be passed here
table_spec_mock.configure_mock(name="table")
column_spec_mock = mock.Mock()
data_type_mock = mock.Mock(type_code="type_code")
column_spec_mock.configure_mock(
name="column", display_name="column", data_type=data_type_mock
)
client = self.tables_client(
{
"list_table_specs.return_value": [table_spec_mock],
"list_column_specs.return_value": [column_spec_mock],
},
{},
)
client.update_column_spec(
dataset_name="name", column_spec_display_name="column", nullable=True
)
client.auto_ml_client.list_table_specs.assert_called_with("name")
client.auto_ml_client.list_column_specs.assert_called_with("table")
client.auto_ml_client.update_column_spec.assert_called_with(
{
"name": "column",
"data_type": {"type_code": "type_code", "nullable": True},
}
)
def test_update_column_spec_type_code(self):
table_spec_mock = mock.Mock()
# name is reserved in use of __init__, needs to be passed here
table_spec_mock.configure_mock(name="table")
column_spec_mock = mock.Mock()
data_type_mock = mock.Mock(type_code="type_code")
column_spec_mock.configure_mock(
name="column", display_name="column", data_type=data_type_mock
)
client = self.tables_client(
{
"list_table_specs.return_value": [table_spec_mock],
"list_column_specs.return_value": [column_spec_mock],
},
{},
)
client.update_column_spec(
dataset_name="name",
column_spec_display_name="column",
type_code="type_code2",
)
client.auto_ml_client.list_table_specs.assert_called_with("name")
client.auto_ml_client.list_column_specs.assert_called_with("table")
client.auto_ml_client.update_column_spec.assert_called_with(
{"name": "column", "data_type": {"type_code": "type_code2"}}
)
def test_update_column_spec_type_code_nullable(self):
table_spec_mock = mock.Mock()
# name is reserved in use of __init__, needs to be passed here
table_spec_mock.configure_mock(name="table")
column_spec_mock = mock.Mock()
data_type_mock = mock.Mock(type_code="type_code")
column_spec_mock.configure_mock(
name="column", display_name="column", data_type=data_type_mock
)
client = self.tables_client(
{
"list_table_specs.return_value": [table_spec_mock],
"list_column_specs.return_value": [column_spec_mock],
},
{},
)
client.update_column_spec(
dataset_name="name",
nullable=True,
column_spec_display_name="column",
type_code="type_code2",
)
client.auto_ml_client.list_table_specs.assert_called_with("name")
client.auto_ml_client.list_column_specs.assert_called_with("table")
client.auto_ml_client.update_column_spec.assert_called_with(
{
"name": "column",
"data_type": {"type_code": "type_code2", "nullable": True},
}
)
def test_update_column_spec_type_code_nullable_false(self):
table_spec_mock = mock.Mock()
# name is reserved in use of __init__, needs to be passed here
table_spec_mock.configure_mock(name="table")
column_spec_mock = mock.Mock()
data_type_mock = mock.Mock(type_code="type_code")
column_spec_mock.configure_mock(
name="column", display_name="column", data_type=data_type_mock
)
client = self.tables_client(
{
"list_table_specs.return_value": [table_spec_mock],
"list_column_specs.return_value": [column_spec_mock],
},
{},
)
client.update_column_spec(
dataset_name="name",
nullable=False,
column_spec_display_name="column",
type_code="type_code2",
)
client.auto_ml_client.list_table_specs.assert_called_with("name")
client.auto_ml_client.list_column_specs.assert_called_with("table")
client.auto_ml_client.update_column_spec.assert_called_with(
{
"name": "column",
"data_type": {"type_code": "type_code2", "nullable": False},
}
)
def test_set_target_column_table_not_found(self):
client = self.tables_client(
{"list_table_specs.side_effect": exceptions.NotFound("err")}, {}
)
with pytest.raises(exceptions.NotFound):
client.set_target_column(
dataset_name="name", column_spec_display_name="column2"
)
client.auto_ml_client.list_table_specs.assert_called_with("name")
client.auto_ml_client.list_column_specs.assert_not_called()
client.auto_ml_client.update_dataset.assert_not_called()
def test_set_target_column_not_found(self):
table_spec_mock = mock.Mock()
# name is reserved in use of __init__, needs to be passed here
table_spec_mock.configure_mock(name="table")
column_spec_mock = mock.Mock()
column_spec_mock.configure_mock(name="column/1", display_name="column")
client = self.tables_client(
{
"list_table_specs.return_value": [table_spec_mock],
"list_column_specs.return_value": [column_spec_mock],
},
{},
)
with pytest.raises(exceptions.NotFound):
client.set_target_column(
dataset_name="name", column_spec_display_name="column2"
)
client.auto_ml_client.list_table_specs.assert_called_with("name")
client.auto_ml_client.list_column_specs.assert_called_with("table")
client.auto_ml_client.update_dataset.assert_not_called()
def test_set_target_column(self):
table_spec_mock = mock.Mock()
# name is reserved in use of __init__, needs to be passed here
table_spec_mock.configure_mock(name="table")
column_spec_mock = mock.Mock()
column_spec_mock.configure_mock(name="column/1", display_name="column")
dataset_mock = mock.Mock()
tables_dataset_metadata_mock = mock.Mock()
tables_dataset_metadata_mock.configure_mock(
target_column_spec_id="2",
weight_column_spec_id="2",
ml_use_column_spec_id="3",
)
dataset_mock.configure_mock(
name="dataset", tables_dataset_metadata=tables_dataset_metadata_mock
)
client = self.tables_client(
{
"get_dataset.return_value": dataset_mock,
"list_table_specs.return_value": [table_spec_mock],
"list_column_specs.return_value": [column_spec_mock],
},
{},
)
client.set_target_column(dataset_name="name", column_spec_display_name="column")
client.auto_ml_client.list_table_specs.assert_called_with("name")
client.auto_ml_client.list_column_specs.assert_called_with("table")
client.auto_ml_client.update_dataset.assert_called_with(
{
"name": "dataset",
"tables_dataset_metadata": {
"target_column_spec_id": "1",
"weight_column_spec_id": "2",
"ml_use_column_spec_id": "3",
},
}
)
def test_set_weight_column_table_not_found(self):
client = self.tables_client(
{"list_table_specs.side_effect": exceptions.NotFound("err")}, {}
)
        with pytest.raises(exceptions.NotFound):
            client.set_weight_column(
                dataset_name="name", column_spec_display_name="column2"
            )
client.auto_ml_client.list_table_specs.assert_called_with("name")
client.auto_ml_client.list_column_specs.assert_not_called()
client.auto_ml_client.update_dataset.assert_not_called()
def test_set_weight_column_not_found(self):
table_spec_mock = mock.Mock()
# name is reserved in use of __init__, needs to be passed here
table_spec_mock.configure_mock(name="table")
column_spec_mock = mock.Mock()
column_spec_mock.configure_mock(name="column/1", display_name="column")
client = self.tables_client(
{
"list_table_specs.return_value": [table_spec_mock],
"list_column_specs.return_value": [column_spec_mock],
},
{},
)
with pytest.raises(exceptions.NotFound):
client.set_weight_column(
dataset_name="name", column_spec_display_name="column2"
)
client.auto_ml_client.list_table_specs.assert_called_with("name")
client.auto_ml_client.list_column_specs.assert_called_with("table")
client.auto_ml_client.update_dataset.assert_not_called()
def test_set_weight_column(self):
table_spec_mock = mock.Mock()
# name is reserved in use of __init__, needs to be passed here
table_spec_mock.configure_mock(name="table")
column_spec_mock = mock.Mock()
column_spec_mock.configure_mock(name="column/2", display_name="column")
dataset_mock = mock.Mock()
tables_dataset_metadata_mock = mock.Mock()
tables_dataset_metadata_mock.configure_mock(
target_column_spec_id="1",
weight_column_spec_id="1",
ml_use_column_spec_id="3",
)
dataset_mock.configure_mock(
name="dataset", tables_dataset_metadata=tables_dataset_metadata_mock
)
client = self.tables_client(
{
"get_dataset.return_value": dataset_mock,
"list_table_specs.return_value": [table_spec_mock],
"list_column_specs.return_value": [column_spec_mock],
},
{},
)
client.set_weight_column(dataset_name="name", column_spec_display_name="column")
client.auto_ml_client.list_table_specs.assert_called_with("name")
client.auto_ml_client.list_column_specs.assert_called_with("table")
client.auto_ml_client.update_dataset.assert_called_with(
{
"name": "dataset",
"tables_dataset_metadata": {
"target_column_spec_id": "1",
"weight_column_spec_id": "2",
"ml_use_column_spec_id": "3",
},
}
)
def test_clear_weight_column(self):
dataset_mock = mock.Mock()
tables_dataset_metadata_mock = mock.Mock()
tables_dataset_metadata_mock.configure_mock(
target_column_spec_id="1",
weight_column_spec_id="2",
ml_use_column_spec_id="3",
)
dataset_mock.configure_mock(
name="dataset", tables_dataset_metadata=tables_dataset_metadata_mock
)
client = self.tables_client({"get_dataset.return_value": dataset_mock}, {})
client.clear_weight_column(dataset_name="name")
client.auto_ml_client.update_dataset.assert_called_with(
{
"name": "dataset",
"tables_dataset_metadata": {
"target_column_spec_id": "1",
"weight_column_spec_id": None,
"ml_use_column_spec_id": "3",
},
}
)
def test_set_test_train_column_table_not_found(self):
client = self.tables_client(
{"list_table_specs.side_effect": exceptions.NotFound("err")}, {}
)
with pytest.raises(exceptions.NotFound):
client.set_test_train_column(
dataset_name="name", column_spec_display_name="column2"
)
client.auto_ml_client.list_table_specs.assert_called_with("name")
client.auto_ml_client.list_column_specs.assert_not_called()
client.auto_ml_client.update_dataset.assert_not_called()
def test_set_test_train_column_not_found(self):
table_spec_mock = mock.Mock()
# name is reserved in use of __init__, needs to be passed here
table_spec_mock.configure_mock(name="table")
column_spec_mock = mock.Mock()
column_spec_mock.configure_mock(name="column/1", display_name="column")
client = self.tables_client(
{
"list_table_specs.return_value": [table_spec_mock],
"list_column_specs.return_value": [column_spec_mock],
},
{},
)
with pytest.raises(exceptions.NotFound):
client.set_test_train_column(
dataset_name="name", column_spec_display_name="column2"
)
client.auto_ml_client.list_table_specs.assert_called_with("name")
client.auto_ml_client.list_column_specs.assert_called_with("table")
client.auto_ml_client.update_dataset.assert_not_called()
def test_set_test_train_column(self):
table_spec_mock = mock.Mock()
# name is reserved in use of __init__, needs to be passed here
table_spec_mock.configure_mock(name="table")
column_spec_mock = mock.Mock()
column_spec_mock.configure_mock(name="column/3", display_name="column")
dataset_mock = mock.Mock()
tables_dataset_metadata_mock = mock.Mock()
tables_dataset_metadata_mock.configure_mock(
target_column_spec_id="1",
weight_column_spec_id="2",
ml_use_column_spec_id="2",
)
dataset_mock.configure_mock(
name="dataset", tables_dataset_metadata=tables_dataset_metadata_mock
)
client = self.tables_client(
{
"get_dataset.return_value": dataset_mock,
"list_table_specs.return_value": [table_spec_mock],
"list_column_specs.return_value": [column_spec_mock],
},
{},
)
client.set_test_train_column(
dataset_name="name", column_spec_display_name="column"
)
client.auto_ml_client.list_table_specs.assert_called_with("name")
client.auto_ml_client.list_column_specs.assert_called_with("table")
client.auto_ml_client.update_dataset.assert_called_with(
{
"name": "dataset",
"tables_dataset_metadata": {
"target_column_spec_id": "1",
"weight_column_spec_id": "2",
"ml_use_column_spec_id": "3",
},
}
)
def test_clear_test_train_column(self):
dataset_mock = mock.Mock()
tables_dataset_metadata_mock = mock.Mock()
tables_dataset_metadata_mock.configure_mock(
target_column_spec_id="1",
weight_column_spec_id="2",
ml_use_column_spec_id="2",
)
dataset_mock.configure_mock(
name="dataset", tables_dataset_metadata=tables_dataset_metadata_mock
)
client = self.tables_client({"get_dataset.return_value": dataset_mock}, {})
client.clear_test_train_column(dataset_name="name")
client.auto_ml_client.update_dataset.assert_called_with(
{
"name": "dataset",
"tables_dataset_metadata": {
"target_column_spec_id": "1",
"weight_column_spec_id": "2",
"ml_use_column_spec_id": None,
},
}
)
def test_set_time_column(self):
table_spec_mock = mock.Mock()
# name is reserved in use of __init__, needs to be passed here
table_spec_mock.configure_mock(name="table")
column_spec_mock = mock.Mock()
column_spec_mock.configure_mock(name="column/3", display_name="column")
dataset_mock = mock.Mock()
dataset_mock.configure_mock(name="dataset")
client = self.tables_client(
{
"get_dataset.return_value": dataset_mock,
"list_table_specs.return_value": [table_spec_mock],
"list_column_specs.return_value": [column_spec_mock],
},
{},
)
client.set_time_column(dataset_name="name", column_spec_display_name="column")
client.auto_ml_client.list_table_specs.assert_called_with("name")
client.auto_ml_client.list_column_specs.assert_called_with("table")
client.auto_ml_client.update_table_spec.assert_called_with(
{"name": "table", "time_column_spec_id": "3"}
)
def test_clear_time_column(self):
table_spec_mock = mock.Mock()
# name is reserved in use of __init__, needs to be passed here
table_spec_mock.configure_mock(name="table")
dataset_mock = mock.Mock()
dataset_mock.configure_mock(name="dataset")
client = self.tables_client(
{
"get_dataset.return_value": dataset_mock,
"list_table_specs.return_value": [table_spec_mock],
},
{},
)
client.clear_time_column(dataset_name="name")
client.auto_ml_client.update_table_spec.assert_called_with(
{"name": "table", "time_column_spec_id": None}
)
def test_get_model_evaluation(self):
client = self.tables_client({}, {})
ds = client.get_model_evaluation(model_evaluation_name="x")
client.auto_ml_client.get_model_evaluation.assert_called_with("x")
def test_list_model_evaluations_empty(self):
client = self.tables_client({"list_model_evaluations.return_value": []}, {})
ds = client.list_model_evaluations(model_name="model")
client.auto_ml_client.list_model_evaluations.assert_called_with("model")
assert ds == []
def test_list_model_evaluations_not_empty(self):
evaluations = ["eval"]
client = self.tables_client(
{
"list_model_evaluations.return_value": evaluations,
"location_path.return_value": LOCATION_PATH,
},
{},
)
ds = client.list_model_evaluations(model_name="model")
client.auto_ml_client.list_model_evaluations.assert_called_with("model")
assert len(ds) == 1
assert ds[0] == "eval"
def test_list_models_empty(self):
client = self.tables_client(
{
"list_models.return_value": [],
"location_path.return_value": LOCATION_PATH,
},
{},
)
ds = client.list_models()
client.auto_ml_client.location_path.assert_called_with(PROJECT, REGION)
client.auto_ml_client.list_models.assert_called_with(LOCATION_PATH)
assert ds == []
def test_list_models_not_empty(self):
models = ["some_model"]
client = self.tables_client(
{
"list_models.return_value": models,
"location_path.return_value": LOCATION_PATH,
},
{},
)
ds = client.list_models()
client.auto_ml_client.location_path.assert_called_with(PROJECT, REGION)
client.auto_ml_client.list_models.assert_called_with(LOCATION_PATH)
assert len(ds) == 1
assert ds[0] == "some_model"
def test_get_model_name(self):
model_actual = "model"
client = self.tables_client({"get_model.return_value": model_actual}, {})
model = client.get_model(model_name="my_model")
client.auto_ml_client.get_model.assert_called_with("my_model")
assert model == model_actual
def test_get_no_model(self):
client = self.tables_client(
{"get_model.side_effect": exceptions.NotFound("err")}, {}
)
with pytest.raises(exceptions.NotFound):
client.get_model(model_name="my_model")
client.auto_ml_client.get_model.assert_called_with("my_model")
def test_get_model_from_empty_list(self):
client = self.tables_client({"list_models.return_value": []}, {})
with pytest.raises(exceptions.NotFound):
client.get_model(model_display_name="my_model")
def test_get_model_from_list_not_found(self):
client = self.tables_client(
{"list_models.return_value": [mock.Mock(display_name="not_it")]}, {}
)
with pytest.raises(exceptions.NotFound):
client.get_model(model_display_name="my_model")
def test_get_model_from_list(self):
client = self.tables_client(
{
"list_models.return_value": [
mock.Mock(display_name="not_it"),
mock.Mock(display_name="my_model"),
]
},
{},
)
model = client.get_model(model_display_name="my_model")
assert model.display_name == "my_model"
def test_get_model_from_list_ambiguous(self):
client = self.tables_client(
{
"list_models.return_value": [
mock.Mock(display_name="my_model"),
mock.Mock(display_name="not_my_model"),
mock.Mock(display_name="my_model"),
]
},
{},
)
with pytest.raises(ValueError):
client.get_model(model_display_name="my_model")
def test_delete_model(self):
model = mock.Mock()
model.configure_mock(name="name")
client = self.tables_client({"delete_model.return_value": None}, {})
client.delete_model(model=model)
client.auto_ml_client.delete_model.assert_called_with("name")
def test_delete_model_not_found(self):
client = self.tables_client({"list_models.return_value": []}, {})
client.delete_model(model_display_name="not_found")
client.auto_ml_client.delete_model.assert_not_called()
def test_delete_model_name(self):
client = self.tables_client({"delete_model.return_value": None}, {})
client.delete_model(model_name="name")
client.auto_ml_client.delete_model.assert_called_with("name")
def test_deploy_model_no_args(self):
client = self.tables_client({}, {})
with pytest.raises(ValueError):
client.deploy_model()
client.auto_ml_client.deploy_model.assert_not_called()
def test_deploy_model(self):
client = self.tables_client({}, {})
client.deploy_model(model_name="name")
client.auto_ml_client.deploy_model.assert_called_with("name")
def test_deploy_model_not_found(self):
client = self.tables_client({"list_models.return_value": []}, {})
with pytest.raises(exceptions.NotFound):
client.deploy_model(model_display_name="name")
client.auto_ml_client.deploy_model.assert_not_called()
def test_undeploy_model(self):
client = self.tables_client({}, {})
client.undeploy_model(model_name="name")
client.auto_ml_client.undeploy_model.assert_called_with("name")
def test_undeploy_model_not_found(self):
client = self.tables_client({"list_models.return_value": []}, {})
with pytest.raises(exceptions.NotFound):
client.undeploy_model(model_display_name="name")
client.auto_ml_client.undeploy_model.assert_not_called()
def test_create_model(self):
table_spec_mock = mock.Mock()
# name is reserved in use of __init__, needs to be passed here
table_spec_mock.configure_mock(name="table")
column_spec_mock = mock.Mock()
column_spec_mock.configure_mock(name="column/2", display_name="column")
client = self.tables_client(
{
"list_table_specs.return_value": [table_spec_mock],
"list_column_specs.return_value": [column_spec_mock],
"location_path.return_value": LOCATION_PATH,
},
{},
)
client.create_model(
"my_model", dataset_name="my_dataset", train_budget_milli_node_hours=1000
)
client.auto_ml_client.create_model.assert_called_with(
LOCATION_PATH,
{
"display_name": "my_model",
"dataset_id": "my_dataset",
"tables_model_metadata": {"train_budget_milli_node_hours": 1000},
},
)
def test_create_model_include_columns(self):
table_spec_mock = mock.Mock()
# name is reserved in use of __init__, needs to be passed here
table_spec_mock.configure_mock(name="table")
column_spec_mock1 = mock.Mock()
column_spec_mock1.configure_mock(name="column/1", display_name="column1")
column_spec_mock2 = mock.Mock()
column_spec_mock2.configure_mock(name="column/2", display_name="column2")
client = self.tables_client(
{
"list_table_specs.return_value": [table_spec_mock],
"list_column_specs.return_value": [
column_spec_mock1,
column_spec_mock2,
],
"location_path.return_value": LOCATION_PATH,
},
{},
)
client.create_model(
"my_model",
dataset_name="my_dataset",
include_column_spec_names=["column1"],
train_budget_milli_node_hours=1000,
)
client.auto_ml_client.create_model.assert_called_with(
LOCATION_PATH,
{
"display_name": "my_model",
"dataset_id": "my_dataset",
"tables_model_metadata": {
"train_budget_milli_node_hours": 1000,
"input_feature_column_specs": [column_spec_mock1],
},
},
)
def test_create_model_exclude_columns(self):
table_spec_mock = mock.Mock()
# name is reserved in use of __init__, needs to be passed here
table_spec_mock.configure_mock(name="table")
column_spec_mock1 = mock.Mock()
column_spec_mock1.configure_mock(name="column/1", display_name="column1")
column_spec_mock2 = mock.Mock()
column_spec_mock2.configure_mock(name="column/2", display_name="column2")
client = self.tables_client(
{
"list_table_specs.return_value": [table_spec_mock],
"list_column_specs.return_value": [
column_spec_mock1,
column_spec_mock2,
],
"location_path.return_value": LOCATION_PATH,
},
{},
)
client.create_model(
"my_model",
dataset_name="my_dataset",
exclude_column_spec_names=["column1"],
train_budget_milli_node_hours=1000,
)
client.auto_ml_client.create_model.assert_called_with(
LOCATION_PATH,
{
"display_name": "my_model",
"dataset_id": "my_dataset",
"tables_model_metadata": {
"train_budget_milli_node_hours": 1000,
"input_feature_column_specs": [column_spec_mock2],
},
},
)
def test_create_model_invalid_hours_small(self):
client = self.tables_client({}, {})
with pytest.raises(ValueError):
client.create_model(
"my_model", dataset_name="my_dataset", train_budget_milli_node_hours=1
)
client.auto_ml_client.create_model.assert_not_called()
def test_create_model_invalid_hours_large(self):
client = self.tables_client({}, {})
with pytest.raises(ValueError):
client.create_model(
"my_model",
dataset_name="my_dataset",
train_budget_milli_node_hours=1000000,
)
client.auto_ml_client.create_model.assert_not_called()
def test_create_model_invalid_no_dataset(self):
client = self.tables_client({}, {})
with pytest.raises(ValueError):
client.create_model("my_model", train_budget_milli_node_hours=1000)
client.auto_ml_client.get_dataset.assert_not_called()
client.auto_ml_client.create_model.assert_not_called()
def test_create_model_invalid_include_exclude(self):
client = self.tables_client({}, {})
with pytest.raises(ValueError):
client.create_model(
"my_model",
dataset_name="my_dataset",
include_column_spec_names=["a"],
exclude_column_spec_names=["b"],
train_budget_milli_node_hours=1000,
)
client.auto_ml_client.get_dataset.assert_not_called()
client.auto_ml_client.create_model.assert_not_called()
def test_predict_from_array(self):
data_type = mock.Mock(type_code=data_types_pb2.CATEGORY)
column_spec = mock.Mock(display_name="a", data_type=data_type)
model_metadata = mock.Mock(input_feature_column_specs=[column_spec])
model = mock.Mock()
model.configure_mock(tables_model_metadata=model_metadata, name="my_model")
client = self.tables_client({"get_model.return_value": model}, {})
client.predict(["1"], model_name="my_model")
client.prediction_client.predict.assert_called_with(
"my_model", {"row": {"values": [{"string_value": "1"}]}}
)
def test_predict_from_dict(self):
data_type = mock.Mock(type_code=data_types_pb2.CATEGORY)
column_spec_a = mock.Mock(display_name="a", data_type=data_type)
column_spec_b = mock.Mock(display_name="b", data_type=data_type)
model_metadata = mock.Mock(
input_feature_column_specs=[column_spec_a, column_spec_b]
)
model = mock.Mock()
model.configure_mock(tables_model_metadata=model_metadata, name="my_model")
client = self.tables_client({"get_model.return_value": model}, {})
client.predict({"a": "1", "b": "2"}, model_name="my_model")
client.prediction_client.predict.assert_called_with(
"my_model",
{"row": {"values": [{"string_value": "1"}, {"string_value": "2"}]}},
)
def test_predict_from_dict_missing(self):
data_type = mock.Mock(type_code=data_types_pb2.CATEGORY)
column_spec_a = mock.Mock(display_name="a", data_type=data_type)
column_spec_b = mock.Mock(display_name="b", data_type=data_type)
model_metadata = mock.Mock(
input_feature_column_specs=[column_spec_a, column_spec_b]
)
model = mock.Mock()
model.configure_mock(tables_model_metadata=model_metadata, name="my_model")
client = self.tables_client({"get_model.return_value": model}, {})
client.predict({"a": "1"}, model_name="my_model")
client.prediction_client.predict.assert_called_with(
"my_model", {"row": {"values": [{"string_value": "1"}, {"null_value": 0}]}}
)
def test_predict_all_types(self):
float_type = mock.Mock(type_code=data_types_pb2.FLOAT64)
timestamp_type = mock.Mock(type_code=data_types_pb2.TIMESTAMP)
string_type = mock.Mock(type_code=data_types_pb2.STRING)
array_type = mock.Mock(type_code=data_types_pb2.ARRAY)
struct_type = mock.Mock(type_code=data_types_pb2.STRUCT)
category_type = mock.Mock(type_code=data_types_pb2.CATEGORY)
column_spec_float = mock.Mock(display_name="float", data_type=float_type)
column_spec_timestamp = mock.Mock(
display_name="timestamp", data_type=timestamp_type
)
column_spec_string = mock.Mock(display_name="string", data_type=string_type)
column_spec_array = mock.Mock(display_name="array", data_type=array_type)
column_spec_struct = mock.Mock(display_name="struct", data_type=struct_type)
column_spec_category = mock.Mock(
display_name="category", data_type=category_type
)
column_spec_null = mock.Mock(display_name="null", data_type=category_type)
model_metadata = mock.Mock(
input_feature_column_specs=[
column_spec_float,
column_spec_timestamp,
column_spec_string,
column_spec_array,
column_spec_struct,
column_spec_category,
column_spec_null,
]
)
model = mock.Mock()
model.configure_mock(tables_model_metadata=model_metadata, name="my_model")
client = self.tables_client({"get_model.return_value": model}, {})
client.predict(
{
"float": 1.0,
"timestamp": "EST",
"string": "text",
"array": [1],
"struct": {"a": "b"},
"category": "a",
"null": None,
},
model_name="my_model",
)
client.prediction_client.predict.assert_called_with(
"my_model",
{
"row": {
"values": [
{"number_value": 1.0},
{"string_value": "EST"},
{"string_value": "text"},
{"list_value": [1]},
{"struct_value": {"a": "b"}},
{"string_value": "a"},
{"null_value": 0},
]
}
},
)
def test_predict_from_array_missing(self):
data_type = mock.Mock(type_code=data_types_pb2.CATEGORY)
column_spec = mock.Mock(display_name="a", data_type=data_type)
model_metadata = mock.Mock(input_feature_column_specs=[column_spec])
model = mock.Mock()
model.configure_mock(tables_model_metadata=model_metadata, name="my_model")
client = self.tables_client({"get_model.return_value": model}, {})
with pytest.raises(ValueError):
client.predict([], model_name="my_model")
client.prediction_client.predict.assert_not_called()
def test_batch_predict_pandas_dataframe(self):
client = self.tables_client(
gcs_client_attrs={
"bucket_name": "my_bucket",
"upload_pandas_dataframe.return_value": "gs://input",
}
)
dataframe = pandas.DataFrame({})
client.batch_predict(
project=PROJECT,
region=REGION,
model_name="my_model",
pandas_dataframe=dataframe,
gcs_output_uri_prefix="gs://output",
)
client.gcs_client.ensure_bucket_exists.assert_called_with(PROJECT, REGION)
client.gcs_client.upload_pandas_dataframe.assert_called_with(dataframe)
client.prediction_client.batch_predict.assert_called_with(
"my_model",
{"gcs_source": {"input_uris": ["gs://input"]}},
{"gcs_destination": {"output_uri_prefix": "gs://output"}},
)
def test_batch_predict_pandas_dataframe_init_gcs(self):
client = automl_v1beta1.TablesClient(
client=mock.Mock(),
prediction_client=mock.Mock(),
project=PROJECT,
region=REGION,
credentials=AnonymousCredentials(),
)
dataframe = pandas.DataFrame({})
patch = mock.patch(
"google.cloud.automl_v1beta1.tables.tables_client.gcs_client.GcsClient",
bucket_name="my_bucket",
)
with patch as MockGcsClient:
mockInstance = MockGcsClient.return_value
mockInstance.upload_pandas_dataframe.return_value = "gs://input"
client.batch_predict(
model_name="my_model",
pandas_dataframe=dataframe,
gcs_output_uri_prefix="gs://output",
)
client.gcs_client.ensure_bucket_exists.assert_called_with(PROJECT, REGION)
client.gcs_client.upload_pandas_dataframe.assert_called_with(dataframe)
client.prediction_client.batch_predict.assert_called_with(
"my_model",
{"gcs_source": {"input_uris": ["gs://input"]}},
{"gcs_destination": {"output_uri_prefix": "gs://output"}},
)
def test_batch_predict_gcs(self):
client = self.tables_client({}, {})
client.batch_predict(
model_name="my_model",
gcs_input_uris="gs://input",
gcs_output_uri_prefix="gs://output",
)
client.prediction_client.batch_predict.assert_called_with(
"my_model",
{"gcs_source": {"input_uris": ["gs://input"]}},
{"gcs_destination": {"output_uri_prefix": "gs://output"}},
)
def test_batch_predict_bigquery(self):
client = self.tables_client({}, {})
client.batch_predict(
model_name="my_model",
bigquery_input_uri="bq://input",
bigquery_output_uri="bq://output",
)
client.prediction_client.batch_predict.assert_called_with(
"my_model",
{"bigquery_source": {"input_uri": "bq://input"}},
{"bigquery_destination": {"output_uri": "bq://output"}},
)
def test_batch_predict_mixed(self):
client = self.tables_client({}, {})
client.batch_predict(
model_name="my_model",
gcs_input_uris="gs://input",
bigquery_output_uri="bq://output",
)
client.prediction_client.batch_predict.assert_called_with(
"my_model",
{"gcs_source": {"input_uris": ["gs://input"]}},
{"bigquery_destination": {"output_uri": "bq://output"}},
)
def test_batch_predict_missing_input_gcs_uri(self):
client = self.tables_client({}, {})
with pytest.raises(ValueError):
client.batch_predict(
model_name="my_model",
gcs_input_uris=None,
gcs_output_uri_prefix="gs://output",
)
client.prediction_client.batch_predict.assert_not_called()
def test_batch_predict_missing_input_bigquery_uri(self):
client = self.tables_client({}, {})
with pytest.raises(ValueError):
client.batch_predict(
model_name="my_model",
bigquery_input_uri=None,
gcs_output_uri_prefix="gs://output",
)
client.prediction_client.batch_predict.assert_not_called()
def test_batch_predict_missing_output_gcs_uri(self):
client = self.tables_client({}, {})
with pytest.raises(ValueError):
client.batch_predict(
model_name="my_model",
gcs_input_uris="gs://input",
gcs_output_uri_prefix=None,
)
client.prediction_client.batch_predict.assert_not_called()
def test_batch_predict_missing_output_bigquery_uri(self):
client = self.tables_client({}, {})
with pytest.raises(ValueError):
client.batch_predict(
model_name="my_model",
gcs_input_uris="gs://input",
bigquery_output_uri=None,
)
client.prediction_client.batch_predict.assert_not_called()
def test_batch_predict_missing_model(self):
client = self.tables_client({"list_models.return_value": []}, {})
with pytest.raises(exceptions.NotFound):
client.batch_predict(
model_display_name="my_model",
gcs_input_uris="gs://input",
gcs_output_uri_prefix="gs://output",
)
client.prediction_client.batch_predict.assert_not_called()
def test_batch_predict_no_model(self):
client = self.tables_client({}, {})
with pytest.raises(ValueError):
client.batch_predict(
gcs_input_uris="gs://input", gcs_output_uri_prefix="gs://output"
)
client.auto_ml_client.list_models.assert_not_called()
client.prediction_client.batch_predict.assert_not_called()
def test_auto_ml_client_credentials(self):
credentials_mock = mock.Mock()
patch_auto_ml_client = mock.patch(
"google.cloud.automl_v1beta1.gapic.auto_ml_client.AutoMlClient"
)
with patch_auto_ml_client as MockAutoMlClient:
client = automl_v1beta1.TablesClient(credentials=credentials_mock)
_, auto_ml_client_kwargs = MockAutoMlClient.call_args
assert "credentials" in auto_ml_client_kwargs
assert auto_ml_client_kwargs["credentials"] == credentials_mock
def test_prediction_client_credentials(self):
credentials_mock = mock.Mock()
patch_prediction_client = mock.patch(
"google.cloud.automl_v1beta1.gapic.prediction_service_client.PredictionServiceClient"
)
with patch_prediction_client as MockPredictionClient:
client = automl_v1beta1.TablesClient(credentials=credentials_mock)
_, prediction_client_kwargs = MockPredictionClient.call_args
assert "credentials" in prediction_client_kwargs
assert prediction_client_kwargs["credentials"] == credentials_mock
| tseaver/google-cloud-python | automl/tests/unit/gapic/v1beta1/test_tables_client_v1beta1.py | Python | apache-2.0 | 58,104 |
# Copyright (C) 2011-2014 by the Free Software Foundation, Inc.
#
# This file is part of GNU Mailman.
#
# GNU Mailman is free software: you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free
# Software Foundation, either version 3 of the License, or (at your option)
# any later version.
#
# GNU Mailman is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
# more details.
#
# You should have received a copy of the GNU General Public License along with
# GNU Mailman. If not, see <http://www.gnu.org/licenses/>.
"""Global events."""
from __future__ import absolute_import, print_function, unicode_literals
__metaclass__ = type
__all__ = [
'initialize',
]
from zope import event
from mailman.app import (
domain, membership, moderator, registrar, subscriptions)
from mailman.core import i18n, switchboard
from mailman.languages import manager as language_manager
from mailman.styles import manager as style_manager
from mailman.utilities import passwords
def initialize():
"""Initialize global event subscribers."""
event.subscribers.extend([
domain.handle_DomainDeletingEvent,
i18n.handle_ConfigurationUpdatedEvent,
language_manager.handle_ConfigurationUpdatedEvent,
membership.handle_SubscriptionEvent,
moderator.handle_ListDeletingEvent,
passwords.handle_ConfigurationUpdatedEvent,
registrar.handle_ConfirmationNeededEvent,
style_manager.handle_ConfigurationUpdatedEvent,
subscriptions.handle_ListDeletingEvent,
switchboard.handle_ConfigurationUpdatedEvent,
])
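

# Illustrative sketch (editor's addition, not part of Mailman): the entries
# above are plain callables appended to zope.event's global ``subscribers``
# list, so anything passed to event.notify() is handed to each of them in
# turn. A minimal standalone demonstration of that mechanism:
#
# event.subscribers.append(lambda evt: print(evt))
# event.notify('hello')   # every subscriber, including ours, receives 'hello'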
| adam-iris/mailman | src/mailman/app/events.py | Python | gpl-3.0 | 1,804 |
import datetime
from ansible.runner import Runner
from ansible import constants
from bson import ObjectId
from celery import Celery
from celery.exceptions import SoftTimeLimitExceeded
from celery_redis_sentinel import register
import spot.tasks.celeryconfig
from spot.spotconfig import SpotConfig
from spot.spotutils import SpotUtils
cfg = SpotConfig()
cfg.load(config_file='/etc/spot/spot.cfg')
spot_utils = SpotUtils()
if cfg.get_section_option('plugins', 'storage'):
storage_module = spot_utils.load_plugin(cfg.get_section_option('plugins', 'storage'))
else:
import spot.storage.mongo
storage_module = spot.storage.mongo
storage = storage_module.SpotStorage()
for conf in storage.plugin_required_config():
if not cfg.get_section('storage'):
raise AttributeError("[storage] section missing from %s" % cfg.config_source)
if conf not in cfg.get_section('storage'):
raise AttributeError("%s not present in section [storage] in config file %s. Storage plugin requires: %s" % (
conf, cfg.config_source, storage.plugin_required_config()))
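# A hypothetical /etc/spot/spot.cfg sketch matching the lookups in this
# module (editor's addition); which [storage] options are mandatory depends
# on the plugin's plugin_required_config():
#
# [plugins]
# storage = my_package.my_storage_module
#
# [storage]
# mongo_url = mongodb://localhost:27017
# mongo_user = spot
# mongo_password = secret
# replica_set =
# read_preference =
# database_name = spot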
register()
app = Celery()
app.config_from_object('spot.tasks.celeryconfig.Config')
@app.task
def ansible_runner(inventory_manager, pattern, run_hosts):
if cfg.get_section_option('ansible', 'private_key_file'):
private_key_file = cfg.get_section_option('ansible', 'private_key_file')
else:
private_key_file = constants.DEFAULT_PRIVATE_KEY_FILE
if cfg.get_section_option('ansible', 'remote_user'):
remote_user = cfg.get_section_option('ansible', 'remote_user')
else:
remote_user = constants.DEFAULT_REMOTE_USER
try:
runner = Runner(
module_name="setup",
module_args="",
inventory=inventory_manager,
pattern=pattern,
run_hosts=run_hosts,
become=True,
forks=5,
timeout=15,
private_key_file=private_key_file,
remote_user=remote_user
)
data = runner.run()
except IOError, e:
print "IOError happened.. stopping here, ", e
return []
except SoftTimeLimitExceeded, soft_time_limit:
print "Runner is going to time out. Exit now!", soft_time_limit
return []
except Exception, e:
print "Something bad happened, ", e
return []
try:
dark = data["dark"]
for hostname in dark:
# storage.start(database_name='spot')
storage.start(mongourl=cfg.get_section_option('storage', 'mongo_url'),
user=cfg.get_section_option('storage', 'mongo_user'),
password=cfg.get_section_option('storage', 'mongo_password'),
rs=cfg.get_section_option('storage', 'replica_set'),
read_preference=cfg.get_section_option('storage', 'read_preference'),
database_name=cfg.get_section_option('storage', 'database_name'))
db_host = storage.get_one('ansible_hosts', hostname=hostname)
if db_host is not None:
db_host["dark"] = True
# Update this with logging
print "Dark host: %s" % hostname
print dark
if db_host:
storage.save('ansible_hosts', db_host)
storage.disconnect()
return []
except KeyError:
pass
try:
return data["contacted"]
except KeyError:
return []
@app.task
def software_api(inventory_manager, module_path, pattern, hostname):
rpm_runner = Runner(
module_name="rpm",
module_args="list=True",
pattern=pattern,
module_path=module_path,
inventory=inventory_manager,
run_hosts=[hostname],
forks=1,
)
rpm_data = rpm_runner.run()
if 'contacted' in rpm_data:
try:
packages = rpm_data['contacted'][hostname]['results']
storage.start(mongourl=cfg.get_section_option('storage', 'mongo_url'),
user=cfg.get_section_option('storage', 'mongo_user'),
password=cfg.get_section_option('storage', 'mongo_password'),
rs=cfg.get_section_option('storage', 'replica_set'),
read_preference=cfg.get_section_option('storage', 'read_preference'),
database_name=cfg.get_section_option('storage', 'database_name'))
db_host = storage.get_one('ansible_hosts', hostname=hostname)
db_host = db_host['_id']
host_id = ObjectId(db_host)
software_id = storage.get_one('software', ref_host=host_id)
if software_id is None:
software_data = {
"last_updated": datetime.datetime.utcnow(),
"ref_host": db_host,
"installed_software": packages
}
else:
software_data = {
"_id": software_id['_id'],
"last_updated": datetime.datetime.utcnow(),
"ref_host": db_host,
"installed_software": packages
}
            # TODO: fix this - the software document is saved again even
            # when software_id is already present, so we write it twice
            # for no reason.
software_id = storage.save('software', software_data)
db_host = storage.get_one('ansible_hosts', _id=host_id)
db_host['ref_software'] = software_id
storage.save('ansible_hosts', db_host)
rpm_data = None
storage.disconnect()
except KeyError, e:
print "KeyError: ", e, rpm_data
pass
except Exception, e:
print "IOError happened.. stopping here, ", e
pass
else:
print rpm_data
@app.task
def analyze_host(hostname, values):
storage.start(mongourl=cfg.get_section_option('storage', 'mongo_url'),
user=cfg.get_section_option('storage', 'mongo_user'),
password=cfg.get_section_option('storage', 'mongo_password'),
rs=cfg.get_section_option('storage', 'replica_set'),
read_preference=cfg.get_section_option('storage', 'read_preference'),
database_name=cfg.get_section_option('storage', 'database_name'))
new_db_host = storage.get_one('ansible_hosts', hostname=hostname)
if not new_db_host:
new_db_host = {}
if 'dark' in new_db_host:
print hostname + " was dark, it came back to life"
del new_db_host['dark']
for key in spot_utils.facts_map:
if spot_utils.facts_map[key] in values['ansible_facts']:
new_db_host[key] = values['ansible_facts'][
spot_utils.facts_map[key]]
else:
new_db_host[key] = "dark"
# Manage the interfaces separately as there might be more than 1
for interface in values['ansible_facts']['ansible_interfaces']:
inv_interface = 'ansible_' + interface
new_db_host[interface] = values['ansible_facts'][inv_interface]
new_db_host["last_updated"] = datetime.datetime.utcnow()
storage.save('ansible_hosts', new_db_host)
storage.disconnect()
| Shaps/spot | spot/tasks/__init__.py | Python | gpl-3.0 | 7,266 |
from juju.lib.testing import TestCase
from juju.state.endpoint import RelationEndpoint
class RelationEndpointTest(TestCase):
def test_may_relate_to(self):
        """Endpoints may relate iff they share a relation type and have
        complementary roles (server/client), or are both peers."""
mysql_ep = RelationEndpoint("mysqldb", "mysql", "db", "server")
blog_ep = RelationEndpoint("blog", "mysql", "mysql", "client")
pg_ep = RelationEndpoint("postgres", "postgres", "db", "server")
self.assertRaises(TypeError, mysql_ep.may_relate_to, 42)
# should relate, along with symmetric case
self.assert_(mysql_ep.may_relate_to(blog_ep))
self.assert_(blog_ep.may_relate_to(mysql_ep))
# no common relation_type
self.assertFalse(blog_ep.may_relate_to(pg_ep))
self.assertFalse(pg_ep.may_relate_to(blog_ep))
        # antireflexive on relation_role -
        # must be consumer AND provider or vice versa
self.assertFalse(blog_ep.may_relate_to(
RelationEndpoint("foo", "mysql", "db", "client")))
self.assertFalse(mysql_ep.may_relate_to(
RelationEndpoint("foo", "mysql", "db", "server")))
# irreflexive for server/client
self.assertFalse(mysql_ep.may_relate_to(mysql_ep))
self.assertFalse(blog_ep.may_relate_to(blog_ep))
self.assertFalse(pg_ep.may_relate_to(pg_ep))
# but reflexive for peer
riak_ep = RelationEndpoint("riak", "riak", "riak", "peer")
self.assert_(riak_ep.may_relate_to(riak_ep))
| anbangr/trusted-juju | juju/state/tests/test_endpoint.py | Python | agpl-3.0 | 1,469 |
# -*- coding: utf-8 -*-
# Copyright 2017 DST Controls
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
osisoftpy.elements
~~~~~~~~~~~~
Some blah blah about what this file is for...
max values = 2147483647
"""
#Update comment block above
from __future__ import (absolute_import, division, unicode_literals)
from future.builtins import *
import collections
import logging
from osisoftpy.factory import Factory
from osisoftpy.factory import create
from osisoftpy.internal import get_batch
from osisoftpy.value import Value
log = logging.getLogger(__name__)
class Elements(collections.MutableSequence):
def __init__(self, iterable, webapi):
self.list = list()
self.webapi = webapi
self.extend(list(iterable))
def __getitem__(self, key):
return self.list[key]
def __setitem__(self, key, value):
self.list[key] = value
def __delitem__(self, key):
del self.list[key]
def __len__(self):
return len(self.list)
def insert(self, key, value):
self.list.insert(key, value)
def __str__(self):
return str(self.list)
@property
def session(self):
return self.webapi.session
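

# Illustrative sketch (editor's addition): Elements behaves like a plain
# list while carrying the Web API client; in real use instances would be
# produced by the library's factory helpers rather than built by hand.
# The fake webapi object below is a stand-in for demonstration only.
if __name__ == '__main__':
    class _FakeWebAPI(object):
        session = None

    elements = Elements([], _FakeWebAPI())
    elements.append('element-placeholder')
    print(len(elements))  # -> 1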
| dstcontrols/osisoftpy | src/osisoftpy/elements.py | Python | apache-2.0 | 1,714 |
"""
Copyright (C) 2010-2013, Ryan Fan
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Library General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
"""
class SRInfo:
"""
"""
def __init__(self, jsonDict):
for k in jsonDict.keys():
self.__dict__[k.lower()] = jsonDict[k]
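

if __name__ == '__main__':
    # Illustrative usage (editor's addition): keys are lower-cased on the
    # way in, so arbitrary JSON fields become instance attributes. The
    # field names below are hypothetical.
    demo = SRInfo({"Number": "SR123", "Status": "Open"})
    print(demo.number)   # -> SR123
    print(demo.status)   # -> Open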
| rfancn/myprojects | analyzesr/srInfo.py | Python | mit | 891 |
# -*- coding: utf-8 -*-
# Generated by Django 1.9.7 on 2016-10-18 17:20
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('training', '0008_remove_challenge_terminal_password'),
]
operations = [
migrations.AddField(
model_name='challenge',
name='will',
field=models.BooleanField(default=False),
),
]
| pattyjogal/wy_ctf_website | wy_ctf_website/training/migrations/0009_challenge_will.py | Python | mit | 469 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# This file is part of the PyNeurActiv project, which aims at providing tools
# to study and model the activity of neuronal cultures.
# Copyright (C) 2017 SENeC Initiative
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# This particular file is taken from the `atroML <http://www.astroml.org/>`
# project and is provided under a BSD license.
#
# Modified by Tanguy Fardet (6/26/2017): added min/max_width keywords
"""
Bayesian Block implementation
=============================
Dynamic programming algorithm for finding the optimal adaptive-width histogram.
Based on Scargle et al 2012 [1]_
References
----------
.. [1] http://adsabs.harvard.edu/abs/2012arXiv1207.5578S
"""
import numpy as np
class FitnessFunc(object):
'''
Base class for fitness functions
Each fitness function class has the following:
- fitness(...) : compute fitness function.
Arguments accepted by fitness must be among [T_k, N_k, a_k, b_k, c_k]
- prior(N, Ntot) : compute prior on N given a total number of points Ntot
'''
def __init__(self, p0=0.05, gamma=None):
self.p0 = p0
self.gamma = gamma
def validate_input(self, t, x, sigma):
'''
Check that input is valid
'''
pass
    def fitness(self, **kwargs):
raise NotImplementedError()
def prior(self, N, Ntot):
if self.gamma is None:
return self.p0_prior(N, Ntot)
else:
return self.gamma_prior(N, Ntot)
def p0_prior(self, N, Ntot):
# eq. 21 from Scargle 2012
return 4 - np.log(73.53 * self.p0 * (N ** -0.478))
def gamma_prior(self, N, Ntot):
'''
Basic prior, parametrized by gamma (eq. 3 in Scargle 2012)
'''
if self.gamma == 1:
return 0
else:
return (np.log(1 - self.gamma)
- np.log(1 - self.gamma ** (Ntot + 1))
+ N * np.log(self.gamma))
# the fitness_args property will return the list of arguments accepted by
# the method fitness(). This allows more efficient computation below.
@property
def args(self):
try:
# Python 2
return self.fitness.func_code.co_varnames[1:]
except AttributeError:
return self.fitness.__code__.co_varnames[1:]
class Events(FitnessFunc):
"""
Fitness for binned or unbinned events
Parameters
----------
p0 : float
False alarm probability, used to compute the prior on N
(see eq. 21 of Scargle 2012). Default prior is for p0 = 0.
gamma : float or None
If specified, then use this gamma to compute the general prior form,
p ~ gamma^N. If gamma is specified, p0 is ignored.
"""
def fitness(self, N_k, T_k):
# eq. 19 from Scargle 2012
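        # (N_k * log(N_k / T_k) is the Poisson block log-likelihood
        #  maximized over the block rate, attained at rate = N_k / T_k,
        #  up to a model-independent constant)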
pos = (N_k != 0) * (T_k != 0)
res = np.zeros(len(N_k))
res[pos] = N_k[pos] * (np.log(N_k[pos]) - np.log(T_k[pos]))
return res
def prior(self, N, Ntot):
if self.gamma is not None:
return self.gamma_prior(N, Ntot)
else:
# eq. 21 from Scargle 2012
return 4 - np.log(73.53 * self.p0 * (N ** -0.478))
class RegularEvents(FitnessFunc):
"""
Fitness for regular events
This is for data which has a fundamental "tick" length, so that all
measured values are multiples of this tick length. In each tick, there
are either zero or one counts.
Parameters
----------
dt : float
tick rate for data
gamma : float
specifies the prior on the number of bins: p ~ gamma^N
"""
def __init__(self, dt, p0=0.05, gamma=None):
self.dt = dt
self.p0 = p0
self.gamma = gamma
def validate_input(self, t, x, sigma):
unique_x = np.unique(x)
if list(unique_x) not in ([0], [1], [0, 1]):
raise ValueError("Regular events must have only 0 and 1 in x")
def fitness(self, T_k, N_k):
# Eq. 75 of Scargle 2012
M_k = T_k / self.dt
N_over_M = N_k * 1. / M_k
eps = 1E-8
if np.any(N_over_M > 1 + eps):
import warnings
warnings.warn('regular events: N/M > 1. '
'Is the time step correct?')
one_m_NM = 1 - N_over_M
N_over_M[N_over_M <= 0] = 1
one_m_NM[one_m_NM <= 0] = 1
return N_k * np.log(N_over_M) + (M_k - N_k) * np.log(one_m_NM)
class PointMeasures(FitnessFunc):
"""
Fitness for point measures
Parameters
----------
gamma : float
specifies the prior on the number of bins: p ~ gamma^N
if gamma is not specified, then a prior based on simulations
will be used (see sec 3.3 of Scargle 2012)
"""
def __init__(self, p0=None, gamma=None):
self.p0 = p0
self.gamma = gamma
def fitness(self, a_k, b_k):
# eq. 41 from Scargle 2012
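        # (the maximum of the block log-likelihood -a_k*x**2 - b_k*x,
        #  attained at the weighted block mean x = -b_k / (2 * a_k))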
return (b_k * b_k) / (4 * a_k)
def prior(self, N, Ntot):
if self.gamma is not None:
return self.gamma_prior(N, Ntot)
elif self.p0 is not None:
return self.p0_prior(N, Ntot)
else:
# eq. at end of sec 3.3 in Scargle 2012
return 1.32 + 0.577 * np.log10(N)
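

# --- Illustrative sketch (editor's addition, not part of the original) ---
# FitnessFunc is designed for subclassing: fitness() may only use argument
# names drawn from [T_k, N_k, a_k, b_k, c_k] (introspected via the ``args``
# property), and prior(N, Ntot) supplies the per-block penalty. The class
# below is a hypothetical example, usable as
# bayesian_blocks(t, fitness=ExampleFlatPriorEvents()); the flat penalty of
# 5.0 is an arbitrary assumption, not a recommended value.
class ExampleFlatPriorEvents(FitnessFunc):
    def fitness(self, N_k, T_k):
        # same event fitness as Events.fitness (eq. 19), guarded against
        # empty blocks
        pos = (N_k != 0) * (T_k != 0)
        res = np.zeros(len(N_k))
        res[pos] = N_k[pos] * (np.log(N_k[pos]) - np.log(T_k[pos]))
        return res

    def prior(self, N, Ntot):
        # constant penalty per block instead of the p0-based prior
        return 5.0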
def bayesian_blocks(t, x=None, sigma=None, fitness='events', min_width=0.,
max_width=np.inf, **kwargs):
"""
Bayesian Blocks Implementation
This is a flexible implementation of the Bayesian Blocks algorithm
described in Scargle 2012 [1]_
Parameters
----------
t : array_like
data times (one dimensional, length N)
x : array_like (optional)
data values
sigma : array_like or float (optional)
data errors
fitness : str or object
the fitness function to use.
If a string, the following options are supported:
- 'events' : binned or unbinned event data
extra arguments are `p0`, which gives the false alarm probability
to compute the prior, or `gamma` which gives the slope of the
prior on the number of bins.
For this method, additional `min_width` and `max_width` keywords
can be provided to prevent excessively large or small bins.
- 'regular_events' : non-overlapping events measured at multiples
of a fundamental tick rate, `dt`, which must be specified as an
additional argument. The prior can be specified through `gamma`,
which gives the slope of the prior on the number of bins.
- 'measures' : fitness for a measured sequence with Gaussian errors
The prior can be specified using `gamma`, which gives the slope
of the prior on the number of bins. If `gamma` is not specified,
then a simulation-derived prior will be used.
Alternatively, the fitness can be a user-specified object of
type derived from the FitnessFunc class.
min_width : float, optional (default: 0.)
Minimum width accepted for a bin.
max_width : float, optional (default: infinity)
Maximum acceptable width for a bin.
Returns
-------
edges : ndarray
array containing the (N+1) bin edges
Examples
--------
Event data:
>>> t = np.random.normal(size=100)
>>> bins = bayesian_blocks(t, fitness='events', p0=0.01)
Event data with repeats:
>>> t = np.random.normal(size=100)
>>> t[80:] = t[:20]
>>> bins = bayesian_blocks(t, fitness='events', p0=0.01)
Regular event data:
>>> dt = 0.01
>>> t = dt * np.arange(1000)
>>> x = np.zeros(len(t))
    >>> x[np.random.randint(0, len(t), len(t) // 10)] = 1
>>> bins = bayesian_blocks(t, fitness='regular_events', dt=dt, gamma=0.9)
Measured point data with errors:
>>> t = 100 * np.random.random(100)
>>> x = np.exp(-0.5 * (t - 50) ** 2)
>>> sigma = 0.1
>>> x_obs = np.random.normal(x, sigma)
    >>> bins = bayesian_blocks(t, x_obs, sigma, fitness='measures')
References
----------
.. [1] Scargle, J `et al.` (2012)
http://adsabs.harvard.edu/abs/2012arXiv1207.5578S
See Also
--------
:func:`astroML.plotting.hist` : histogram plotting function which can make
use of bayesian blocks.
"""
# validate array input
t = np.asarray(t, dtype=float)
if x is not None:
x = np.asarray(x)
if sigma is not None:
sigma = np.asarray(sigma)
# verify the fitness function
if fitness == 'events':
if x is not None and np.any(x % 1 > 0):
raise ValueError("x must be integer counts for fitness='events'")
fitfunc = Events(**kwargs)
elif fitness == 'regular_events':
if x is not None and (np.any(x % 1 > 0) or np.any(x > 1)):
raise ValueError("x must be 0 or 1 for fitness='regular_events'")
fitfunc = RegularEvents(**kwargs)
elif fitness == 'measures':
if x is None:
raise ValueError("x must be specified for fitness='measures'")
fitfunc = PointMeasures(**kwargs)
else:
if not (hasattr(fitness, 'args') and
hasattr(fitness, 'fitness') and
hasattr(fitness, 'prior')):
raise ValueError("fitness not understood")
fitfunc = fitness
# find unique values of t
t = np.array(t, dtype=float)
assert t.ndim == 1
unq_t, unq_ind, unq_inv = np.unique(t, return_index=True,
return_inverse=True)
# if x is not specified, x will be counts at each time
if x is None:
if sigma is not None:
raise ValueError("If sigma is specified, x must be specified")
if len(unq_t) == len(t):
x = np.ones_like(t)
else:
x = np.bincount(unq_inv)
t = unq_t
sigma = 1
# if x is specified, then we need to sort t and x together
else:
x = np.asarray(x)
if len(t) != len(x):
raise ValueError("Size of t and x does not match")
if len(unq_t) != len(t):
raise ValueError("Repeated values in t not supported when "
"x is specified")
t = unq_t
x = x[unq_ind]
# verify the given sigma value
N = t.size
if sigma is not None:
sigma = np.asarray(sigma)
if sigma.shape not in [(), (1,), (N,)]:
raise ValueError('sigma does not match the shape of x')
else:
sigma = 1
# validate the input
fitfunc.validate_input(t, x, sigma)
# compute values needed for computation, below
if 'a_k' in fitfunc.args:
ak_raw = np.ones_like(x) / sigma / sigma
if 'b_k' in fitfunc.args:
bk_raw = x / sigma / sigma
if 'c_k' in fitfunc.args:
ck_raw = x * x / sigma / sigma
# create length-(N + 1) array of cell edges
edges = np.concatenate([t[:1],
0.5 * (t[1:] + t[:-1]),
t[-1:]])
block_length = t[-1] - edges
# arrays to store the best configuration
best = np.zeros(N, dtype=float)
last = np.zeros(N, dtype=int)
#-----------------------------------------------------------------
# Start with first data cell; add one cell at each iteration
#-----------------------------------------------------------------
for R in range(N):
# Compute fit_vec : fitness of putative last block (end at R)
kwds = {}
# T_k: width/duration of each block
if 'T_k' in fitfunc.args:
kwds['T_k'] = np.clip(
block_length[:R + 1] - block_length[R + 1],
min_width, max_width)
# N_k: number of elements in each block
if 'N_k' in fitfunc.args:
kwds['N_k'] = np.cumsum(x[:R + 1][::-1])[::-1]
# a_k: eq. 31
if 'a_k' in fitfunc.args:
kwds['a_k'] = 0.5 * np.cumsum(ak_raw[:R + 1][::-1])[::-1]
# b_k: eq. 32
if 'b_k' in fitfunc.args:
kwds['b_k'] = - np.cumsum(bk_raw[:R + 1][::-1])[::-1]
# c_k: eq. 33
if 'c_k' in fitfunc.args:
kwds['c_k'] = 0.5 * np.cumsum(ck_raw[:R + 1][::-1])[::-1]
# evaluate fitness function
fit_vec = fitfunc.fitness(**kwds)
A_R = fit_vec - fitfunc.prior(R + 1, N)
A_R[1:] += best[:R]
i_max = np.argmax(A_R)
last[R] = i_max
best[R] = A_R[i_max]
#-----------------------------------------------------------------
# Now find changepoints by iteratively peeling off the last block
#-----------------------------------------------------------------
change_points = np.zeros(N, dtype=int)
i_cp = N
ind = N
while True:
i_cp -= 1
change_points[i_cp] = ind
if ind == 0:
break
ind = last[ind - 1]
change_points = change_points[i_cp:]
return edges[change_points]
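

if __name__ == '__main__':
    # Illustrative usage (editor's addition): exercise the min_width /
    # max_width keywords added in this fork. The sample size and the width
    # bounds are arbitrary choices for demonstration only.
    t_demo = np.sort(np.random.normal(size=200))
    edges_demo = bayesian_blocks(t_demo, fitness='events', p0=0.01,
                                 min_width=0.05, max_width=2.0)
    print('%d bin edges found' % len(edges_demo))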
| SENeC-Initiative/PyNeurActiv | analysis/bayesian_blocks.py | Python | gpl-3.0 | 13,573 |
00000 0 output/evolve2.py.err
00000 0 output/evolve2.py.out
53974 2 output/evolve2.py.series
| Conedy/Conedy | testing/dynNetwork/expected/sum_evolve2.py | Python | gpl-2.0 | 105 |
__author__ = 'TPei'
from tkinter import *
from data.manager import *
from data.DataHandler import DataHandler
class App:
def __init__(self, master):
master.wm_title("Jawbone Visualizer")
self.button_width = 25
self.description = Label(master, text="Please choose a visualization mode:")
self.description.pack()
self.sleep = Button(master,
width=self.button_width,
text="Nightly Sleep over time",
command=self.sleep)
self.sleep.pack()
self.spw = Button(master,
width=self.button_width,
text="Average Sleep per Weekday",
command=self.sleep_per_weekday)
self.spw.pack()
self.cpw = Button(master,
width=self.button_width,
text="Average Coffee per Weekday",
command=self.coffee_per_weekday)
self.cpw.pack()
self.step = Button(master,
width=self.button_width,
text="Daily Steps over time",
command=self.steps)
self.step.pack()
self.sum = Button(master,
width=self.button_width,
text="Total Steps / Sleep over time",
command=self.sums)
self.sum.pack()
self.cvs = Button(master,
width=self.button_width,
text="Coffee's effect on sleep",
command=self.coffee_vs_sleep)
self.cvs.pack()
self.svc = Button(master,
width=self.button_width,
text="Sleep's effect on coffee",
command=self.sleep_vs_coffee)
self.svc.pack()
Label(master, text="OR").pack(anchor=W)
self.composite_sleep = IntVar()
self.composite_coffee = IntVar()
self.composite_steps = IntVar()
Label(master, text="Create a composite line chart").pack(anchor=W)
Checkbutton(master, text="Sleep", variable=self.composite_sleep).pack(anchor=W)
Checkbutton(master, text="Coffee", variable=self.composite_coffee).pack(anchor=W)
Checkbutton(master, text="Steps", variable=self.composite_steps).pack(anchor=W)
self.cvs = Button(master,
width=self.button_width,
text="Create",
command=self.composite).pack()
@staticmethod
def coffee_vs_sleep():
coffee_effect_sleep(get_all_the_data())
@staticmethod
def sleep_vs_coffee():
sleep_effect_on_coffee(get_all_the_data())
@staticmethod
def sleep():
plot_sleep(get_all_the_data())
@staticmethod
def steps():
plot_step_graph()
@staticmethod
def sums():
step_sleep_total()
@staticmethod
def sleep_per_weekday():
visualize_sleep_per_weekday()
@staticmethod
def coffee_per_weekday():
visualize_coffee_per_weekday()
    def composite(self):
        # plot_all() selects series by id: 1 = sleep, 2 = steps, 3 = coffee
        values = []
        if self.composite_sleep.get() == 1:
            values.append(1)
        if self.composite_coffee.get() == 1:
            values.append(3)
        if self.composite_steps.get() == 1:
            values.append(2)
        plot_all(get_all_the_data(), values)
root = Tk()
app = App(root)
root.mainloop()
'''
visualize_sleep_per_weekday()
plot_sleep()
plot_step_graph()
plot_all(get_all_the_data())
print(get_all_the_data('awake_time'))
compareDicts(get_all_the_data(), get_all_the_data('awake_time'))
coffee_effect_sleep(get_all_the_data())
''' | TPei/jawbone_visualizer | main_gui.py | Python | mit | 3,797 |
# Copyright (C) 2016 Noah Meltzer
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
__author__ = "Noah Meltzer"
__copyright__ = "Copyright 2016, McDermott Group"
__license__ = "GPL"
__version__ = "0.0.1"
__maintainer__ = "Noah Meltzer"
__status__ = "Beta"
from MNodeEditor.MNode import MNode
from MNodeEditor.MAnchor import MAnchor
import traceback
from PyQt4 import QtCore, QtGui
from functools import partial
import numpy as np
import time
from PyQt4.QtCore import QObject, pyqtSignal, pyqtSlot, QThread
class MDeviceNode(MNode):
def __init__(self, device, *args, **kwargs):
super(MDeviceNode, self).__init__(None, *args, **kwargs)
self.device = device
self.title = str(device)
# print "Adding Node for device:", device
self.dontAddanotherparam = False
def onBegin(self):
self.device.getFrame().setNode(self)
# If the node represents a labrad device, then the title displayed on the node
# should be the same as the title of the device
self.setTitle(self.device.getFrame().getTitle())
# Without this next line, adding an anchor adds a parameter
# and adding a parameter adds an anchor which causes an infinite
# loop.
nicknames = self.device.getFrame().getNicknames()
self.dontAddanotherparam = True
for i, param in enumerate(nicknames):
self.addAnchor(MAnchor(param, self, i + 1, type='output'))
devAnchor = self.addAnchor(name='Self', type='output')
devAnchor.setData(self.getDevice())
self.dontAddanotherparam = False
# print "Adding anchor", self.getAnchors()[-1]
def isPropogateData(self):
return self.propogateData
def getDevice(self):
return self.device
def anchorAdded(self, anchor, **kwargs):
if not self.dontAddanotherparam:
self.device.addParameter(str(anchor), None, None, **kwargs)
def onRefreshData(self):
try:
for i, anchor in enumerate(self.getAnchors()):
if str(anchor) == 'Self':
continue
reading = self.device.getReading(str(anchor))
if anchor.getType() == 'output' and anchor.param != 'Self' and reading != None:
try:
data = self.device.getReading(str(anchor))
metadata = (str(anchor), self.device.getUnit(
str(anchor)), None)
if data != None:
anchor.setMetaData(metadata)
anchor.setData(data)
                except Exception:
traceback.print_exc()
elif anchor.getType() == 'input':
data = anchor.getData()
metadata = anchor.getMetaData()
if data != None:
reading = data
self.device.setReading(str(anchor), reading)
if metadata != None:
self.device.setUnit(str(anchor), metadata[1])
        except Exception:
traceback.print_exc()
| nmGit/MView | MNodeEditor/MNodes/MDeviceNode.py | Python | gpl-3.0 | 3,749 |
# -*- cpy-indent-level: 4; indent-tabs-mode: nil -*-
# ex: set expandtab softtabstop=4 shiftwidth=4:
#
# Copyright (C) 2012,2013 Contributor
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from aquilon.exceptions_ import ArgumentError
from aquilon.aqdb.model import ARecord
from aquilon.aqdb.model.network import get_net_id_from_ip
from aquilon.aqdb.model.network_environment import get_net_dns_env
from aquilon.worker.broker import BrokerCommand # pylint: disable=W0611
from aquilon.worker.dbwrappers.dns import (set_reverse_ptr,
delete_target_if_needed)
from aquilon.worker.processes import DSDBRunner
class CommandUpdateAddress(BrokerCommand):
def render(self, session, logger, fqdn, ip, reverse_ptr, dns_environment,
network_environment, comments, **arguments):
dbnet_env, dbdns_env = get_net_dns_env(session, network_environment,
dns_environment)
dbdns_rec = ARecord.get_unique(session, fqdn=fqdn,
dns_environment=dbdns_env, compel=True)
old_ip = dbdns_rec.ip
old_comments = dbdns_rec.comments
if ip:
if dbdns_rec.hardware_entity:
raise ArgumentError("{0} is a primary name, and its IP address "
"cannot be changed.".format(dbdns_rec))
if dbdns_rec.assignments:
ifaces = ", ".join(["%s/%s" % (addr.interface.hardware_entity,
addr.interface)
for addr in dbdns_rec.assignments])
raise ArgumentError("{0} is already used by the following "
"interfaces, and its IP address cannot be "
"changed: {1!s}."
.format(dbdns_rec, ifaces))
dbnetwork = get_net_id_from_ip(session, ip, dbnet_env)
q = session.query(ARecord)
q = q.filter_by(network=dbnetwork)
q = q.filter_by(ip=ip)
q = q.join(ARecord.fqdn)
q = q.filter_by(dns_environment=dbdns_env)
existing = q.first()
if existing:
raise ArgumentError("IP address {0!s} is already used by "
"{1:l}." .format(ip, existing))
dbdns_rec.network = dbnetwork
old_ip = dbdns_rec.ip
dbdns_rec.ip = ip
if reverse_ptr:
old_reverse = dbdns_rec.reverse_ptr
set_reverse_ptr(session, logger, dbdns_rec, reverse_ptr)
if old_reverse and old_reverse != dbdns_rec.reverse_ptr:
delete_target_if_needed(session, old_reverse)
if comments:
dbdns_rec.comments = comments
session.flush()
if dbdns_env.is_default and (dbdns_rec.ip != old_ip or
dbdns_rec.comments != old_comments):
dsdb_runner = DSDBRunner(logger=logger)
dsdb_runner.update_host_details(dbdns_rec.fqdn, new_ip=dbdns_rec.ip,
old_ip=old_ip,
new_comments=dbdns_rec.comments,
old_comments=old_comments)
dsdb_runner.commit_or_rollback()
return
| jrha/aquilon | lib/python2.6/aquilon/worker/commands/update_address.py | Python | apache-2.0 | 3,917 |
# -*- coding: utf-8 -*-
"""
Set up or update Jenkins Jobs
=============================
Update Jenkins jobs for GitHub repositories.
"""
#########################################
# import
import sys
import os
from pyquickhelper.loghelper import get_password
#########################################
# logging
from pyquickhelper.loghelper import fLOG # publish_lectures
fLOG(OutputPrint=True)
#########################################
# import the functions we need
from pyquickhelper.jenkinshelper import JenkinsExt
from ensae_teaching_cs.automation.jenkins_helper import setup_jenkins_server, engines_default
#########################################
# retrieve the Jenkins credentials
user = get_password("jenkins", "_automation,user")
pwd = get_password("jenkins", "_automation,pwd")
#########################################
# instantiate a class that interfaces with the service
platform = sys.platform
if platform.startswith("win"):
location = "d:\\jenkins\\pymy"
else:
location = "/var/lib/jenkins/workspace"
js = JenkinsExt('http://localhost:8080/', user, pwd, platform=platform,
fLOG=fLOG, engines=engines_default(platform=platform))
#########################################
# update the jobs
setup_jenkins_server(js, overwrite=True,
delete_first=False,
location="d:\\jenkins\\pymy",
disable_schedule=False)
| sdpython/ensae_teaching_cs | _doc/examples/automation/jenkins_setup.py | Python | mit | 1,450 |
# Copyright (c) 2013 David Holm <dholmster@gmail.com>
# This file is part of SimpleGUITk - https://github.com/dholm/simpleguitk
# See the file 'COPYING' for copying permission.
from .plot import plot_lines
| dholm/simpleguitk | simpleplot/__init___flymake.py | Python | bsd-3-clause | 207 |
"""Unit tests for the solve function on manifolds
embedded in higher dimensional spaces."""
# Copyright (C) 2012 Imperial College London and others.
#
# This file is part of DOLFIN.
#
# DOLFIN is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# DOLFIN is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with DOLFIN. If not, see <http://www.gnu.org/licenses/>.
#
# Modified by David Ham 2012
#
# First added: 2012-12-06
# Last changed: 2012-12-06
# MER: The solving test should be moved into test/regression/..., the
# evaluatebasis part should be moved into test/unit/FiniteElement.py
import unittest
from dolfin import *
from itertools import izip
import numpy
# Subdomain to extract bottom boundary.
class BottomEdge(SubDomain):
def inside(self, x, on_boundary):
return near(x[2], 0.0)
class Rotation(object):
"""Class implementing rotations of the unit plane through an angle
of phi about the x axis followed by theta about the z axis."""
def __init__(self, phi, theta):
self.theta = theta
self.mat = numpy.dot(self._zmat(theta), self._xmat(phi))
self.invmat = numpy.dot(self._xmat(-phi), self._zmat(-theta))
def _zmat(self, theta):
return numpy.array([[numpy.cos(theta), -numpy.sin(theta), 0.0],
[numpy.sin(theta), numpy.cos(theta), 0.0],
[0.0, 0.0, 1.0]])
def _xmat(self, phi):
return numpy.array([[1.0, 0.0, 0.0],
[0.0, numpy.cos(phi), -numpy.sin(phi)],
[0.0, numpy.sin(phi), numpy.cos(phi)]])
def to_plane(self, x):
"""Map the point x back to the horizontal plane."""
return numpy.dot(self.invmat, x)
def x(self, i):
"""Produce a C expression for the ith component
of the image of x mapped back to the horizontal plane."""
return "("+" + ".join(["%.17f * x[%d]" % (a, j)
for (j,a) in enumerate(self.invmat[i,:])])+")"
def rotate(self, mesh):
"""Rotate mesh through phi then theta."""
mesh.coordinates()[:,:] = \
numpy.dot(mesh.coordinates()[:,:], self.mat.T)
def rotate_point(self, point):
"""Rotate point through phi then theta."""
return numpy.dot(point, self.mat.T)
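# A minimal illustrative check of the Rotation helper (values are examples
# only): mapping a rotated point back to the plane recovers the original,
# since invmat is the inverse of mat.
#
#     rot = Rotation(numpy.pi/4, numpy.pi/4)
#     p = numpy.array([1.0, 0.0, 0.0])
#     assert numpy.allclose(rot.to_plane(rot.rotate_point(p)), p)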
def poisson_2d():
# Create mesh and define function space
mesh = UnitSquareMesh(32, 32)
V = FunctionSpace(mesh, "Lagrange", 1)
# Define Dirichlet boundary (x = 0 or x = 1)
def boundary(x):
return x[0] < DOLFIN_EPS or x[0] > 1.0 - DOLFIN_EPS
# Define boundary condition
u0 = Constant(0.0)
bc = DirichletBC(V, u0, boundary)
# Define variational problem
u = TrialFunction(V)
v = TestFunction(V)
f = Expression("10*exp(-(pow(x[0] - 0.5, 2) + pow(x[1] - 0.5, 2)) / 0.02)")
g = Expression("sin(5*x[0])")
a = inner(grad(u), grad(v))*dx
L = f*v*dx + g*v*ds
# Compute solution
u = Function(V)
solve(a == L, u, bc)
return u
def poisson_manifold():
# Create mesh
cubemesh = UnitCubeMesh(32,32,2)
boundarymesh = BoundaryMesh(cubemesh, "exterior")
mesh = SubMesh(boundarymesh, BottomEdge())
rotation = Rotation(numpy.pi/4, numpy.pi/4)
rotation.rotate(mesh)
# Define function space
V = FunctionSpace(mesh, "Lagrange", 1)
# Define Dirichlet boundary (x = 0 or x = 1)
def boundary(x):
return rotation.to_plane(x)[0] < DOLFIN_EPS or \
rotation.to_plane(x)[0] > 1.0 - DOLFIN_EPS
# Define boundary condition
u0 = Constant(0.0)
bc = DirichletBC(V, u0, boundary)
# Define variational problem
u = TrialFunction(V)
v = TestFunction(V)
f = Expression(("10*exp(-(pow(x[0] - %.17f, 2) "
+ " + pow(x[1] - %.17f, 2)"
+ " + pow(x[2] - %.17f, 2)) / 0.02)")\
% tuple(rotation.rotate_point([0.5,0.5,0])))
g = Expression("sin(5*%s)"%rotation.x(0))
a = inner(grad(u), grad(v))*dx
L = f*v*dx + g*v*ds
# Compute solution
u = Function(V)
solve(a == L, u, bc)
return u
def rotate_2d_mesh(theta):
"""Unit square mesh in 2D rotated through theta about the x and z axes."""
cubemesh = UnitCubeMesh(1,1,1)
boundarymesh = BoundaryMesh(cubemesh, "exterior")
mesh = SubMesh(boundarymesh, BottomEdge())
mesh.init_cell_orientations(Expression(("0.","0.","1.")))
rotation = Rotation(theta, theta)
rotation.rotate(mesh)
return mesh
class ManifoldSolving(unittest.TestCase):
def test_poisson2D_in_3D(self):
"""This test solves Poisson's equation on a unit square in 2D,
and then on a unit square embedded in 3D and rotated pi/4
radians about each of the z and x axes."""
# Boundary mesh not working in parallel
if MPI.num_processes() > 1:
return
u_2D = poisson_2d()
u_manifold = poisson_manifold()
self.assertAlmostEqual(u_2D.vector().norm("l2"),
u_manifold.vector().norm("l2"), 10)
self.assertAlmostEqual(u_2D.vector().max(),
u_manifold.vector().max(), 10)
self.assertAlmostEqual(u_2D.vector().min(),
u_manifold.vector().min(), 10)
class ManifoldBasisEvaluation(unittest.TestCase):
def test_basis_evaluation_2D_in_3D(self):
"""This test checks that basis functions and their derivatives are
unaffected by rotations.
"""
# Boundary mesh not working in parallel
if MPI.num_processes() > 1:
return
self.basemesh = rotate_2d_mesh(0.0)
self.rotmesh = rotate_2d_mesh(numpy.pi/4)
self.rotation = Rotation(numpy.pi/4, numpy.pi/4)
for i in range(4):
self.basis_test("CG", i + 1)
for i in range(5):
self.basis_test("DG", i)
for i in range(4):
self.basis_test("RT", i + 1, piola=True)
for i in range(4):
self.basis_test("BDM", i + 1, piola=True)
for i in range(4):
self.basis_test("N1curl", i + 1, piola=True)
self.basis_test("BDFM", 2, piola=True)
def basis_test(self, family, degree, piola=False):
# Boundary mesh not working in parallel
if MPI.num_processes() > 1:
return
parameters["form_compiler"]["no-evaluate_basis_derivatives"] = False
f_base = FunctionSpace(self.basemesh, family, degree)
f_rot = FunctionSpace(self.rotmesh, family, degree)
points = numpy.array([[1.0, 1.0, 0.0],
[0.5, 0.5, 0.0],
[0.3, 0.7, 0.0],
[0.4, 0.0, 0.0]])
for cell_base, cell_rot in izip(cells(self.basemesh), cells(self.rotmesh)):
values_base = numpy.zeros(f_base.element().value_dimension(0))
derivs_base = numpy.zeros(f_base.element().value_dimension(0)*3)
values_rot = numpy.zeros(f_rot.element().value_dimension(0))
derivs_rot = numpy.zeros(f_rot.element().value_dimension(0)*3)
# Get cell vertices
vertex_coordinates_base = cell_base.get_vertex_coordinates()
vertex_coordinates_rot = cell_rot.get_vertex_coordinates()
for i in range(f_base.element().space_dimension()):
for point in points:
f_base.element().evaluate_basis(i, values_base,
point,
vertex_coordinates_base,
cell_base.orientation())
f_base.element().evaluate_basis_derivatives(i, 1, derivs_base,
point, vertex_coordinates_base,
cell_base.orientation())
f_rot.element().evaluate_basis(i, values_rot,
self.rotation.rotate_point(point),
vertex_coordinates_rot,
cell_rot.orientation())
                    f_rot.element().evaluate_basis_derivatives(i, 1, derivs_rot,
                                                               self.rotation.rotate_point(point),
                                                               vertex_coordinates_rot,
                                                               cell_rot.orientation())
if piola:
values_cmp = self.rotation.rotate_point(values_base)
derivs_rot2 = derivs_rot.reshape(f_rot.element().value_dimension(0),3)
derivs_base2 = derivs_base.reshape(f_base.element().value_dimension(0),3)
# If D is the unrotated derivative tensor, then RDR^T is the rotated version.
derivs_cmp = numpy.dot(self.rotation.mat, self.rotation.rotate_point(derivs_base2))
else:
values_cmp = values_base
# Rotate the derivative for comparison.
derivs_cmp = self.rotation.rotate_point(derivs_base)
derivs_rot2 = derivs_rot
self.assertAlmostEqual(abs(derivs_rot2-derivs_cmp).max(), 0.0, 10)
self.assertAlmostEqual(abs(values_cmp-values_rot).max(), 0.0, 10)
if __name__ == "__main__":
print ""
print "Testing solving and evaluate basis over manifolds"
print "-------------------------------------------------"
unittest.main()
| maciekswat/dolfin_1.3.0 | test/unit/fem/python/manifolds.py | Python | gpl-3.0 | 10,270 |
#!/usr/bin/env python
import smtplib
import os
import ConfigParser
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
confpath = os.path.dirname(__file__)
config = ConfigParser.ConfigParser()
config.read(confpath + "/sendmail.conf")
def sendmail(subject=None,strmsg=None):
'''
Sends an email
'''
# me == my email address
# you == recipient's email address
#Credentials
username = config.get("Config","usrEmail")
password = config.get("Config","usrEmailPwd")
me = config.get("Config","fromAddr")
    # EmailList is assumed to be a comma-separated list of addresses
    emaillist = [addr.strip() for addr in config.get("Config", "EmailList").split(",")]
strmailgw = config.get("Config","smtp")
# Create message container - the correct MIME type is multipart/alternative.
msg = MIMEMultipart('alternative')
msg['Subject'] = subject
msg['From'] = me
msg['To'] = ', '.join(emaillist)
# Create the body of the message (a plain-text and an HTML version).
text = strmsg
html = """<html><head></head><body><p>Hi!<br></p></body></html>"""
# Record the MIME types of both parts - text/plain and text/html.
part1 = MIMEText(text, 'plain')
part2 = MIMEText(html, 'html')
# Attach parts into message container.
# According to RFC 2046, the last part of a multipart message, in this case
# the HTML message, is best and preferred.
msg.attach(part1)
#msg.attach(part2)
# Send the message via local SMTP server.
s = smtplib.SMTP(strmailgw)
#Enable secure connection to gmail smtp
s.starttls()
#Set credentials to send email via gmail
s.login(username,password)
# sendmail function takes 3 arguments: sender's address, recipient's address
# and message to send - here it is sent as one string.
s.sendmail(me, emaillist, msg.as_string())
s.quit() | romeroj1/potential-batman | scripts/python/sendemail.py | Python | apache-2.0 | 1,831 |
"""
Local settings for nomadgram project.
- Run in Debug mode
- Use mailhog for emails
- Add Django Debug Toolbar
- Add django-extensions as app
"""
from .base import * # noqa
# DEBUG
# ------------------------------------------------------------------------------
DEBUG = env.bool('DJANGO_DEBUG', default=True)
TEMPLATES[0]['OPTIONS']['debug'] = DEBUG
# SECRET CONFIGURATION
# ------------------------------------------------------------------------------
# See: https://docs.djangoproject.com/en/dev/ref/settings/#secret-key
# Note: This key only used for development and testing.
SECRET_KEY = env('DJANGO_SECRET_KEY', default='YNEv4Fh)|AhEI)A;,m[K7X*H*Nm%>pHvaF%a8@$E7}0A#+#Jc_')
# Mail settings
# ------------------------------------------------------------------------------
EMAIL_PORT = 1025
EMAIL_HOST = 'localhost'
# CACHING
# ------------------------------------------------------------------------------
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
'LOCATION': ''
}
}
# django-debug-toolbar
# ------------------------------------------------------------------------------
MIDDLEWARE += ['debug_toolbar.middleware.DebugToolbarMiddleware', ]
INSTALLED_APPS += ['debug_toolbar', ]
INTERNAL_IPS = ['127.0.0.1', '10.0.2.2', ]
DEBUG_TOOLBAR_CONFIG = {
'DISABLE_PANELS': [
'debug_toolbar.panels.redirects.RedirectsPanel',
],
'SHOW_TEMPLATE_CONTEXT': True,
}
# django-extensions
# ------------------------------------------------------------------------------
INSTALLED_APPS += ['django_extensions', ]
# TESTING
# ------------------------------------------------------------------------------
TEST_RUNNER = 'django.test.runner.DiscoverRunner'
# Your local stuff: Below this line define 3rd party library settings
# ------------------------------------------------------------------------------
| bokjk/nomadgram | config/settings/local.py | Python | mit | 1,897 |
# -*- coding: utf-8 -*-
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Generated code. DO NOT EDIT!
#
# Snippet for UpdateIntent
# NOTE: This snippet has been automatically generated for illustrative purposes only.
# It may require modifications to work in your environment.
# To install the latest published package dependency, execute the following:
# python3 -m pip install google-cloud-dialogflowcx
# [START dialogflow_v3beta1_generated_Intents_UpdateIntent_sync]
from google.cloud import dialogflowcx_v3beta1
def sample_update_intent():
# Create a client
client = dialogflowcx_v3beta1.IntentsClient()
# Initialize request argument(s)
intent = dialogflowcx_v3beta1.Intent()
intent.display_name = "display_name_value"
request = dialogflowcx_v3beta1.UpdateIntentRequest(
intent=intent,
)
# Make the request
response = client.update_intent(request=request)
# Handle the response
print(response)
# [END dialogflow_v3beta1_generated_Intents_UpdateIntent_sync]
| googleapis/python-dialogflow-cx | samples/generated_samples/dialogflow_v3beta1_generated_intents_update_intent_sync.py | Python | apache-2.0 | 1,554 |
# -*- coding: utf-8 -*-
"""Preprocessing related functions and classes for testing."""
from artifacts import reader as artifacts_reader
from artifacts import registry as artifacts_registry
from dfvfs.helpers import fake_file_system_builder
from dfvfs.helpers import file_system_searcher
from dfvfs.lib import definitions as dfvfs_definitions
from dfvfs.path import factory as path_spec_factory
from dfwinreg import registry as dfwinreg_registry
from dfwinreg import registry_searcher
from plaso.containers import artifacts
from plaso.containers import sessions
from plaso.engine import knowledge_base
from plaso.preprocessors import manager
from plaso.preprocessors import mediator
from plaso.storage.fake import writer as fake_writer
from tests import test_lib as shared_test_lib
class ArtifactPreprocessorPluginTestCase(shared_test_lib.BaseTestCase):
"""Artifact preprocessor plugin test case."""
@classmethod
def setUpClass(cls):
"""Makes preparations before running any of the tests."""
artifacts_path = shared_test_lib.GetTestFilePath(['artifacts'])
cls._artifacts_registry = artifacts_registry.ArtifactDefinitionsRegistry()
reader = artifacts_reader.YamlArtifactsReader()
cls._artifacts_registry.ReadFromDirectory(reader, artifacts_path)
def _CreateTestStorageWriter(self):
"""Creates a storage writer for testing purposes.
Returns:
StorageWriter: storage writer.
"""
storage_writer = fake_writer.FakeStorageWriter()
storage_writer.Open()
return storage_writer
def _RunPreprocessorPluginOnFileSystem(
self, file_system, mount_point, storage_writer, plugin):
"""Runs a preprocessor plugin on a file system.
Args:
file_system (dfvfs.FileSystem): file system to be preprocessed.
mount_point (dfvfs.PathSpec): mount point path specification that refers
to the base location of the file system.
storage_writer (StorageWriter): storage writer.
plugin (ArtifactPreprocessorPlugin): preprocessor plugin.
    Returns:
PreprocessMediator: preprocess mediator.
"""
artifact_definition = self._artifacts_registry.GetDefinitionByName(
plugin.ARTIFACT_DEFINITION_NAME)
self.assertIsNotNone(artifact_definition)
session = sessions.Session()
test_knowledge_base = knowledge_base.KnowledgeBase()
test_mediator = mediator.PreprocessMediator(
session, storage_writer, test_knowledge_base)
searcher = file_system_searcher.FileSystemSearcher(file_system, mount_point)
plugin.Collect(test_mediator, artifact_definition, searcher, file_system)
return test_mediator
def _RunPreprocessorPluginOnWindowsRegistryValue(
self, file_system, mount_point, storage_writer, plugin):
"""Runs a preprocessor plugin on a Windows Registry value.
Args:
file_system (dfvfs.FileSystem): file system to be preprocessed.
mount_point (dfvfs.PathSpec): mount point path specification that refers
to the base location of the file system.
storage_writer (StorageWriter): storage writer.
plugin (ArtifactPreprocessorPlugin): preprocessor plugin.
    Returns:
PreprocessMediator: preprocess mediator.
"""
artifact_definition = self._artifacts_registry.GetDefinitionByName(
plugin.ARTIFACT_DEFINITION_NAME)
self.assertIsNotNone(artifact_definition)
environment_variable = artifacts.EnvironmentVariableArtifact(
case_sensitive=False, name='SystemRoot', value='C:\\Windows')
registry_file_reader = manager.FileSystemWinRegistryFileReader(
file_system, mount_point, environment_variables=[environment_variable])
win_registry = dfwinreg_registry.WinRegistry(
registry_file_reader=registry_file_reader)
session = sessions.Session()
test_knowledge_base = knowledge_base.KnowledgeBase()
test_mediator = mediator.PreprocessMediator(
session, storage_writer, test_knowledge_base)
searcher = registry_searcher.WinRegistrySearcher(win_registry)
plugin.Collect(test_mediator, artifact_definition, searcher)
return test_mediator
def _RunPreprocessorPluginOnWindowsRegistryValueSoftware(
self, storage_writer, plugin):
"""Runs a preprocessor plugin on a Windows Registry value in SOFTWARE.
Args:
storage_writer (StorageWriter): storage writer.
plugin (ArtifactPreprocessorPlugin): preprocessor plugin.
    Returns:
PreprocessMediator: preprocess mediator.
"""
file_system_builder = fake_file_system_builder.FakeFileSystemBuilder()
test_file_path = self._GetTestFilePath(['SOFTWARE'])
file_system_builder.AddFileReadData(
'/Windows/System32/config/SOFTWARE', test_file_path)
mount_point = path_spec_factory.Factory.NewPathSpec(
dfvfs_definitions.TYPE_INDICATOR_FAKE, location='/')
return self._RunPreprocessorPluginOnWindowsRegistryValue(
file_system_builder.file_system, mount_point, storage_writer, plugin)
def _RunPreprocessorPluginOnWindowsRegistryValueSystem(
self, storage_writer, plugin):
"""Runs a preprocessor plugin on a Windows Registry value in SYSTEM.
Args:
storage_writer (StorageWriter): storage writer.
plugin (ArtifactPreprocessorPlugin): preprocessor plugin.
    Returns:
PreprocessMediator: preprocess mediator.
"""
file_system_builder = fake_file_system_builder.FakeFileSystemBuilder()
test_file_path = self._GetTestFilePath(['SYSTEM'])
file_system_builder.AddFileReadData(
'/Windows/System32/config/SYSTEM', test_file_path)
mount_point = path_spec_factory.Factory.NewPathSpec(
dfvfs_definitions.TYPE_INDICATOR_FAKE, location='/')
return self._RunPreprocessorPluginOnWindowsRegistryValue(
file_system_builder.file_system, mount_point, storage_writer, plugin)
| joachimmetz/plaso | tests/preprocessors/test_lib.py | Python | apache-2.0 | 5,825 |
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from . import crm_lead
from . import mailing_mailing
from . import utm
| jeremiahyan/odoo | addons/mass_mailing_crm/models/__init__.py | Python | gpl-3.0 | 171 |
#!/usr/bin/python
# Copyright 2012 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy
# of the License at: http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software distrib-
# uted under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
# OR CONDITIONS OF ANY KIND, either express or implied. See the License for
# specific language governing permissions and limitations under the License.
"""Tests for index.py."""
__author__ = 'kpy@google.com (Ka-Ping Yee)'
import webapp2
import webob
import config
import domains
import test_utils
class IndexTest(test_utils.BaseTest):
"""Tests for the Index request handler."""
def testRedirectWithCrisisParam(self):
"""Tests GetDestination with old-style id= and crisis= parameters."""
self.assertEquals(
'http://app.com/root/abc?layers=def',
self.DoGet('/?id=abc&layers=def', 302).headers['Location'])
self.assertEquals(
'http://app.com/root/abc?layers=def',
self.DoGet('/?crisis=abc&layers=def', 302).headers['Location'])
def testRedirectDefault(self):
"""Tests GetDestination with no label parameter."""
self.assertEquals('http://app.com/root/empty',
self.DoGet('', 302).headers['Location'])
self.assertEquals('http://app.com/root/empty?layers=x',
self.DoGet('/?layers=x', 302).headers['Location'])
with test_utils.RootLogin():
domains.Domain.Put('xyz.com', default_label='qwerty')
self.assertEquals('http://app.com/root/qwerty?layers=x',
self.DoGet('/?layers=x', 302).headers['Location'])
with test_utils.RootLogin():
domains.Domain.Put('foo.org', default_label='fancy-label')
response = self.DoGet('/foo.org/?layers=x', 302)
self.assertEquals('http://app.com/root/foo.org/fancy-label?layers=x',
response.headers['Location'])
if __name__ == '__main__':
test_utils.main()
| pnakka/googlecrisismap | index_test.py | Python | apache-2.0 | 2,127 |
"""
Render a set of GeoTIFF files to images.
Stretched renderers may have one of the following colormap values:
1.0 (absolute)
max (calculate max across datasets)
0.5*max (calculate max across datasets, and multiply by value)
"""
import importlib
import os
import glob
import click
import json
import numpy
from PIL.Image import ANTIALIAS, NEAREST
from pyproj import Proj
import rasterio
from rasterio.warp import reproject, calculate_default_transform
from rasterio.enums import Resampling
from trefoil.utilities.color import Color
from trefoil.render.renderers.stretched import StretchedRenderer
from trefoil.render.renderers.unique import UniqueValuesRenderer
from trefoil.render.renderers.utilities import renderer_from_dict
from trefoil.netcdf.utilities import collect_statistics
from trefoil.geometry.bbox import BBox
from trefoil.cli import cli
def _colormap_to_stretched_renderer(colormap, colorspace='hsv', filenames=None, variable=None):
statistics = None
    if 'min:' in colormap or 'max:' in colormap or 'mean' in colormap:
        if not (filenames and variable):
            raise ValueError('filenames and variable are required inputs to use colormap with statistics')
        statistics = collect_statistics(filenames, (variable,))[variable]
        for value in ('min', 'max', 'mean'):
            colormap = colormap.replace(value, str(statistics[value]))
return StretchedRenderer(_parse_colormap(colormap), colorspace=colorspace)
def _parse_colormap(colormap_str):
colormap = []
for entry in colormap_str.split(','):
value, color = entry.split(':')
colormap.append((float(value), Color.from_hex(color)))
return colormap
def _palette_to_stretched_renderer(palette_path, values, filenames=None, variable=None):
index = palette_path.rindex('.')
palette = getattr(importlib.import_module('palettable.' + palette_path[:index]), palette_path[index+1:])
values = values.split(',')
if not len(values) > 1:
raise ValueError('Must provide at least 2 values for palette-based stretched renderer')
statistics = None
    if 'min' in values or 'max' in values:
        if not (filenames and variable):
            raise ValueError('filenames and variable are required inputs to use palette with statistics')
        statistics = collect_statistics(filenames, (variable,))[variable]
        for statistic in ('min', 'max'):
            if statistic in values:
                values[values.index(statistic)] = statistics[statistic]
    values = [float(v) for v in values]
hex_colors = palette.hex_colors
# TODO: this only works cleanly for min:max or 2 endpoint values. Otherwise require that the number of palette colors match the number of values
colors = [(values[0], Color.from_hex(hex_colors[0]))]
intermediate_colors = hex_colors[1:-1]
if intermediate_colors:
interval = (values[-1] - values[0]) / (len(intermediate_colors) + 1)
for i, color in enumerate(intermediate_colors):
colors.append((values[0] + (i + 1) * interval, Color.from_hex(color)))
colors.append((values[-1], Color.from_hex(hex_colors[-1])))
return StretchedRenderer(colors, colorspace='rgb') # I think all palettable palettes are in RGB ramps
def render_image(renderer, data, filename, scale=1, reproject_kwargs=None):
if reproject_kwargs is not None:
with rasterio.Env():
out = numpy.empty(shape=reproject_kwargs['dst_shape'], dtype=data.dtype)
out.fill(data.fill_value)
reproject(data, out, **reproject_kwargs)
# Reapply mask
data = numpy.ma.masked_array(out, mask=out == data.fill_value)
resampling = ANTIALIAS
if renderer.name == 'unique':
resampling = NEAREST
img = renderer.render_image(data)
if scale != 1:
img = img.resize((numpy.array(data.shape[::-1]) * scale).astype(numpy.uint), resampling)
img.save(filename)
@cli.command(short_help="Render Single-Band GeoTIFF files to images")
@click.argument('filename_pattern')
@click.argument('output_directory', type=click.Path())
@click.option('--renderer_file', help='File containing renderer JSON', type=click.Path())
@click.option('--save', default=False, is_flag=True, help='Save renderer to renderer_file')
@click.option('--renderer_type', default='stretched', type=click.Choice(['stretched', 'unique']), help='Name of renderer [default: stretched]. (other types not yet implemented)')
@click.option('--colormap', default='min:#000000,max:#FFFFFF', help='Provide colormap as comma-separated lookup of value to hex color code. (Example: -1:#FF0000,1:#0000FF) [default: min:#000000,max:#FFFFFF]')
@click.option('--colorspace', default='hsv', type=click.Choice(['hsv', 'rgb']), help='Color interpolation colorspace')
@click.option('--palette', default=None, help='Palettable color palette (Example: colorbrewer.sequential.Blues_3)')
@click.option('--scale', default=1.0, help='Scale factor for data pixel to screen pixel size')
@click.option('--id_variable', help='ID variable used to provide IDs during image generation. Must be of same dimensionality as first dimension of variable (example: time)')
@click.option('--lh', default=150, help='Height of the legend in pixels [default: 150]')
@click.option('--legend_breaks', default=None, type=click.INT, help='Number of breaks to show on legend for stretched renderer')
@click.option('--legend_ticks', default=None, type=click.STRING, help='Legend tick values for stretched renderer')
# Projection related options
@click.option('--src_crs', default=None, type=click.STRING, help='Source coordinate reference system (limited to EPSG codes, e.g., EPSG:4326). Will be read from file if not provided.')
@click.option('--dst_crs', default=None, type=click.STRING, help='Destination coordinate reference system')
@click.option('--res', default=None, type=click.FLOAT, help='Destination pixel resolution in destination coordinate system units' )
@click.option('--resampling', default='nearest', type=click.Choice(('nearest', 'cubic', 'lanczos', 'mode')), help='Resampling method for reprojection (default: nearest)')
@click.option('--anchors', default=False, is_flag=True, help='Print anchor coordinates for use in Leaflet ImageOverlay')
# TODO: option with transform info if not a geo format
def render_tif(
filename_pattern,
output_directory,
renderer_file,
save,
renderer_type,
colormap,
colorspace,
palette,
scale,
id_variable,
lh,
legend_breaks,
legend_ticks,
src_crs,
dst_crs,
res,
resampling,
anchors):
"""
Render single-band GeoTIFF files to images.
colormap is ignored if renderer_file is provided
"""
filenames = glob.glob(filename_pattern)
if not filenames:
raise click.BadParameter('No files found matching that pattern', param='filename_pattern', param_hint='FILENAME_PATTERN')
if not os.path.exists(output_directory):
os.makedirs(output_directory)
if renderer_file is not None and not save:
if not os.path.exists(renderer_file):
raise click.BadParameter('does not exist', param='renderer_file', param_hint='renderer_file')
# see https://bitbucket.org/databasin/ncdjango/wiki/Home for format
renderer_dict = json.loads(open(renderer_file).read())
# if renderer_dict['type'] == 'stretched':
# colors = ','.join([str(c[0]) for c in renderer_dict['colors']])
# if 'min' in colors or 'max' in colors or 'mean' in colors:
# statistics = collect_statistics(filenames, (variable,))[variable]
# for entry in renderer_dict['colors']:
# if isinstance(entry[0], basestring):
# if entry[0] in ('min', 'max', 'mean'):
# entry[0] = statistics[entry[0]]
# elif '*' in entry[0]:
# rel_value, statistic = entry[0].split('*')
# entry[0] = float(rel_value) * statistics[statistic]
renderer = renderer_from_dict(renderer_dict)
else:
if renderer_type == 'stretched':
# if palette is not None:
# renderer = _palette_to_stretched_renderer(palette, 'min,max', filenames, variable)
#
# else:
renderer = _colormap_to_stretched_renderer(colormap, colorspace, filenames)
elif renderer_type == 'unique':
renderer = UniqueValuesRenderer(_parse_colormap(colormap), colorspace)
else:
raise NotImplementedError('other renderers not yet built')
# if save:
# if not renderer_file:
# raise click.BadParameter('must be provided to save', param='renderer_file', param_hint='renderer_file')
#
# if os.path.exists(renderer_file):
# with open(renderer_file, 'r+') as output_file:
# data = json.loads(output_file.read())
# output_file.seek(0)
# output_file.truncate()
# data[variable] = renderer.serialize()
# output_file.write(json.dumps(data, indent=4))
# else:
# with open(renderer_file, 'w') as output_file:
# output_file.write(json.dumps({variable: renderer.serialize()}))
    if renderer_type == 'stretched':
if legend_ticks is not None and not legend_breaks:
legend_ticks = [float(v) for v in legend_ticks.split(',')]
legend = renderer.get_legend(image_height=lh, breaks=legend_breaks, ticks=legend_ticks, max_precision=2)[0].to_image()
elif renderer_type == 'unique':
legend = renderer.get_legend(image_height=lh)[0].to_image()
legend.save(os.path.join(output_directory, 'legend.png'))
for filename in filenames:
with rasterio.open(filename) as ds:
print('Processing',filename)
filename_root = os.path.split(filename)[1].replace('.nc', '')
data = ds.read(1, masked=True)
# # get transforms, assume last 2 dimensions on variable are spatial in row, col order
# y_dim, x_dim = ds.variables[variable].dimensions[-2:]
# y_len, x_len = data.shape[-2:]
# coords = SpatialCoordinateVariables.from_dataset(ds, x_dim, y_dim)#, projection=Proj(src_crs))
#
# if coords.y.is_ascending_order():
# data = data[::-1]
#
reproject_kwargs = None
if dst_crs is not None:
# TODO: extract this out into a general trefoil reprojection function
ds_crs = ds.crs
if not (src_crs or ds_crs):
raise click.BadParameter('must provide src_crs to reproject', param='src_crs', param_hint='src_crs')
dst_crs = {'init': dst_crs}
src_crs = ds_crs if ds_crs else {'init': src_crs}
            left, bottom, right, top = ds.bounds
dst_affine, dst_width, dst_height = calculate_default_transform(left, bottom, right, top, ds.width, ds.height, src_crs, dst_crs)
dst_shape = (dst_height, dst_width)
# proj_bbox = coords.bbox.project(Proj(dst_crs))
#
# x_dif = proj_bbox.xmax - proj_bbox.xmin
# y_dif = proj_bbox.ymax - proj_bbox.ymin
#
# total_len = float(x_len + y_len)
# # Cellsize is dimension weighted average of x and y dimensions per projected pixel, unless otherwise provided
# avg_cellsize = ((x_dif / float(x_len)) * (float(x_len) / total_len)) + ((y_dif / float(y_len)) * (float(y_len) / total_len))
#
# cellsize = res or avg_cellsize
# dst_affine = Affine(cellsize, 0, proj_bbox.xmin, 0, -cellsize, proj_bbox.ymax)
# dst_shape = (
# max(int(ceil((y_dif) / cellsize)), 1), # height
# max(int(ceil(x_dif / cellsize)), 1) # width
# )
# TODO: replace with method in rasterio
reproject_kwargs = {
'src_crs': src_crs,
'src_transform': ds.affine,
'dst_crs': dst_crs,
'dst_transform': dst_affine,
'resampling': getattr(Resampling, resampling),
'dst_shape': dst_shape
}
if anchors:
# Reproject the bbox of the output to WGS84
full_bbox = BBox((dst_affine.c, dst_affine.f + dst_affine.e * dst_shape[0],
dst_affine.c + dst_affine.a * dst_shape[1], dst_affine.f),
projection=Proj(dst_crs))
wgs84_bbox = full_bbox.project(Proj(init='EPSG:4326'))
print('WGS84 Anchors: {0}'.format([[wgs84_bbox.ymin, wgs84_bbox.xmin], [wgs84_bbox.ymax, wgs84_bbox.xmax]]))
elif anchors:
# Reproject the bbox of the output to WGS84
full_bbox = BBox(ds.bounds, projection=Proj(ds.crs))
wgs84_bbox = full_bbox.project(Proj(init='EPSG:4326'))
print('WGS84 Anchors: {0}'.format([[wgs84_bbox.ymin, wgs84_bbox.xmin], [wgs84_bbox.ymax, wgs84_bbox.xmax]]))
image_filename = os.path.join(output_directory,
'{0}.png'.format(filename_root))
render_image(renderer, data, image_filename, scale, reproject_kwargs=reproject_kwargs)
| consbio/clover | trefoil/cli/render_tif.py | Python | bsd-3-clause | 13,680 |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""This module contains the Apache Livy hook."""
import json
import re
from enum import Enum
from typing import Any, Dict, List, Optional, Sequence, Union
import requests
from airflow.exceptions import AirflowException
from airflow.providers.http.hooks.http import HttpHook
from airflow.utils.log.logging_mixin import LoggingMixin
class BatchState(Enum):
"""Batch session states"""
NOT_STARTED = 'not_started'
STARTING = 'starting'
RUNNING = 'running'
IDLE = 'idle'
BUSY = 'busy'
SHUTTING_DOWN = 'shutting_down'
ERROR = 'error'
DEAD = 'dead'
KILLED = 'killed'
SUCCESS = 'success'
class LivyHook(HttpHook, LoggingMixin):
"""
Hook for Apache Livy through the REST API.
:param livy_conn_id: reference to a pre-defined Livy Connection.
:param extra_options: A dictionary of options passed to Livy.
:param extra_headers: A dictionary of headers passed to the HTTP request to livy.
.. seealso::
For more details refer to the Apache Livy API reference:
https://livy.apache.org/docs/latest/rest-api.html
"""
TERMINAL_STATES = {
BatchState.SUCCESS,
BatchState.DEAD,
BatchState.KILLED,
BatchState.ERROR,
}
_def_headers = {'Content-Type': 'application/json', 'Accept': 'application/json'}
conn_name_attr = 'livy_conn_id'
default_conn_name = 'livy_default'
conn_type = 'livy'
hook_name = 'Apache Livy'
def __init__(
self,
livy_conn_id: str = default_conn_name,
extra_options: Optional[Dict[str, Any]] = None,
extra_headers: Optional[Dict[str, Any]] = None,
) -> None:
super().__init__(http_conn_id=livy_conn_id)
self.extra_headers = extra_headers or {}
self.extra_options = extra_options or {}
def get_conn(self, headers: Optional[Dict[str, Any]] = None) -> Any:
"""
Returns http session for use with requests
:param headers: additional headers to be passed through as a dictionary
:return: requests session
:rtype: requests.Session
"""
tmp_headers = self._def_headers.copy() # setting default headers
if headers:
tmp_headers.update(headers)
return super().get_conn(tmp_headers)
def run_method(
self,
endpoint: str,
method: str = 'GET',
data: Optional[Any] = None,
headers: Optional[Dict[str, Any]] = None,
) -> Any:
"""
Wrapper for HttpHook, allows to change method on the same HttpHook
:param method: http method
:param endpoint: endpoint
:param data: request payload
:param headers: headers
:return: http response
:rtype: requests.Response
"""
if method not in ('GET', 'POST', 'PUT', 'DELETE', 'HEAD'):
raise ValueError(f"Invalid http method '{method}'")
if not self.extra_options:
self.extra_options = {'check_response': False}
back_method = self.method
self.method = method
try:
result = self.run(endpoint, data, headers, self.extra_options)
finally:
self.method = back_method
return result
def post_batch(self, *args: Any, **kwargs: Any) -> Any:
"""
Perform request to submit batch
:return: batch session id
:rtype: int
"""
batch_submit_body = json.dumps(self.build_post_batch_body(*args, **kwargs))
if self.base_url is None:
# need to init self.base_url
self.get_conn()
self.log.info("Submitting job %s to %s", batch_submit_body, self.base_url)
response = self.run_method(
method='POST', endpoint='/batches', data=batch_submit_body, headers=self.extra_headers
)
self.log.debug("Got response: %s", response.text)
try:
response.raise_for_status()
except requests.exceptions.HTTPError as err:
raise AirflowException(
"Could not submit batch. "
f"Status code: {err.response.status_code}. Message: '{err.response.text}'"
)
batch_id = self._parse_post_response(response.json())
if batch_id is None:
raise AirflowException("Unable to parse the batch session id")
self.log.info("Batch submitted with session id: %d", batch_id)
return batch_id
def get_batch(self, session_id: Union[int, str]) -> Any:
"""
Fetch info about the specified batch
:param session_id: identifier of the batch sessions
:return: response body
:rtype: dict
"""
self._validate_session_id(session_id)
self.log.debug("Fetching info for batch session %d", session_id)
response = self.run_method(endpoint=f'/batches/{session_id}')
try:
response.raise_for_status()
except requests.exceptions.HTTPError as err:
self.log.warning("Got status code %d for session %d", err.response.status_code, session_id)
raise AirflowException(
f"Unable to fetch batch with id: {session_id}. Message: {err.response.text}"
)
return response.json()
def get_batch_state(self, session_id: Union[int, str]) -> BatchState:
"""
Fetch the state of the specified batch
:param session_id: identifier of the batch sessions
:return: batch state
:rtype: BatchState
"""
self._validate_session_id(session_id)
self.log.debug("Fetching info for batch session %d", session_id)
response = self.run_method(endpoint=f'/batches/{session_id}/state')
try:
response.raise_for_status()
except requests.exceptions.HTTPError as err:
self.log.warning("Got status code %d for session %d", err.response.status_code, session_id)
raise AirflowException(
f"Unable to fetch batch with id: {session_id}. Message: {err.response.text}"
)
jresp = response.json()
if 'state' not in jresp:
raise AirflowException(f"Unable to get state for batch with id: {session_id}")
return BatchState(jresp['state'])
def delete_batch(self, session_id: Union[int, str]) -> Any:
"""
Delete the specified batch
:param session_id: identifier of the batch sessions
:return: response body
:rtype: dict
"""
self._validate_session_id(session_id)
self.log.info("Deleting batch session %d", session_id)
response = self.run_method(method='DELETE', endpoint=f'/batches/{session_id}')
try:
response.raise_for_status()
except requests.exceptions.HTTPError as err:
self.log.warning("Got status code %d for session %d", err.response.status_code, session_id)
raise AirflowException(
f"Could not kill the batch with session id: {session_id}. Message: {err.response.text}"
)
return response.json()
def get_batch_logs(self, session_id: Union[int, str], log_start_position, log_batch_size) -> Any:
"""
Gets the session logs for a specified batch.
:param session_id: identifier of the batch sessions
:param log_start_position: Position from where to pull the logs
:param log_batch_size: Number of lines to pull in one batch
:return: response body
:rtype: dict
"""
self._validate_session_id(session_id)
log_params = {'from': log_start_position, 'size': log_batch_size}
response = self.run_method(endpoint=f'/batches/{session_id}/log', data=log_params)
try:
response.raise_for_status()
except requests.exceptions.HTTPError as err:
self.log.warning("Got status code %d for session %d", err.response.status_code, session_id)
raise AirflowException(
f"Could not fetch the logs for batch with session id: {session_id}. "
f"Message: {err.response.text}"
)
return response.json()
def dump_batch_logs(self, session_id: Union[int, str]) -> Any:
"""
Dumps the session logs for a specified batch
:param session_id: identifier of the batch sessions
:return: response body
:rtype: dict
"""
self.log.info("Fetching the logs for batch session with id: %d", session_id)
log_start_line = 0
log_total_lines = 0
log_batch_size = 100
while log_start_line <= log_total_lines:
# Livy log endpoint is paginated.
response = self.get_batch_logs(session_id, log_start_line, log_batch_size)
log_total_lines = self._parse_request_response(response, 'total')
log_start_line += log_batch_size
log_lines = self._parse_request_response(response, 'log')
for log_line in log_lines:
self.log.info(log_line)
@staticmethod
def _validate_session_id(session_id: Union[int, str]) -> None:
"""
Validate session id is a int
:param session_id: session id
"""
try:
int(session_id)
except (TypeError, ValueError):
raise TypeError("'session_id' must be an integer")
@staticmethod
def _parse_post_response(response: Dict[Any, Any]) -> Any:
"""
Parse batch response for batch id
:param response: response body
:return: session id
:rtype: int
"""
return response.get('id')
@staticmethod
def _parse_request_response(response: Dict[Any, Any], parameter) -> Any:
"""
Parse batch response for batch id
:param response: response body
:return: value of parameter
:rtype: Union[int, list]
"""
return response.get(parameter)
@staticmethod
def build_post_batch_body(
file: str,
args: Optional[Sequence[Union[str, int, float]]] = None,
class_name: Optional[str] = None,
jars: Optional[List[str]] = None,
py_files: Optional[List[str]] = None,
files: Optional[List[str]] = None,
archives: Optional[List[str]] = None,
name: Optional[str] = None,
driver_memory: Optional[str] = None,
driver_cores: Optional[Union[int, str]] = None,
executor_memory: Optional[str] = None,
executor_cores: Optional[int] = None,
num_executors: Optional[Union[int, str]] = None,
queue: Optional[str] = None,
proxy_user: Optional[str] = None,
conf: Optional[Dict[Any, Any]] = None,
) -> Any:
"""
Build the post batch request body.
For more information about the format refer to
.. seealso:: https://livy.apache.org/docs/latest/rest-api.html
:param file: Path of the file containing the application to execute (required).
:param proxy_user: User to impersonate when running the job.
:param class_name: Application Java/Spark main class string.
:param args: Command line arguments for the application s.
:param jars: jars to be used in this sessions.
:param py_files: Python files to be used in this session.
:param files: files to be used in this session.
:param driver_memory: Amount of memory to use for the driver process string.
:param driver_cores: Number of cores to use for the driver process int.
:param executor_memory: Amount of memory to use per executor process string.
:param executor_cores: Number of cores to use for each executor int.
:param num_executors: Number of executors to launch for this session int.
:param archives: Archives to be used in this session.
:param queue: The name of the YARN queue to which submitted string.
:param name: The name of this session string.
:param conf: Spark configuration properties.
:return: request body
:rtype: dict
"""
body: Dict[str, Any] = {'file': file}
if proxy_user:
body['proxyUser'] = proxy_user
if class_name:
body['className'] = class_name
if args and LivyHook._validate_list_of_stringables(args):
body['args'] = [str(val) for val in args]
if jars and LivyHook._validate_list_of_stringables(jars):
body['jars'] = jars
if py_files and LivyHook._validate_list_of_stringables(py_files):
body['pyFiles'] = py_files
if files and LivyHook._validate_list_of_stringables(files):
body['files'] = files
if driver_memory and LivyHook._validate_size_format(driver_memory):
body['driverMemory'] = driver_memory
if driver_cores:
body['driverCores'] = driver_cores
if executor_memory and LivyHook._validate_size_format(executor_memory):
body['executorMemory'] = executor_memory
if executor_cores:
body['executorCores'] = executor_cores
if num_executors:
body['numExecutors'] = num_executors
if archives and LivyHook._validate_list_of_stringables(archives):
body['archives'] = archives
if queue:
body['queue'] = queue
if name:
body['name'] = name
if conf and LivyHook._validate_extra_conf(conf):
body['conf'] = conf
return body
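        # Illustrative result (values are examples only):
        #
        #     LivyHook.build_post_batch_body(
        #         file="local:///opt/app/pi.py", args=[10], num_executors=2)
        #     # -> {'file': 'local:///opt/app/pi.py',
        #     #     'args': ['10'], 'numExecutors': 2}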
@staticmethod
def _validate_size_format(size: str) -> bool:
"""
Validate size format.
:param size: size value
:return: true if valid format
:rtype: bool
"""
if size and not (isinstance(size, str) and re.match(r'^\d+[kmgt]b?$', size, re.IGNORECASE)):
raise ValueError(f"Invalid java size format for string'{size}'")
return True
@staticmethod
def _validate_list_of_stringables(vals: Sequence[Union[str, int, float]]) -> bool:
"""
Check the values in the provided list can be converted to strings.
:param vals: list to validate
:return: true if valid
:rtype: bool
"""
if (
vals is None
or not isinstance(vals, (tuple, list))
or any(1 for val in vals if not isinstance(val, (str, int, float)))
):
raise ValueError("List of strings expected")
return True
@staticmethod
def _validate_extra_conf(conf: Dict[Any, Any]) -> bool:
"""
Check configuration values are either strings or ints.
:param conf: configuration variable
:return: true if valid
:rtype: bool
"""
if conf:
if not isinstance(conf, dict):
raise ValueError("'conf' argument must be a dict")
if any(True for k, v in conf.items() if not (v and isinstance(v, str) or isinstance(v, int))):
raise ValueError("'conf' values must be either strings or ints")
return True
| Acehaidrey/incubator-airflow | airflow/providers/apache/livy/hooks/livy.py | Python | apache-2.0 | 15,883 |
# coding: utf-8
from getpass import getpass
class CLIInput():
def get_user_name(self):
return input('user name: ')
def get_password(self):
return getpass()
def entry_selector(self, entries):
if not entries:
return None, None
titles = list(entries.keys())
for i, title in enumerate(titles):
print('[{0}] {1}'.format(i, title))
number = input('> ')
        if number.isdigit() and int(number) < len(titles):
title = titles[int(number)]
return title, entries[title]
else:
return None, None
    def get_entry_info(self, default=None):
        default = default or {}
entry = {}
def getter(name):
default_value = default.get(name)
default_value = default_value if default_value else ''
value = input('{0} [{1}]: '.format(name, default_value))
return value if value else default_value
title = getter('title')
keys = ['user', 'password', 'other']
for key in keys:
entry[key] = getter(key)
return title, entry
| meganehouser/pww | pww/inputs.py | Python | mit | 1,117 |
""" Utilities to fuse trees and wheels
To "fuse" is to merge two binary libraries of different architectures - see
func:`delocate.tools.lipo_fuse`.
The procedure for fusing two trees (or trees in wheels) is similar to updating
a dictionary. There is a lhs of an update (fuse) called ``to_tree`` and a rhs
called ``from_tree``. All files present in ``from_tree`` get copied into
``to_tree``, unless [the file is a library AND there is a corresponding file
with the same relative path in ``to_tree``]. In this case the two files are
"fused" - meaning we use ``lipo_fuse`` to merge the architectures in the two
libraries.
"""
import os
import shutil
from os.path import abspath, exists
from os.path import join as pjoin
from os.path import relpath, splitext
from .tmpdirs import InTemporaryDirectory
from .tools import (
chmod_perms,
cmp_contents,
dir2zip,
lipo_fuse,
open_rw,
zip2dir,
)
from .wheeltools import rewrite_record
def _copyfile(in_fname, out_fname):
    # Copy a file even if it lacks read / write permissions
perms = chmod_perms(in_fname)
with open_rw(in_fname, "rb") as fobj:
contents = fobj.read()
with open_rw(out_fname, "wb") as fobj:
fobj.write(contents)
os.chmod(out_fname, perms)
def fuse_trees(to_tree, from_tree, lib_exts=(".so", ".dylib", ".a")):
"""Fuse path `from_tree` into path `to_tree`
For each file in `from_tree` - check for library file extension (in
`lib_exts` - if present, check if there is a file with matching relative
path in `to_tree`, if so, use :func:`delocate.tools.lipo_fuse` to fuse the
two libraries together and write into `to_tree`. If any of these
conditions are not met, just copy the file from `from_tree` to `to_tree`.
Parameters
    ----------
to_tree : str
path of tree to fuse into (update into)
from_tree : str
path of tree to fuse from (update from)
lib_exts : sequence, optional
filename extensions for libraries
"""
for from_dirpath, dirnames, filenames in os.walk(from_tree):
to_dirpath = pjoin(to_tree, relpath(from_dirpath, from_tree))
# Copy any missing directories in to_path
for dirname in tuple(dirnames):
to_path = pjoin(to_dirpath, dirname)
if not exists(to_path):
from_path = pjoin(from_dirpath, dirname)
shutil.copytree(from_path, to_path)
# If copying, don't further analyze this directory
dirnames.remove(dirname)
for fname in filenames:
root, ext = splitext(fname)
from_path = pjoin(from_dirpath, fname)
to_path = pjoin(to_dirpath, fname)
if not exists(to_path):
_copyfile(from_path, to_path)
elif cmp_contents(from_path, to_path):
pass
elif ext in lib_exts:
# existing lib that needs fuse
lipo_fuse(from_path, to_path, to_path)
else:
# existing not-lib file not identical to source
_copyfile(from_path, to_path)
def fuse_wheels(to_wheel, from_wheel, out_wheel):
"""Fuse `from_wheel` into `to_wheel`, write to `out_wheel`
Parameters
    ----------
to_wheel : str
filename of wheel to fuse into
from_wheel : str
filename of wheel to fuse from
out_wheel : str
filename of new wheel from fusion of `to_wheel` and `from_wheel`
"""
to_wheel, from_wheel, out_wheel = [
abspath(w) for w in (to_wheel, from_wheel, out_wheel)
]
with InTemporaryDirectory():
zip2dir(to_wheel, "to_wheel")
zip2dir(from_wheel, "from_wheel")
fuse_trees("to_wheel", "from_wheel")
rewrite_record("to_wheel")
dir2zip("to_wheel", out_wheel)
| matthew-brett/delocate | delocate/fuse.py | Python | bsd-2-clause | 3,819 |
"""Support for views."""
from __future__ import annotations
import asyncio
from collections.abc import Awaitable, Callable
from http import HTTPStatus
import json
import logging
from typing import Any
from aiohttp import web
from aiohttp.typedefs import LooseHeaders
from aiohttp.web_exceptions import (
HTTPBadRequest,
HTTPInternalServerError,
HTTPUnauthorized,
)
from aiohttp.web_urldispatcher import AbstractRoute
import voluptuous as vol
from homeassistant import exceptions
from homeassistant.const import CONTENT_TYPE_JSON
from homeassistant.core import Context, is_callback
from homeassistant.helpers.json import JSONEncoder
from .const import KEY_AUTHENTICATED, KEY_HASS
_LOGGER = logging.getLogger(__name__)
class HomeAssistantView:
"""Base view for all views."""
url: str | None = None
extra_urls: list[str] = []
# Views inheriting from this class can override this
requires_auth = True
cors_allowed = False
@staticmethod
def context(request: web.Request) -> Context:
"""Generate a context from a request."""
if (user := request.get("hass_user")) is None:
return Context()
return Context(user_id=user.id)
@staticmethod
def json(
result: Any,
status_code: HTTPStatus | int = HTTPStatus.OK,
headers: LooseHeaders | None = None,
) -> web.Response:
"""Return a JSON response."""
try:
msg = json.dumps(result, cls=JSONEncoder, allow_nan=False).encode("UTF-8")
except (ValueError, TypeError) as err:
_LOGGER.error("Unable to serialize to JSON: %s\n%s", err, result)
raise HTTPInternalServerError from err
response = web.Response(
body=msg,
content_type=CONTENT_TYPE_JSON,
status=int(status_code),
headers=headers,
)
response.enable_compression()
return response
def json_message(
self,
message: str,
status_code: HTTPStatus | int = HTTPStatus.OK,
message_code: str | None = None,
headers: LooseHeaders | None = None,
) -> web.Response:
"""Return a JSON message response."""
data = {"message": message}
if message_code is not None:
data["code"] = message_code
return self.json(data, status_code, headers=headers)
def register(self, app: web.Application, router: web.UrlDispatcher) -> None:
"""Register the view with a router."""
assert self.url is not None, "No url set for view"
urls = [self.url] + self.extra_urls
routes: list[AbstractRoute] = []
for method in ("get", "post", "delete", "put", "patch", "head", "options"):
if not (handler := getattr(self, method, None)):
continue
handler = request_handler_factory(self, handler)
for url in urls:
routes.append(router.add_route(method, url, handler))
        # Use `get` because CORS middleware may not be loaded in emulated_hue
if self.cors_allowed:
allow_cors = app.get("allow_all_cors")
else:
allow_cors = app.get("allow_configured_cors")
if allow_cors:
for route in routes:
allow_cors(route)
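# Illustrative subclass (url and payload are hypothetical):
#
#     class ExampleView(HomeAssistantView):
#         url = "/api/example"
#
#         async def get(self, request: web.Request) -> web.Response:
#             return self.json({"ok": True})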
def request_handler_factory(
view: HomeAssistantView, handler: Callable
) -> Callable[[web.Request], Awaitable[web.StreamResponse]]:
"""Wrap the handler classes."""
assert asyncio.iscoroutinefunction(handler) or is_callback(
handler
), "Handler should be a coroutine or a callback."
async def handle(request: web.Request) -> web.StreamResponse:
"""Handle incoming request."""
if request.app[KEY_HASS].is_stopping:
return web.Response(status=HTTPStatus.SERVICE_UNAVAILABLE)
authenticated = request.get(KEY_AUTHENTICATED, False)
if view.requires_auth and not authenticated:
raise HTTPUnauthorized()
_LOGGER.debug(
"Serving %s to %s (auth: %s)",
request.path,
request.remote,
authenticated,
)
try:
result = handler(request, **request.match_info)
if asyncio.iscoroutine(result):
result = await result
except vol.Invalid as err:
raise HTTPBadRequest() from err
except exceptions.ServiceNotFound as err:
raise HTTPInternalServerError() from err
except exceptions.Unauthorized as err:
raise HTTPUnauthorized() from err
if isinstance(result, web.StreamResponse):
# The method handler returned a ready-made Response, how nice of it
return result
status_code = HTTPStatus.OK
if isinstance(result, tuple):
result, status_code = result
if isinstance(result, bytes):
bresult = result
elif isinstance(result, str):
bresult = result.encode("utf-8")
elif result is None:
bresult = b""
else:
assert (
False
), f"Result should be None, string, bytes or StreamResponse. Got: {result}"
return web.Response(body=bresult, status=status_code)
return handle
| jawilson/home-assistant | homeassistant/components/http/view.py | Python | apache-2.0 | 5,288 |
# Copyright 2018 ACSONE SA/NV
# Copyright 2019 Eficent and IT Consulting Services, S.L.
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
from datetime import date, timedelta
from odoo import _, api, fields, models
from odoo.exceptions import ValidationError
class SaleOrder(models.Model):
_inherit = "sale.order"
blanket_order_id = fields.Many2one(
"sale.blanket.order",
string="Origin blanket order",
related="order_line.blanket_order_line.order_id",
)
@api.model
    def _check_exhausted_blanket_order_line(self):
return any(
line.blanket_order_line.remaining_qty < 0.0 for line in self.order_line
)
def button_confirm(self):
res = super().button_confirm()
for order in self:
            if order._check_exhausted_blanket_order_line():
raise ValidationError(
_(
"Cannot confirm order %s as one of the lines refers "
"to a blanket order that has no remaining quantity."
)
% order.name
)
return res
@api.constrains("partner_id")
def check_partner_id(self):
for line in self.order_line:
if line.blanket_order_line:
if line.blanket_order_line.partner_id != self.partner_id:
raise ValidationError(
_(
"The customer must be equal to the "
"blanket order lines customer"
)
)
class SaleOrderLine(models.Model):
_inherit = "sale.order.line"
blanket_order_line = fields.Many2one(
"sale.blanket.order.line", string="Blanket Order line", copy=False
)
def _get_assigned_bo_line(self, bo_lines):
# We get the blanket order line with enough quantity and closest
# scheduled date
assigned_bo_line = False
date_planned = date.today()
date_delta = timedelta(days=365)
for line in bo_lines.filtered(lambda l: l.date_schedule):
date_schedule = line.date_schedule
if date_schedule and abs(date_schedule - date_planned) < date_delta:
assigned_bo_line = line
date_delta = abs(date_schedule - date_planned)
if assigned_bo_line:
return assigned_bo_line
non_date_bo_lines = bo_lines.filtered(lambda l: not l.date_schedule)
if non_date_bo_lines:
return non_date_bo_lines[0]
def _get_eligible_bo_lines_domain(self, base_qty):
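        # Eligible blanket order lines must match the product, have at least
        # base_qty remaining, share the sale order's currency, belong to an
        # open blanket order and, if set, to the same customer.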
filters = [
("product_id", "=", self.product_id.id),
("remaining_qty", ">=", base_qty),
("currency_id", "=", self.order_id.currency_id.id),
("order_id.state", "=", "open"),
]
if self.order_id.partner_id:
filters.append(("partner_id", "=", self.order_id.partner_id.id))
return filters
def _get_eligible_bo_lines(self):
base_qty = self.product_uom._compute_quantity(
self.product_uom_qty, self.product_id.uom_id
)
filters = self._get_eligible_bo_lines_domain(base_qty)
return self.env["sale.blanket.order.line"].search(filters)
def get_assigned_bo_line(self):
self.ensure_one()
eligible_bo_lines = self._get_eligible_bo_lines()
if eligible_bo_lines:
if (
not self.blanket_order_line
or self.blanket_order_line not in eligible_bo_lines
):
self.blanket_order_line = self._get_assigned_bo_line(eligible_bo_lines)
else:
self.blanket_order_line = False
self.onchange_blanket_order_line()
return {"domain": {"blanket_order_line": [("id", "in", eligible_bo_lines.ids)]}}
@api.onchange("product_id", "order_partner_id")
def onchange_product_id(self):
# If product has changed remove the relation with blanket order line
if self.product_id:
return self.get_assigned_bo_line()
return
@api.onchange("product_uom_qty", "product_uom")
def product_uom_change(self):
res = super().product_uom_change()
if self.product_id and not self.env.context.get("skip_blanket_find", False):
return self.get_assigned_bo_line()
return res
@api.onchange("blanket_order_line")
def onchange_blanket_order_line(self):
bol = self.blanket_order_line
if bol:
self.product_id = bol.product_id
if bol.product_uom != self.product_uom:
price_unit = bol.product_uom._compute_price(
bol.price_unit, self.product_uom
)
else:
price_unit = bol.price_unit
self.price_unit = price_unit
if bol.taxes_id:
self.tax_id = bol.taxes_id
else:
if not self.tax_id:
self._compute_tax_id()
self.with_context(skip_blanket_find=True).product_uom_change()
@api.constrains("product_id")
def check_product_id(self):
for line in self:
if (
line.blanket_order_line
and line.product_id != line.blanket_order_line.product_id
):
raise ValidationError(
_(
"The product in the blanket order and in the "
"sales order must match"
)
)
@api.constrains("currency_id")
def check_currency(self):
for line in self:
if line.blanket_order_line:
if line.currency_id != line.blanket_order_line.order_id.currency_id:
raise ValidationError(
_(
"The currency of the blanket order must match with "
"that of the sale order."
)
)
| OCA/sale-workflow | sale_blanket_order/models/sale_orders.py | Python | agpl-3.0 | 6,042 |
import random
number = random.randint(1,100)
def guessAgain():
i = input("Again? y/n ")
i = i.lower()
if i == "y":
return True
elif i == "n":
return False
else:
        print("I don't understand. Try y or n.")
return None
print("I'm thinking of a number between 1 and 100.")
play = True
count = 0
while play == True:
try:
guess=int(input("Guess what it is? "))
except ValueError:
print("I said I was thinking of a *number*")
continue
count+=1
    if guess > 100 or guess < 1:
print("You aren't even trying")
elif guess < number:
print("Too low")
elif guess > number:
print("That is too high!")
elif guess == number:
print("You guessed it! It took you "+str(count)+" guesses.")
count = 0
number = random.randint(1,100)
play = None
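        # keep prompting until guessAgain() returns a definite True or False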
while play == None:
play = guessAgain()
| disasterisk/itc110 | guess.py | Python | mit | 1,117 |
"""
Copyright 2009, Red Hat, Inc and Others
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301 USA
"""
from base import *
class MgmtclassTests(CobblerTest):
def setUp(self):
CobblerTest.setUp(self)
(self.package_id, self.package_name) = self.create_package()
(self.file_id, self.file_name) = self.create_file()
def test_create_mgmtclass(self):
""" Test creation of a cobbler mgmtclass. """
(mgmtclass_id, mgmtclass_name) = self.create_mgmtclass(self.package_name, self.file_name)
mgmtclasses = self.api.find_mgmtclass({'name': mgmtclass_name})
self.assertTrue(len(mgmtclasses) > 0)
| spotify/cobbler | tests/mgmtclass_test.py | Python | gpl-2.0 | 1,286 |
# =============================================================================
# Copyright [2013] [Kevin Carter]
# License Information :
# This software has no warranty, it is provided 'as is'. It is your
# responsibility to validate the behavior of the routines and its accuracy
# using the code provided. Consult the GNU General Public license for further
# details (see GNU General Public License).
# http://www.gnu.org/licenses/gpl.html
# =============================================================================
__author__ = "Kevin Carter"
__contact__ = "kevin.carter@rackspace.com"
__email__ = "kevin.carter@RACKSPACE.COM"
__copyright__ = "2013 All Rights Reserved"
__version__ = "0.0.1"
__status__ = "BETA"
__appname__ = "tribble"
__license__ = 'GNU General Public License v3 or later (GPLv3+)'
__description__ = "Application to build clouds using Config Management"
__urlinformation__ = ""
| cloudnull/tribble-api | tribble/info.py | Python | gpl-3.0 | 903 |
from model.contact import Contact
import re
class ContactHelper:
def __init__(self, app):
self.app = app
def create(self, contact):
wd = self.app.wd
# init contact creations
wd.find_element_by_link_text("add new").click()
# fill contact form
self.fill_contact_form(contact)
# submit contact creations
wd.find_element_by_xpath("//div[@id='content']/form/input[21]").click()
wd.find_element_by_link_text("home page").click()
self.contact_cache = None
def fill_contact_form(self, contact):
wd = self.app.wd
self.change_field_value("firstname", contact.first_name)
self.change_field_value("lastname", contact.last_name)
self.change_field_value("company", contact.company)
self.change_field_value("address", contact.address)
self.change_field_value("address2", contact.address2)
self.change_field_value("home", contact.phone_home)
self.change_field_value("email", contact.e_mail)
self.change_field_value("email2", contact.e_mail_2)
self.change_field_value("email3", contact.e_mail_3)
self.change_field_value("mobile", contact.mobilephone)
self.change_field_value("work", contact.workphone)
self.change_field_value("phone2", contact.secondaryphone)
# choose birth date
if not wd.find_element_by_xpath("//div[@id='content']/form/select[1]//option[13]").is_selected():
wd.find_element_by_xpath("//div[@id='content']/form/select[1]//option[13]").click()
if not wd.find_element_by_xpath("//div[@id='content']/form/select[2]//option[2]").is_selected():
wd.find_element_by_xpath("//div[@id='content']/form/select[2]//option[2]").click()
self.change_field_value("byear", contact.year)
def change_field_value(self, field_name, text):
wd = self.app.wd
if text is not None:
wd.find_element_by_name(field_name).click()
wd.find_element_by_name(field_name).clear()
wd.find_element_by_name(field_name).send_keys(text)
    def edit_first_contact(self, new_contact_data):
        self.edit_contact_by_index(0, new_contact_data)
def edit_contact_by_index(self, index, new_contact_data):
wd = self.app.wd
        # select contact by index
self.select_contact_by_index(index)
# init update
self.fill_contact_form(new_contact_data)
# submit update
wd.find_element_by_name("update").click()
wd.find_element_by_css_selector('[href="./"]').click()
self.contact_cache = None
def select_contact_by_index(self, index):
wd = self.app.wd
wd.find_elements_by_css_selector('[title="Edit"]')[index].click()
def select_contact_by_id(self, id):
wd = self.app.wd
wd.find_element_by_css_selector("input[value='%s']" % id).click()
def delete_first_contact(self):
self.delete_contact_by_index(0)
def delete_contact_by_index(self, index):
wd = self.app.wd
        # select contact by index
self.select_contact_by_index(index)
# submit update
wd.find_element_by_css_selector('[value="Delete"]').click()
# wd.switch_to_alert().accept()
wd.find_element_by_css_selector('[href="./"]').click()
self.contact_cache = None
def delete_contact_by_id(self, id):
wd = self.app.wd
self.select_contact_by_id(id)
# submit deletion
wd.find_element_by_css_selector('[value="Delete"]').click()
wd.switch_to_alert().accept()
wd.find_element_by_css_selector('[href="./"]').click()
self.contact_cache = None
def count(self):
wd = self.app.wd
return len(wd.find_elements_by_css_selector('[name="selected[]"]'))
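    # lazily filled cache of the contact list; reset after any create/edit/delete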
contact_cache = None
def get_contact_list(self):
if self.contact_cache is None:
wd = self.app.wd
self.contact_cache = []
for row in wd.find_elements_by_name("entry"):
cells = row.find_elements_by_tag_name('td')
first_name = cells[2].text
last_name = cells[1].text
id = cells[0].find_element_by_tag_name('input').get_attribute('value')
all_phones = cells[5].text
all_email = cells[4].text
all_addresses = cells[3].text
self.contact_cache.append(Contact(first_name=first_name, last_name=last_name, id=id,
all_phones_from_home_page=all_phones,
all_email_from_home_page=all_email,
all_addresses_from_home_page=all_addresses))
return list(self.contact_cache)
def open_contact_to_edit_by_index(self, index):
wd = self.app.wd
row = wd.find_elements_by_name('entry')[index]
cell = row.find_elements_by_tag_name('td')[7]
cell.find_element_by_tag_name('a').click()
def open_contact_view_by_index(self, index):
wd = self.app.wd
row = wd.find_elements_by_name('entry')[index]
cell = row.find_elements_by_tag_name('td')[6]
cell.find_element_by_tag_name('a').click()
def get_contact_info_from_edit_page(self, index):
wd = self.app.wd
self.open_contact_to_edit_by_index(index)
first_name = wd.find_element_by_name('firstname').get_attribute('value')
last_name = wd.find_element_by_name('lastname').get_attribute('value')
id = wd.find_element_by_name('id').get_attribute('value')
phone_home = wd.find_element_by_name('home').get_attribute('value')
mobilephone = wd.find_element_by_name('mobile').get_attribute('value')
secondaryphone = wd.find_element_by_name('phone2').get_attribute('value')
workphone = wd.find_element_by_name('work').get_attribute('value')
e_mail = wd.find_element_by_name('email').get_attribute('value')
e_mail_2 = wd.find_element_by_name('email2').get_attribute('value')
e_mail_3 = wd.find_element_by_name('email3').get_attribute('value')
address = wd.find_element_by_name('address').get_attribute('value')
address2 = wd.find_element_by_name('address2').get_attribute('value')
        return Contact(first_name=first_name, last_name=last_name, id=id, phone_home=phone_home,
                       mobilephone=mobilephone, secondaryphone=secondaryphone, workphone=workphone,
                       e_mail=e_mail, e_mail_2=e_mail_2, e_mail_3=e_mail_3, address=address, address2=address2)
def get_contact_from_view_page(self, index):
wd = self.app.wd
self.open_contact_view_by_index(index)
text = wd.find_element_by_id("content").text
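        # each phone number sits on its own labelled line:
        # H: home, M: mobile, W: work, P: secondary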
        phone_home = re.search("H: (.*)", text).group(1)
mobilephone = re.search("M: (.*)", text).group(1)
workphone = re.search("W: (.*)", text).group(1)
secondaryphone = re.search("P: (.*)", text).group(1)
        return Contact(phone_home=phone_home, mobilephone=mobilephone,
                       secondaryphone=secondaryphone, workphone=workphone)
| sabinaczopik/python_training | fixture/contact.py | Python | apache-2.0 | 7,165 |
# -*- coding: utf-8 -*-
# Generated by Django 1.9.1 on 2016-01-03 19:17
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('dmax_website', '0009_auto_20160103_1911'),
]
operations = [
migrations.RenameField(
model_name='projectitem',
old_name='project_abbreviation',
new_name='abbreviation',
),
]
| maxwelld90/personal_web | django_project/personal_web/dmax_website/migrations/0010_auto_20160103_1917.py | Python | gpl-2.0 | 457 |
# -*- coding: utf-8 -*-
{
'!=': '!=',
'!langcode!': 'it',
'!langname!': 'Italiano',
'"update" is an optional expression like "field1=\'newvalue\'". You cannot update or delete the results of a JOIN': '"update" è un\'espressione opzionale come "campo1=\'nuovo valore\'". Non si può fare "update" o "delete" dei risultati di un JOIN ',
'%(nrows)s records found': '%(nrows)s record trovati',
'%d seconds ago': '%d secondi fa',
'%s %%{row} deleted': '%s righe ("record") cancellate',
'%s %%{row} updated': '%s righe ("record") modificate',
'%s selected': '%s selezionato',
'%Y-%m-%d': '%d/%m/%Y',
'%Y-%m-%d %H:%M:%S': '%d/%m/%Y %H:%M:%S',
'(**%.0d MB**)': '(**%.0d MB**)',
'**%(items)s** %%{item(items)}, **%(bytes)s** %%{byte(bytes)}': '**%(items)s** %%{item(items)}, **%(bytes)s** %%{byte(bytes)}',
'**%(items)s** items, **%(bytes)s** %%{byte(bytes)}': '**%(items)s** items, **%(bytes)s** %%{byte(bytes)}',
'**not available** (requires the Python [[guppy http://pypi.python.org/pypi/guppy/ popup]] library)': '**not available** (requires the Python [[guppy http://pypi.python.org/pypi/guppy/ popup]] library)',
'<': '<',
'<=': '<=',
'=': '=',
'>': '>',
'>=': '>=',
'?': '?',
'@markmin\x01An error occured, please [[reload %s]] the page': 'An error occured, please [[reload %s]] the page',
'@markmin\x01Number of entries: **%s**': 'Numero di entità: **%s**',
'``**not available**``:red (requires the Python [[guppy http://pypi.python.org/pypi/guppy/ popup]] library)': '``**not available**``:red (requires the Python [[guppy http://pypi.python.org/pypi/guppy/ popup]] library)',
'About': 'About',
'Access Control': 'Controllo Accessi',
'Add': 'Aggiungi',
'admin': 'admin',
'Administrative Interface': 'Interfaccia Amministrativa',
'Administrative interface': 'Interfaccia amministrativa',
'Ajax Recipes': 'Ajax Recipes',
'An error occured, please %s the page': 'È stato rilevato un errore, prego %s la pagina',
'An error occured, please [[reload %s]] the page': 'An error occured, please [[reload %s]] the page',
'And': 'E',
'appadmin is disabled because insecure channel': 'Amministrazione (appadmin) disabilitata: comunicazione non sicura',
'Are you sure you want to delete this object?': 'Sicuro di voler cancellare questo oggetto ?',
'Available Databases and Tables': 'Database e tabelle disponibili',
'Back': 'Indietro',
'Buy this book': 'Compra questo libro',
"Buy web2py's book": "Buy web2py's book",
'cache': 'cache',
'Cache': 'Cache',
'Cache Cleared': 'Cache Cleared',
'Cache contains items up to **%(hours)02d** %%{hour(hours)} **%(min)02d** %%{minute(min)} **%(sec)02d** %%{second(sec)} old.': 'Cache contains items up to **%(hours)02d** %%{hour(hours)} **%(min)02d** %%{minute(min)} **%(sec)02d** %%{second(sec)} old.',
'Cache Keys': 'Cache Keys',
'Cannot be empty': 'Non può essere vuoto',
'Change password': 'Cambia Password',
'change password': 'Cambia password',
'Check to delete': 'Seleziona per cancellare',
'Clear': 'Resetta',
'Clear CACHE?': 'Resetta CACHE?',
'Clear DISK': 'Resetta DISK',
'Clear RAM': 'Resetta RAM',
'Client IP': 'Client IP',
'Close': 'Chiudi',
'Cognome': 'Cognome',
'Community': 'Community',
'Components and Plugins': 'Componenti and Plugin',
'Config.ini': 'Config.ini',
'contains': 'contiene',
'Controller': 'Controller',
'Copyright': 'Copyright',
'Created By': 'Creato Da',
'Created On': 'Creato Il',
'CSV': 'CSV',
'CSV (hidden cols)': 'CSV (hidden cols)',
'Current request': 'Richiesta (request) corrente',
'Current response': 'Risposta (response) corrente',
'Current session': 'Sessione (session) corrente',
'customize me!': 'Personalizzami!',
'data uploaded': 'dati caricati',
'Database': 'Database',
'Database %s select': 'Database %s select',
'Database Administration (appadmin)': 'Database Administration (appadmin)',
'db': 'db',
'DB Model': 'Modello di DB',
'Delete': 'Cancella',
'Delete:': 'Cancella:',
'Demo': 'Demo',
'Deployment Recipes': 'Deployment Recipes',
'Description': 'Descrizione',
'design': 'progetta',
'Design': 'Design',
'DISK': 'DISK',
'Disk Cache Keys': 'Disk Cache Keys',
'Disk Cleared': 'Disk Cleared',
'DISK contains items up to **%(hours)02d** %%{hour(hours)} **%(min)02d** %%{minute(min)} **%(sec)02d** %%{second(sec)} old.': 'DISK contains items up to **%(hours)02d** %%{hour(hours)} **%(min)02d** %%{minute(min)} **%(sec)02d** %%{second(sec)} old.',
'Documentation': 'Documentazione',
"Don't know what to do?": 'Non sai cosa fare?',
'done!': 'fatto!',
'Download': 'Download',
'E-mail': 'E-mail',
'Edit': 'Modifica',
'Edit current record': 'Modifica record corrente',
'edit profile': 'modifica profilo',
'Edit This App': 'Modifica questa applicazione',
'Email and SMS': 'Email e SMS',
'Email non valida': 'Email non valida',
'enter a number between %(min)g and %(max)g': 'enter a number between %(min)g and %(max)g',
'enter an integer between %(min)g and %(max)g': 'inserisci un intero tra %(min)g e %(max)g',
'Errors': 'Errori',
'Errors in form, please check it out.': 'Errori nel form, ricontrollalo',
'export as csv file': 'esporta come file CSV',
'Export:': 'Esporta:',
'FAQ': 'FAQ',
'First name': 'Nome',
'Forgot username?': 'Dimenticato lo username?',
'Forms and Validators': 'Forms and Validators',
'Free Applications': 'Free Applications',
'Graph Model': 'Graph Model',
'Group %(group_id)s created': 'Group %(group_id)s created',
'Group ID': 'ID Gruppo',
'Group uniquely assigned to user %(id)s': 'Group uniquely assigned to user %(id)s',
'Groups': 'Groups',
'hello': 'hello',
'hello world': 'salve mondo',
'Hello World': 'Salve Mondo',
'Hello World in a flash!': 'Salve Mondo in un flash!',
'Helping web2py': 'Helping web2py',
'Hit Ratio: **%(ratio)s%%** (**%(hits)s** %%{hit(hits)} and **%(misses)s** %%{miss(misses)})': 'Hit Ratio: **%(ratio)s%%** (**%(hits)s** %%{hit(hits)} and **%(misses)s** %%{miss(misses)})',
'Home': 'Home',
'How did you get here?': 'Come sei arrivato qui?',
'HTML': 'HTML',
'import': 'importa',
'Import/Export': 'Importa/Esporta',
'Index': 'Indice',
'insert new': 'inserisci nuovo',
'insert new %s': 'inserisci nuovo %s',
'Internal State': 'Stato interno',
'Introduction': 'Introduzione',
'Invalid email': 'Email non valida',
'Invalid login': 'Login non valido',
'Invalid Query': 'Richiesta (query) non valida',
'invalid request': 'richiesta non valida',
'Is Active': "E' attivo",
'Key': 'Chiave',
'Last name': 'Cognome',
'Layout': 'Layout',
'Layout Plugins': 'Layout Plugins',
'Layouts': 'Layouts',
'Live Chat': 'Live Chat',
'Log In': 'Log In',
'Logged in': 'Loggato',
'Logged out': 'Disconnesso',
'login': 'accesso',
'Login': 'Login',
'logout': 'uscita',
'Logout': 'Logout',
'Lost Password': 'Password Smarrita',
'Lost password?': 'Password smarrita?',
'lost password?': 'dimenticato la password?',
'Main Menu': 'Menu principale',
'Manage %(action)s': 'Manage %(action)s',
'Manage Access Control': 'Manage Access Control',
'Manage Cache': 'Manage Cache',
'Memberships': 'Memberships',
'Menu Model': 'Menu Modelli',
'Modified By': 'Modificato da',
'Modified On': 'Modificato il',
'My Sites': 'My Sites',
'Name': 'Nome',
'New': 'Nuovo',
'New password': 'Nuova password',
'New Record': 'Nuovo elemento (record)',
'new record inserted': 'nuovo record inserito',
'next %s rows': 'next %s rows',
'next 100 rows': 'prossime 100 righe',
'No databases in this application': 'Nessun database presente in questa applicazione',
'No records found': 'Nessun record trovato',
'Nome': 'Nome',
'Non può essere vuoto': 'Non può essere vuoto',
'not authorized': 'non autorizzato',
'Number of entries: **%s**': 'Number of entries: **%s**',
'Object or table name': 'Oggeto o nome tabella',
'Old password': 'Vecchia password',
'Online book': 'Online book',
'Online examples': 'Vedere gli esempi',
'Or': 'O',
'or import from csv file': 'oppure importa da file CSV',
'Origin': 'Origine',
'Other Plugins': 'Other Plugins',
'Other Recipes': 'Other Recipes',
'Overview': 'Overview',
'Password': 'Password',
"Password fields don't match": 'I campi password non sono uguali',
'Permission': 'Permission',
'Permissions': 'Permissions',
'please input your password again': 'perfavore reimmeti la tua password',
'Plugins': 'Plugins',
'Powered by': 'Powered by',
'Preface': 'Preface',
'previous %s rows': 'previous %s rows',
'previous 100 rows': '100 righe precedenti',
'Profile': 'Profilo',
'pygraphviz library not found': 'pygraphviz library not found',
'Python': 'Python',
'Query:': 'Richiesta (query):',
'Quick Examples': 'Quick Examples',
'RAM': 'RAM',
'RAM Cache Keys': 'RAM Cache Keys',
'Ram Cleared': 'Ram Cleared',
'RAM contains items up to **%(hours)02d** %%{hour(hours)} **%(min)02d** %%{minute(min)} **%(sec)02d** %%{second(sec)} old.': 'RAM contains items up to **%(hours)02d** %%{hour(hours)} **%(min)02d** %%{minute(min)} **%(sec)02d** %%{second(sec)} old.',
'Recipes': 'Recipes',
'Record': 'Record',
'record does not exist': 'il record non esiste',
'Record ID': 'Record ID',
'Record id': 'Record id',
'Register': 'Registrati',
'register': 'registrazione',
'Registration identifier': 'Registration identifier',
'Registration key': 'Chiave di Registazione',
'Registration successful': 'Registrazione avvenuta',
'reload': 'reload',
'Remember me (for 30 days)': 'Ricordami (per 30 giorni)',
'Request reset password': 'Richiedi il reset della password',
'Reset Password key': 'Resetta chiave Password ',
'Role': 'Ruolo',
'Roles': 'Roles',
'Rows in Table': 'Righe nella tabella',
'Rows selected': 'Righe selezionate',
'Save model as...': 'Salva modello come...',
'Save profile': 'Salva profilo',
'Search': 'Ricerca',
'Semantic': 'Semantic',
'Services': 'Servizi',
'Sign Up': 'Sign Up',
'Size of cache:': 'Size of cache:',
'starts with': 'comincia con',
'state': 'stato',
'Statistics': 'Statistics',
'Stylesheet': 'Foglio di stile (stylesheet)',
'submit': 'Invia',
'Submit': 'Invia',
'Support': 'Support',
'Sure you want to delete this object?': 'Vuoi veramente cancellare questo oggetto?',
'Table': 'tabella',
'Table name': 'Nome tabella',
'The "query" is a condition like "db.table1.field1==\'value\'". Something like "db.table1.field1==db.table2.field2" results in a SQL JOIN.': 'La richiesta (query) è una condizione come ad esempio "db.tabella1.campo1==\'valore\'". Una condizione come "db.tabella1.campo1==db.tabella2.campo2" produce un "JOIN" SQL.',
'The Core': 'The Core',
'The output of the file is a dictionary that was rendered by the view %s': 'L\'output del file è un "dictionary" che è stato visualizzato dalla vista %s',
'The Views': 'The Views',
'This App': 'This App',
'This email already has an account': 'This email already has an account',
'This is a copy of the scaffolding application': "Questa è una copia dell'applicazione di base (scaffold)",
'Time in Cache (h:m:s)': 'Time in Cache (h:m:s)',
'Timestamp': 'Ora (timestamp)',
'too short': 'troppo corto',
'Traceback': 'Traceback',
'TSV (Excel compatible)': 'TSV (Excel compatibile)',
'TSV (Excel compatible, hidden cols)': 'TSV (Excel compatibile, hidden cols)',
'Twitter': 'Twitter',
'unable to parse csv file': 'non riesco a decodificare questo file CSV',
'Update': 'Aggiorna',
'Update:': 'Aggiorna:',
'Use (...)&(...) for AND, (...)|(...) for OR, and ~(...) for NOT to build more complex queries.': 'Per costruire richieste (query) più complesse si usano (...)&(...) come "e" (AND), (...)|(...) come "o" (OR), e ~(...) come negazione (NOT).',
'User': 'User',
'User %(id)s Logged-in': 'User %(id)s Logged-in',
'User %(id)s Logged-out': 'User %(id)s Logged-out',
'User %(id)s Password changed': 'User %(id)s Password changed',
'User %(id)s Password reset': 'User %(id)s Password reset',
'User %(id)s Profile updated': 'User %(id)s Profile updated',
'User %(id)s Registered': 'User %(id)s Registered',
'User ID': 'ID Utente',
'Users': 'Users',
'value already in database or empty': 'valore già presente nel database o vuoto',
'Verify Password': 'Verifica Password',
'Videos': 'Videos',
'View': 'Vista',
'Welcome': 'Benvenuto',
'Welcome %s': 'Benvenuto %s',
'Welcome to web2py': 'Benvenuto su web2py',
'Welcome to web2py!': 'Benvenuto in web2py!',
'Which called the function %s located in the file %s': 'che ha chiamato la funzione %s presente nel file %s',
'Working...': 'Working...',
'XML': 'XML',
'You are successfully running web2py': 'Stai eseguendo web2py con successo',
'You can modify this application and adapt it to your needs': 'Puoi modificare questa applicazione adattandola alle tue necessità',
'You visited the url %s': "Hai visitato l'URL %s",
}
| dbmi-pitt/EvidenceType-Calculator | source/evidence_type_calculator/languages/it.py | Python | apache-2.0 | 12,432 |
from django.core.validators import RegexValidator
from django.db import models
from event.models import Event
class Job(models.Model):
id = models.AutoField(primary_key=True)
event = models.ForeignKey(Event)
name = models.CharField(
max_length=75,
validators=[
RegexValidator(
r'^[(A-Z)|(a-z)|(\s)|(\')]+$',
),
],
)
start_date = models.DateField()
end_date = models.DateField()
description = models.TextField(
blank=True,
validators=[
RegexValidator(
r'^[(A-Z)|(a-z)|(0-9)|(\s)|(\.)|(,)|(\-)|(!)|(\')]+$',
),
],
)
| willingc/vms | vms/job/models.py | Python | gpl-2.0 | 675 |
#!/usr/bin/python3
"""
Script to check recently uploaded files.
This script checks if a file description is present and if there are other
problems in the image's description.
This script will have to be configured for each language. Please submit
translations as additions to the Pywikibot framework.
Everything that needs customisation is indicated by comments.
This script understands the following command-line arguments:
-limit The number of images to check (default: 80)
-commons The bot will check if an image on Commons has the same name
and if so it reports the image.
-duplicates[:#] Check if the image has duplicates (if an argument is given,
set how many rollbacks to wait before reporting the image in
the report instead of tagging it) default: 1 rollback.
-duplicatesreport Report the duplicates in a log *AND* put the template in
the images.
-maxusernotify Maximum notifications added to a user talk page in a single
check, to avoid email spamming.
-sendemail Send an email after tagging.
-break To break the bot after the first check (default: recursive)
-sleep[:#] Time in seconds between repeat runs (default: 30)
-wait[:#] Wait x second before check the images (default: 0)
-skip[:#] The bot skips the first [:#] images (default: 0)
-start[:#] Use allimages() as generator
(it starts already from File:[:#])
-cat[:#] Use a category as generator
-regex[:#] Use regex, must be used with -url or -page
-page[:#] Define the name of the wikipage where the images are listed
-url[:#] Define the url where the images are listed
-nologerror If given, this option will disable the error that is raised
when the log is full.
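
A hypothetical invocation (the argument values here are made up for
illustration) could be:

    python pwb.py checkimages -limit:10 -duplicates -break
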
Instructions for the real-time settings.
For every new block you have to add:
<------- ------->
In this way the bot can tell where each block starts and read the right
parameters.
* Name= Set the name of the block
* Find= search this text in the image's description
* Findonly= search for exactly this text in the image's description
* Summary= That's the summary that the bot will use when it notifies the
problem.
* Head= That's the heading that the bot will use for the message.
* Text= This is the template that the bot will use when it reports the
image's problem.
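
A hypothetical block (all field values here are invented for illustration;
the exact syntax is the one parsed by SETTINGS_REGEX in this file) could
look like:

<------- ------->
*Name='Suspicious source'
*Find=grabbed from the internet
*Imagechanges=False
*Summary='Bot: reporting file with a suspicious source'
*Head='Image source'
*Text='The file seems to have a suspicious source, please check it.'
*Mex='Please check the source of your file.'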
Todo
----
* Clean the code, some passages are pretty difficult to understand.
* Add the "catch the language" function for commons.
* Fix and reorganise the new documentation
* Add a report for the image tagged.
"""
#
# (C) Pywikibot team, 2006-2022
#
# Distributed under the terms of the MIT license.
#
import collections
import re
import time
from typing import Generator
import pywikibot
from pywikibot import config, i18n
from pywikibot import pagegenerators as pg
from pywikibot.backports import List, Tuple
from pywikibot.bot import suggest_help
from pywikibot.exceptions import (
EditConflictError,
Error,
IsRedirectPageError,
LockedPageError,
NoPageError,
NotEmailableError,
PageRelatedError,
PageSaveRelatedError,
ServerError,
TranslationError,
)
from pywikibot.family import Family
from pywikibot.site import Namespace
###############################################################################
# <--------------------------- Change only below! --------------------------->#
###############################################################################
# NOTE: in the messages used by the bot, if you put __botnick__ in the text it
# will automatically be replaced with the bot's nickname.
# This is the text that will be added to the image (i.e. the {{no source}}
# with the right day/month/year).
N_TXT = {
'commons': '{{subst:nld}}',
'meta': '{{No license}}',
'test': '{{No license}}',
'ar': '{{subst:ملم}}',
'arz': '{{subst:ملم}}',
'de': '{{Dateiüberprüfung}}',
'en': '{{subst:nld}}',
'fa': '{{subst:حق تکثیر تصویر نامعلوم}}',
'fr': '{{subst:lid}}',
'ga': '{{subst:Ceadúnas de dhíth}}',
'hr': '{{Bez licence}}',
'hu': '{{nincslicenc|~~~~~}}',
'it': '{{subst:unverdata}}',
'ja': '{{subst:Nld}}',
'ko': '{{subst:nld}}',
'ru': '{{subst:nld}}',
'sd': '{{subst:اجازت نامعلوم}}',
'sr': '{{subst:датотека без лиценце}}',
'ta': '{{subst:nld}}',
'ur': '{{subst:حقوق نسخہ تصویر نامعلوم}}',
'zh': '{{subst:No license/auto}}',
}
# Text that the bot searches for to see whether it's already present or not.
# If it starts with {{ a regex is used to make a better check.
# This will work so:
# '{{no license' --> '\{\{(?:template:)?no[ _]license ?(?:\||\n|\}|/) ?' (case
# insensitive).
# If there's not a {{ it will work as usual (if x in Text)
TXT_FIND = {
'commons': ['{{no license', '{{no license/en',
'{{nld', '{{no permission', '{{no permission since'],
'meta': ['{{no license', '{{nolicense', '{{nld'],
'test': ['{{no license'],
'ar': ['{{لت', '{{لا ترخيص'],
'arz': ['{{nld', '{{no license'],
'de': ['{{DÜP', '{{Düp', '{{Dateiüberprüfung'],
'en': ['{{nld', '{{no license'],
'fa': ['{{حق تکثیر تصویر نامعلوم۲'],
'ga': ['{{Ceadúnas de dhíth', '{{Ceadúnas de dhíth'],
'hr': ['{{bez licence'],
'hu': ['{{nincsforrás', '{{nincslicenc'],
'it': ['{{unverdata', '{{unverified'],
'ja': ['{{no source', '{{unknown',
'{{non free', '<!--削除についての議論が終了するまで'],
'ko': ['{{출처 없음', '{{라이선스 없음', '{{Unknown'],
'ru': ['{{no license'],
'sd': ['{{ناحوالا', '{{ااجازت نامعلوم', '{{Di-no'],
'sr': ['{{датотека без лиценце', '{{датотека без извора'],
'ta': ['{{no source', '{{nld', '{{no license'],
'ur': ['{{ناحوالہ', '{{اجازہ نامعلوم', '{{Di-no'],
'zh': ['{{no source', '{{unknown', '{{No license'],
}
# When the bot finds that the user talk page is empty, it's not nice to put
# only the no-source notice without a welcome message, is it?
EMPTY = {
'commons': '{{subst:welcome}}\n~~~~\n',
'meta': '{{subst:Welcome}}\n~~~~\n',
'ar': '{{subst:أهلا ومرحبا}}\n~~~~\n',
'arz': '{{subst:اهلا و سهلا}}\n~~~~\n',
'de': '{{subst:willkommen}} ~~~~',
'en': '{{subst:welcome}}\n~~~~\n',
'fa': '{{subst:خوشامدید|%s}}',
'fr': '{{Bienvenue nouveau\n~~~~\n',
'ga': '{{subst:Fáilte}} - ~~~~\n',
'hr': '{{subst:dd}}--~~~~\n',
'hu': '{{subst:Üdvözlet|~~~~}}\n',
'it': '<!-- inizio template di benvenuto -->\n{{subst:Benvebot}}\n~~~~\n'
'<!-- fine template di benvenuto -->',
'ja': '{{subst:Welcome/intro}}\n{{subst:welcome|--~~~~}}\n',
'ko': '{{환영}}--~~~~\n',
'ru': '{{subst:Приветствие}}\n~~~~\n',
'sd': '{{ڀليڪار}}\n~~~~\n',
'sr': '{{dd}}--~~~~\n',
'ta': '{{welcome}}\n~~~~\n',
'ur': '{{خوش آمدید}}\n~~~~\n',
'zh': '{{subst:welcome|sign=~~~~}}',
}
# If the file has an unknown extension it will be tagged with this template.
# In reality, there aren't unknown extensions; they are simply not allowed...
DELETE_IMMEDIATELY = {
'commons': '{{speedy|The file has .%s as extension. '
'Is it ok? Please check.}}',
'meta': '{{Delete|The file has .%s as extension.}}',
'ar': '{{شطب|الملف له .%s كامتداد.}}',
'arz': '{{مسح|الملف له .%s كامتداد.}}',
'en': '{{db-meta|The file has .%s as extension.}}',
'fa': '{{حذف سریع|تصویر %s اضافی است.}}',
'ga': '{{scrios|Tá iarmhír .%s ar an comhad seo.}}',
'hu': '{{azonnali|A fájlnak .%s a kiterjesztése}}',
'it': '{{cancella subito|motivo=Il file ha come estensione ".%s"}}',
'ja': '{{db|知らないファイルフォーマット %s}}',
'ko': '{{delete|잘못된 파일 형식 (.%s)}}',
'ru': '{{db-badimage}}',
'sr': '{{speedy|Ова датотека садржи екстензију %s. '
'Молим вас да проверите да ли је у складу са правилима.}}',
'ta': '{{delete|'
'இந்தக் கோப்பு .%s என்றக் கோப்பு நீட்சியைக் கொண்டுள்ளது.}}',
'ur': '{{سریع حذف شدگی|اس ملف میں .%s بطور توسیع موجود ہے۔ }}',
'zh': '{{delete|未知檔案格式%s}}',
}
# That's the text that the bot will add if it doesn't find the license.
# Note: every __botnick__ will be replaced with your bot's nickname
# (feel free not to use if you don't need it)
NOTHING_NOTIFICATION = {
'commons': "\n{{subst:User:Filnik/untagged|File:%s}}\n\n''This message "
"was '''added automatically by ~~~''', if you need "
'some help about it, please read the text above again and '
'follow the links in it, if you still need help ask at the '
'[[File:Human-help-browser.svg|18px|link=Commons:Help desk|?]] '
"'''[[Commons:Help desk|->]][[Commons:Help desk]]''' in any "
"language you like to use.'' --~~~~",
'meta': '{{subst:No license notice|File:%s}}',
'ar': '{{subst:مصدر الملف|File:%s}} --~~~~',
'arz': '{{subst:file source|File:%s}} --~~~~',
'en': '{{subst:file source|File:%s}} --~~~~',
'fa': '{{subst:اخطار نگاره|%s}}',
'ga': '{{subst:Foinse na híomhá|File:%s}} --~~~~',
'hu': '{{subst:adjforrást|Kép:%s}}\n Ezt az üzenetet ~~~ automatikusan '
'helyezte el a vitalapodon, kérdéseddel fordulj a gazdájához, vagy '
'a [[WP:KF|Kocsmafalhoz]]. --~~~~',
'it': '{{subst:Progetto:Coordinamento/Immagini/Bot/Messaggi/Senza licenza|'
'%s|~~~}} --~~~~',
'ja': '\n{{subst:Image copyright|File:%s}}--~~~~',
'ko': '\n{{subst:User:Kwjbot IV/untagged|%s}} --~~~~',
'ru': '{{subst:Запрос о статусе файла|Файл:%s}} --~~~~',
'sr': '\n{{subst:Обавештење о датотеци без лиценце|%s}} --~~~~',
'sd': '{{subst:تصوير جو ذريعو|File:%s}}--~~~~',
'ta': '\n{{subst:Di-no license-notice|படிமம்:%s}} ~~~~',
'ur': '{{subst:ماخذ تصویر|File:%s}}--~~~~',
'zh': '\n{{subst:Uploadvionotice|File:%s}} ~~~~',
}
# This is a list of the bots that use this script in your project.
# NOTE: YOUR bot username will be automatically added.
BOT_LIST = {
'commons': ['Siebot', 'CommonsDelinker', 'Filbot', 'Sz-iwbot',
'ABFbot'],
'meta': ['MABot'],
'ar': ['MenoBot'],
'arz': ['MenoBot'],
'de': ['Xqbot'],
'en': ['OrphanBot'],
'fa': ['Amirobot'],
'ga': ['AllieBot'],
'it': ['Filbot', 'Nikbot', '.snoopybot.'],
'ja': ['Alexbot'],
'ko': ['Kwjbot IV'],
'ru': ['Rubinbot'],
'sr': ['KizuleBot'],
'ta': ['TrengarasuBOT'],
'ur': ['Shuaib-bot', 'Tahir-bot', 'SAMI.Bot'],
'zh': ['Alexbot'],
}
# The message that the bot will add the second time it finds another license
# problem.
SECOND_MESSAGE_WITHOUT_LICENSE = {
'hu': '\nSzia! Úgy tűnik a [[:Kép:%s]] képpel is hasonló a probléma, '
'mint az előbbivel. Kérlek olvasd el a [[WP:KÉPLIC|feltölthető '
'képek]]ről szóló oldalunk, és segítségért fordulj a [[WP:KF-JO|'
'Jogi kocsmafalhoz]]. Köszönöm --~~~~',
'it': ':{{subst:Progetto:Coordinamento/Immagini/Bot/Messaggi/Senza'
'licenza2|%s|~~~}} --~~~~',
}
# You can add some settings to a wiki page. In this way, you can change them
# without touching the code. That's useful if you are running the bot on
# Toolserver.
PAGE_WITH_SETTINGS = {
'commons': 'User:Filbot/Settings',
'it': 'Progetto:Coordinamento/Immagini/Bot/Settings#Settings',
'sr': 'User:KizuleBot/checkimages.py/подешавања',
'zh': 'User:Alexbot/cisettings#Settings',
}
# The bot can report some images (like the images that have the same name of an
# image on commons) This is the page where the bot will store them.
REPORT_PAGE = {
'commons': 'User:Filbot/Report',
'meta': 'User:MABot/Report',
'test': 'User:Pywikibot-test/Report',
'ar': 'User:MenoBot/Report',
'arz': 'User:MenoBot/Report',
'de': 'Benutzer:Xqbot/Report',
'en': 'User:Filnik/Report',
'fa': 'کاربر:Amirobot/گزارش تصویر',
'ga': 'User:AllieBot/ReportImages',
'hu': 'User:Bdamokos/Report',
'it': 'Progetto:Coordinamento/Immagini/Bot/Report',
'ja': 'User:Alexbot/report',
'ko': 'User:Kwjbot IV/Report',
'ru': 'User:Rubinbot/Report',
'sd': 'واپرائيندڙ:Kaleem Bhatti/درخواست تصوير',
'sr': 'User:KizuleBot/checkimages.py/дневник',
'ta': 'User:Trengarasu/commonsimages',
'ur': 'صارف:محمد شعیب/درخواست تصویر',
'zh': 'User:Alexsh/checkimagereport',
}
# If a template isn't a license but is included on a lot of images, it can
# be skipped so the image is analyzed without taking it into account. (the
# template must be in a list)
# Warning: Don't add templates like "en, de, it" because they are already
# added in the code below.
# Warning 2: The bot will use regex, so please make the names compatible
# (don't add "Template:" or {{ because they are already put in the regex).
# Warning 3: the part that uses this regex is case-insensitive (just to let
# you know...)
HIDDEN_TEMPLATE = {
# Put the other in the page on the project defined below
'commons': ['Template:Information'],
'meta': ['Template:Information'],
'test': ['Template:Information'],
'ar': ['Template:معلومات'],
'arz': ['Template:معلومات'],
'de': ['Template:Information'],
'en': ['Template:Information'],
'fa': ['الگو:اطلاعات'],
'fr': ['Template:Information'],
'ga': ['Template:Information'],
'hr': ['Template:Infoslika'],
'hu': ['Template:Információ', 'Template:Enwiki', 'Template:Azonnali'],
'it': ['Template:EDP', 'Template:Informazioni file',
'Template:Information', 'Template:Trademark',
'Template:Permissionotrs'],
'ja': ['Template:Information'],
'ko': ['Template:그림 정보'],
'ru': ['Template:Изображение',
'Template:Обоснование добросовестного использования'],
'sd': ['Template:معلومات'],
'sr': ['Шаблон:Информација', 'Шаблон:Non-free use rationale 2'],
'ta': ['Template:Information'],
'ur': ['Template:معلومات'],
'zh': ['Template:Information'],
}
# A page where there's a list of template to skip.
PAGE_WITH_HIDDEN_TEMPLATES = {
'commons': 'User:Filbot/White_templates#White_templates',
'it': 'Progetto:Coordinamento/Immagini/Bot/WhiteTemplates',
'ko': 'User:Kwjbot_IV/whitetemplates/list',
'sr': 'User:KizuleBot/checkimages.py/дозвољенишаблони',
}
# A page where there's a list of template to consider as licenses.
PAGE_WITH_ALOWED_TEMPLATES = {
'commons': 'User:Filbot/Allowed templates',
'de': 'Benutzer:Xqbot/Lizenzvorlagen',
'it': 'Progetto:Coordinamento/Immagini/Bot/AllowedTemplates',
'ko': 'User:Kwjbot_IV/AllowedTemplates',
'sr': 'User:KizuleBot/checkimages.py/дозвољенишаблони',
}
# Template added when the bot finds only a hidden template and nothing else.
# Note: every __botnick__ will be replaced with your bot's nickname
# (feel free not to use if you don't need it)
HIDDEN_TEMPALTE_NOTIFICATION = {
'commons': ("\n{{subst:User:Filnik/whitetemplate|File:%s}}\n\n''This "
'message was added automatically by ~~~, if you need '
'some help about it please read the text above again and '
'follow the links in it, if you still need help ask at the '
'[[File:Human-help-browser.svg|18px|link=Commons:Help desk|?]]'
" '''[[Commons:Help desk|→]] [[Commons:Help desk]]''' in any "
"language you like to use.'' --~~~~"),
'it': '{{subst:Progetto:Coordinamento/Immagini/Bot/Messaggi/'
'Template_insufficiente|%s|~~~}} --~~~~',
'ko': '\n{{subst:User:Kwj2772/whitetemplates|%s}} --~~~~',
}
# In this part there are the parameters for the dupe images.
# Put here the template that you want to put in the image to warn that it's a
# dupe. Put __image__ if you want only one image, __images__ if you want the
# whole list.
DUPLICATES_TEXT = {
'commons': '\n{{Dupe|__image__}}',
'de': '{{NowCommons}}',
'it': '\n{{Progetto:Coordinamento/Immagini/Bot/Template duplicati|'
'__images__}}',
'ru': '{{NCT|__image__}}',
'sr': '{{NowCommons|__image__}}',
}
# Message to put in the talk
DUPLICATES_USER_TALK_TEXT = {
'it': '{{subst:Progetto:Coordinamento/Immagini/Bot/Messaggi/Duplicati|'
'%s|%s|~~~}} --~~~~',
}
# Regex to detect the template put in the image's description to find the dupe
DUPLICATES_REGEX = {
'commons': r'\{\{(?:[Tt]emplate:|)(?:[Dd]up(?:licat|)e|[Bb]ad[ _][Nn]ame)'
r'[|}]',
'de': r'\{\{[nN](?:C|ow(?: c|[cC])ommons)[\|\}',
'it': r'\{\{(?:[Tt]emplate:|)[Pp]rogetto:[Cc]oordinamento/Immagini/Bot/'
r'Template duplicati[|}]',
'sr': r'\{\{[nN](?:C|ow(?: c|[cC])ommons)[\|\}',
}
# Category with the licenses and / or with subcategories with the other
# licenses.
CATEGORY_WITH_LICENSES = {
'commons': 'Category:License tags',
'meta': 'Category:License templates',
'test': 'Category:CC license tags',
'ar': 'تصنيف:قوالب حقوق الصور',
'arz': 'تصنيف:Wikipedia image copyright templates',
'de': 'Kategorie:Vorlage:Lizenz für Bilder',
'en': 'Category:Wikipedia file copyright templates',
'fa': 'رده:الگو:حق تکثیر پرونده',
'ga': "Catagóir:Clibeanna cóipchirt d'íomhánna",
'it': 'Categoria:Template Licenze copyright',
'ja': 'Category:画像の著作権表示テンプレート',
'ko': '분류:위키백과 그림 저작권 틀',
'ru': 'Category:Шаблоны:Лицензии файлов',
'sd': 'زمرو:وڪيپيڊيا فائل ڪاپي رائيٽ سانچا',
'sr': 'Категорија:Шаблони за слике',
'ta': 'Category:காப்புரிமை வார்ப்புருக்கள்',
'ur': 'زمرہ:ویکیپیڈیا سانچہ جات حقوق تصاویر',
'zh': 'Category:版權申告模板',
}
# Page where is stored the message to send as email to the users
EMAIL_PAGE_WITH_TEXT = {
# 'de': 'Benutzer:ABF/D3',
}
# Title of the email
EMAIL_SUBJECT = {
# 'de': 'Problemen mit Deinem Bild auf der Deutschen Wikipedia',
}
# Seems that uploader bots aren't interested in getting messages regarding the
# files that they upload... strange, huh?
# Format: [[user,regex], [user,regex]...] the regex is needed to match the user
# where to send the warning-msg
UPLOAD_BOTS = {
'commons': [['File Upload Bot (Magnus Manske)',
r'\|[Ss]ource=Transferred from .*?; '
r'transferred to Commons by \[\[User:(.*?)\]\]']],
}
# Service images that don't have to be deleted and/or reported have a template
# inside them (you can leave this param as None)
SERVICE_TEMPLATES = {
'it': ['Template:Immagine di servizio'],
}
# Add your project (in alphabetical order) if you want the bot to run on it
PROJECT_INSERTED = ['ar', 'arz', 'commons', 'de', 'en', 'fa', 'ga', 'hu', 'it',
'ja', 'ko', 'ru', 'meta', 'sd', 'sr', 'ta', 'test', 'ur',
'zh']
# END OF CONFIGURATION.
SETTINGS_REGEX = re.compile(r"""
<-------\ ------->\n
\*[Nn]ame\ ?=\ ?['"](.*?)['"]\n
\*([Ff]ind|[Ff]indonly)\ ?=\ ?(.*?)\n
\*[Ii]magechanges\ ?=\ ?(.*?)\n
\*[Ss]ummary\ ?=\ ?['"](.*?)['"]\n
\*[Hh]ead\ ?=\ ?['"](.*?)['"]\n
\*[Tt]ext\ ?=\ ?['"](.*?)['"]\n
\*[Mm]ex\ ?=\ ?['"]?([^\n]*?)['"]?\n
""", re.DOTALL | re.VERBOSE)
class LogIsFull(Error):
"""Log is full and the bot cannot add other data to prevent Errors."""
def print_with_time_zone(message) -> None:
"""Print the messages followed by the TimeZone encoded correctly."""
time_zone = time.strftime('%d %b %Y %H:%M:%S (UTC)', time.gmtime())
pywikibot.output('{} {}'.format(message.rstrip(), time_zone))
class CheckImagesBot:
"""A robot to check recently uploaded files."""
ignore_save_related_errors = True
ignore_server_errors = False
def __init__(
self,
site,
log_full_number: int = 25000,
sendemail_active: bool = False,
duplicates_report: bool = False,
log_full_error: bool = True,
max_user_notify=None
) -> None:
"""Initializer, define some instance variables."""
self.site = site
self.log_full_error = log_full_error
self.log_full_number = log_full_number
self.rep_page = i18n.translate(self.site, REPORT_PAGE)
if not self.rep_page:
raise TranslationError(
'No report page provided in "REPORT_PAGE" dict '
'for your project!')
self.image_namespace = site.namespaces.FILE.custom_name + ':'
self.list_entry = '\n* [[:{}%s]] '.format(self.image_namespace)
# The summary of the report
self.com = i18n.twtranslate(self.site, 'checkimages-log-comment')
hiddentemplates_raw = i18n.translate(self.site, HIDDEN_TEMPLATE)
if not hiddentemplates_raw:
raise TranslationError(
'No non-license templates provided in "HIDDEN_TEMPLATE" dict '
'for your project!')
self.hiddentemplates = {
pywikibot.Page(self.site, tmp, ns=self.site.namespaces.TEMPLATE)
for tmp in hiddentemplates_raw}
self.page_hidden = i18n.translate(self.site,
PAGE_WITH_HIDDEN_TEMPLATES)
self.page_allowed = i18n.translate(self.site,
PAGE_WITH_ALOWED_TEMPLATES)
self.comment = i18n.twtranslate(self.site.lang,
'checkimages-source-tag-comment')
        # Adding the bot's nickname to the notification text if needed.
self.bots = i18n.translate(self.site, BOT_LIST)
if self.bots:
self.bots.append(site.username())
else:
self.bots = [site.username()]
self.sendemail_active = sendemail_active
self.skip_list = []
self.duplicates_report = duplicates_report
if max_user_notify:
self.num_notify = collections.defaultdict(lambda: max_user_notify)
else:
self.num_notify = None
        # Load the licenses only once, at initialization
self.list_licenses = self.load_licenses()
def set_parameters(self, image) -> None:
"""Set parameters."""
# ensure we have a FilePage
self.image = pywikibot.FilePage(image)
self.image_name = image.title(with_ns=False)
self.timestamp = None
self.uploader = None
def report(
self,
newtext,
image_to_report,
notification=None,
head=None,
notification2=None,
unver: bool = True,
comm_talk=None,
comm_image=None
) -> None:
"""Function to make the reports easier."""
self.image_to_report = image_to_report
self.newtext = newtext
if not newtext:
raise TranslationError(
'No no-license template provided in "N_TXT" dict '
'for your project!')
self.head = head or ''
self.notification = notification
self.notification2 = notification2
if self.notification:
self.notification = re.sub(r'__botnick__', self.site.username(),
notification)
if self.notification2:
self.notification2 = re.sub(r'__botnick__', self.site.username(),
notification2)
self.comm_talk = comm_talk
self.comm_image = comm_image or self.comment
image_tagged = False
try:
image_tagged = self.tag_image(unver)
except NoPageError:
pywikibot.output('The page has been deleted! Skip!')
except EditConflictError:
pywikibot.output('Edit conflict! Skip!')
if image_tagged and self.notification:
try:
self.put_mex_in_talk()
except EditConflictError:
pywikibot.output('Edit Conflict! Retrying...')
try:
self.put_mex_in_talk()
except Exception:
pywikibot.exception()
pywikibot.output(
'Another error... skipping the user...')
def upload_bot_change_function(
self,
report_page_text,
upload_bot_array
) -> str:
"""Detect the user that has uploaded the file through upload bot."""
regex = upload_bot_array[1]
results = re.findall(regex, report_page_text)
if results:
luser = results[0]
return luser
        # we can't find the user; fall back to the upload bot's own name
return upload_bot_array[0]
def tag_image(self, put: bool = True) -> bool:
"""Add template to the Image page and find out the uploader."""
# Get the image's description
report_page_object = pywikibot.FilePage(self.site,
self.image_to_report)
try:
report_page_text = report_page_object.get()
except NoPageError:
pywikibot.output(self.image_name + ' has been deleted...')
return False
        # You can also use this function to find only the user that
        # has uploaded the image (FIXME: rewrite this part a bit)
if put:
pywikibot.showDiff(report_page_text,
self.newtext + '\n' + report_page_text)
pywikibot.output(self.comm_image)
try:
report_page_object.put(self.newtext + '\n' + report_page_text,
summary=self.comm_image)
except LockedPageError:
pywikibot.output('File is locked. Skipping.')
return False
        # report_page_object is the image page object.
try:
if report_page_object == self.image and self.uploader:
nick = self.uploader
else:
nick = report_page_object.latest_file_info.user
except PageRelatedError:
pywikibot.output(
'Seems that {} has only the description and not the file...'
.format(self.image_to_report))
repme = self.list_entry + "problems '''with the APIs'''"
self.report_image(self.image_to_report, self.rep_page, self.com,
repme)
return False
upload_bots = i18n.translate(self.site, UPLOAD_BOTS)
user = pywikibot.User(self.site, nick)
luser = user.title(as_url=True)
if upload_bots:
for upload_bot in upload_bots:
if upload_bot[0] == luser:
luser = self.upload_bot_change_function(report_page_text,
upload_bot)
user = pywikibot.User(self.site, luser)
self.talk_page = user.getUserTalkPage()
self.luser = luser
return True
def put_mex_in_talk(self) -> None:
"""Function to put the warning in talk page of the uploader."""
commento2 = i18n.twtranslate(self.site.lang,
'checkimages-source-notice-comment')
email_page_name = i18n.translate(self.site, EMAIL_PAGE_WITH_TEXT)
email_subj = i18n.translate(self.site, EMAIL_SUBJECT)
if self.notification2:
self.notification2 %= self.image_to_report
else:
self.notification2 = self.notification
second_text = False
        # Getting the talk page's history, to check if there is another
        # notice...
try:
testoattuale = self.talk_page.get()
history = list(self.talk_page.revisions(total=10))
latest_user = history[0]['user']
pywikibot.output(
'The latest user that has written something is: '
+ latest_user)
# A block to prevent the second message if the bot also
# welcomed users...
if latest_user in self.bots and len(history) > 1:
second_text = True
except IsRedirectPageError:
pywikibot.output(
'The user talk is a redirect, trying to get the right talk...')
try:
self.talk_page = self.talk_page.getRedirectTarget()
testoattuale = self.talk_page.get()
except NoPageError:
testoattuale = i18n.translate(self.site, EMPTY)
except NoPageError:
pywikibot.output('The user page is blank')
testoattuale = i18n.translate(self.site, EMPTY)
if self.comm_talk:
commentox = self.comm_talk
else:
commentox = commento2
if second_text:
new_text = '{}\n\n{}'.format(testoattuale, self.notification2)
else:
new_text = '{}\n\n== {} ==\n{}'.format(testoattuale, self.head,
self.notification)
# Check maximum number of notifications for this talk page
if (self.num_notify is not None
and self.num_notify[self.talk_page.title()] == 0):
pywikibot.output('Maximum notifications reached, skip.')
return
try:
self.talk_page.put(new_text, summary=commentox, minor=False)
except PageSaveRelatedError as e:
if not self.ignore_save_related_errors:
raise
err = e
except ServerError as e:
if not self.ignore_server_errors:
raise
err = e
else:
if self.num_notify is not None:
self.num_notify[self.talk_page.title()] -= 1
err = None
if err:
pywikibot.exception(err)
pywikibot.output('Skipping saving talk page {}'
.format(self.talk_page))
if email_page_name and email_subj:
email_page = pywikibot.Page(self.site, email_page_name)
try:
email_text = email_page.get()
except (NoPageError, IsRedirectPageError):
return
if self.sendemail_active:
text_to_send = re.sub(r'__user-nickname__', r'{}'
.format(self.luser), email_text)
email_class = pywikibot.User(self.site, self.luser)
try:
email_class.send_email(email_subj, text_to_send)
except NotEmailableError:
pywikibot.output('User is not mailable, aborted')
def regex_generator(self, regexp, textrun) -> Generator[pywikibot.FilePage,
None, None]:
"""Find page to yield using regex to parse text."""
regex = re.compile(r'{}'.format(regexp), re.DOTALL)
results = regex.findall(textrun)
for image in results:
yield pywikibot.FilePage(self.site, image)
def load_hidden_templates(self) -> None:
"""Function to load the white templates."""
        # A template such as {{en}} is not a license! Add those to the
        # template whitelist as well...
for key in Family.load('wikipedia').langs.keys():
self.hiddentemplates.add(pywikibot.Page(
self.site, 'Template:{}'.format(key)))
# Hidden template loading
if self.page_hidden:
try:
page_hidden_text = pywikibot.Page(self.site,
self.page_hidden).get()
except (NoPageError, IsRedirectPageError):
page_hidden_text = ''
for element in self.load(page_hidden_text):
self.hiddentemplates.add(pywikibot.Page(self.site, element))
def important_image(self, list_given) -> pywikibot.FilePage:
"""
Get tuples of image and time, return the most used or oldest image.
:param list_given: a list of tuples which hold seconds and FilePage
:type list_given: list
:return: the most used or oldest image
"""
# find the most used image
inx_found = None # index of found image
max_usage = 0 # hold max amount of using pages
for num, element in enumerate(list_given):
image = element[1]
image_used = len(list(image.usingPages()))
if image_used > max_usage:
max_usage = image_used
inx_found = num
if inx_found is not None:
return list_given[inx_found][1]
# find the oldest image
sec, image = max(list_given, key=lambda element: element[0])
return image
def check_image_on_commons(self) -> bool:
"""Checking if the file is on commons."""
pywikibot.output('Checking if [[{}]] is on commons...'
.format(self.image_name))
try:
hash_found = self.image.latest_file_info.sha1
except NoPageError:
return False # Image deleted, no hash found. Skip the image.
site = pywikibot.Site('commons', 'commons')
commons_image_with_this_hash = next(
iter(site.allimages(sha1=hash_found, total=1)), None)
if commons_image_with_this_hash:
service_template = pywikibot.translate(self.site,
SERVICE_TEMPLATES)
templates_in_the_image = self.image.templates()
if service_template is not None:
for template in service_template:
if pywikibot.Page(self.site,
template) in templates_in_the_image:
pywikibot.output(
"{} is on commons but it's a service image."
.format(self.image_name))
return True # continue with the check-part
pywikibot.output(self.image_name + ' is on commons!')
if self.image.file_is_shared():
pywikibot.output(
"But, the file doesn't exist on your project! Skip...")
# We have to skip the check part for that image because
# it's on commons but someone has added something on your
# project.
return False
if re.findall(r'\bstemma\b', self.image_name.lower()) and \
self.site.code == 'it':
pywikibot.output(
"{} has 'stemma' inside, means that it's ok."
.format(self.image_name))
return True
            # It's not only on commons but the image needs a check;
            # the second usually is a url or something like that.
            # Compare the two in the same way, both as urls.
repme = ((self.list_entry
+ "is also on '''Commons''': [[commons:File:%s]]")
% (self.image_name,
commons_image_with_this_hash.title(
with_ns=False)))
if (self.image.title(as_url=True)
== commons_image_with_this_hash.title(as_url=True)):
repme += ' (same name)'
self.report_image(self.image_name, self.rep_page, self.com, repme,
addings=False)
return True
def check_image_duplicated(self, duplicates_rollback) -> bool:
"""Function to check the duplicated files."""
dup_text = i18n.translate(self.site, DUPLICATES_TEXT)
dup_regex = i18n.translate(self.site, DUPLICATES_REGEX)
dup_talk_text = i18n.translate(self.site, DUPLICATES_USER_TALK_TEXT)
# Head of the message given to the author
dup_talk_head = i18n.twtranslate(
self.site, 'checkimages-doubles-head')
# Comment while bot reports the problem in the uploader's talk
dup_comment_talk = i18n.twtranslate(
self.site, 'checkimages-doubles-talk-comment')
# Comment used by the bot while it reports the problem in the image
dup_comment_image = i18n.twtranslate(
self.site, 'checkimages-doubles-file-comment')
image_page = pywikibot.FilePage(self.site, self.image_name)
hash_found = image_page.latest_file_info.sha1
duplicates = list(self.site.allimages(sha1=hash_found))
if not duplicates:
return False # Image deleted, no hash found. Skip the image.
if len(duplicates) > 1:
xdict = {'en':
'%(name)s has {{PLURAL:count'
'|a duplicate! Reporting it'
'|%(count)s duplicates! Reporting them}}...'}
pywikibot.output(i18n.translate('en', xdict,
{'name': self.image_name,
'count': len(duplicates) - 1}))
if dup_text and dup_regex:
time_image_list = []
for dup_page in duplicates:
if (dup_page.title(as_url=True) != self.image.title(
as_url=True)
or self.timestamp is None):
try:
self.timestamp = (
dup_page.latest_file_info.timestamp)
except PageRelatedError:
continue
data = self.timestamp.timetuple()
data_seconds = time.mktime(data)
time_image_list.append([data_seconds, dup_page])
older_image_page = self.important_image(time_image_list)
older_page_text = older_image_page.text
            # What if there are more than two images?
string = ''
images_to_tag_list = []
for dup_page in duplicates:
if dup_page == older_image_page:
                    # the most used or oldest image;
                    # don't report it as a duplicate as well
continue
try:
dup_page_text = dup_page.text
except NoPageError:
continue
if not (re.findall(dup_regex, dup_page_text)
or re.findall(dup_regex, older_page_text)):
pywikibot.output(
'{} is a duplicate and has to be tagged...'
.format(dup_page))
images_to_tag_list.append(dup_page.title())
string += '* {}\n'.format(
dup_page.title(as_link=True, textlink=True))
else:
pywikibot.output(
"Already put the dupe-template in the files's page"
" or in the dupe's page. Skip.")
return False # Ok - Let's continue the checking phase
            # True if the images are not to be tagged as dupes
only_report = False
# put only one image or the whole list according to the request
if '__images__' in dup_text:
text_for_the_report = dup_text.replace(
'__images__',
'\n{}* {}\n'.format(
string,
older_image_page.title(
as_link=True, textlink=True)))
else:
text_for_the_report = dup_text.replace(
'__image__',
older_image_page.title(as_link=True, textlink=True))
            # Two iterations: report the "problem" to the user only once
            # (for the last image)
if len(images_to_tag_list) > 1:
for image_to_tag in images_to_tag_list[:-1]:
fp = pywikibot.FilePage(self.site, image_to_tag)
already_reported_in_past = fp.revision_count(self.bots)
                    # if the bot has already edited this page at least
                    # duplicates_rollback times, switch to report-only
                    # (hence the "num - 1" comparison)
if already_reported_in_past > duplicates_rollback - 1:
only_report = True
break
                    # Remove this image from the list we are reporting on
image = self.image_namespace + image_to_tag
text_for_the_report = re.sub(
r'\n\*\[\[:{}\]\]'.format(re.escape(image)),
'', text_for_the_report)
self.report(text_for_the_report, image_to_tag,
comm_image=dup_comment_image, unver=True)
if images_to_tag_list and not only_report:
fp = pywikibot.FilePage(self.site, images_to_tag_list[-1])
already_reported_in_past = fp.revision_count(self.bots)
image_title = re.escape(self.image.title(as_url=True))
from_regex = (r'\n\*\[\[:{}{}\]\]'
.format(self.image_namespace, image_title))
                # Remove this image from the list we are reporting on
text_for_the_report = re.sub(from_regex, '',
text_for_the_report)
                # if the bot has already edited this page at least
                # duplicates_rollback times, only report (num - 1)
if already_reported_in_past > duplicates_rollback - 1 or \
not dup_talk_text:
only_report = True
else:
self.report(
text_for_the_report, images_to_tag_list[-1],
dup_talk_text
% (older_image_page.title(with_ns=True),
string),
dup_talk_head, comm_talk=dup_comment_talk,
comm_image=dup_comment_image, unver=True)
if self.duplicates_report or only_report:
if only_report:
repme = ((self.list_entry + 'has the following duplicates '
"('''forced mode'''):")
% self.image.title(as_url=True))
else:
repme = (
(self.list_entry + 'has the following duplicates:')
% self.image.title(as_url=True))
for dup_page in duplicates:
if (dup_page.title(as_url=True)
== self.image.title(as_url=True)):
                    # the image itself; don't report it as its own
                    # duplicate
continue
repme += '\n** [[:{}{}]]'.format(
self.image_namespace, dup_page.title(as_url=True))
result = self.report_image(self.image_name, self.rep_page,
self.com, repme, addings=False)
if not result:
return True # If Errors, exit (but continue the check)
if older_image_page.title() != self.image_name:
# The image is a duplicate, it will be deleted. So skip the
# check-part, useless
return False
return True # Ok - No problem. Let's continue the checking phase
def report_image(self, image_to_report, rep_page=None, com=None,
rep_text=None, addings: bool = True) -> bool:
"""Report the files to the report page when needed."""
rep_page = rep_page or self.rep_page
com = com or self.com
rep_text = rep_text or self.list_entry + '~~~~~'
if addings:
            # Add the name of the image to the report if not done already
rep_text = rep_text % image_to_report
another_page = pywikibot.Page(self.site, rep_page)
try:
text_get = another_page.get()
except NoPageError:
text_get = ''
except IsRedirectPageError:
text_get = another_page.getRedirectTarget().get()
# Don't care for differences inside brackets.
end = rep_text.find('(', max(0, rep_text.find(']]')))
if end < 0:
end = None
short_text = rep_text[rep_text.find('[['):end].strip()
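        # Illustrative example: for a rep_text like
        # "\n*[[:File:Foo.jpg]] (3 dupes) ~~~~~", short_text becomes
        # "[[:File:Foo.jpg]]", so a later report that differs only inside
        # the parentheses is still recognized as already present.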
reported = True
# Skip if the message is already there.
if short_text in text_get:
pywikibot.output('{} is already in the report page.'
.format(image_to_report))
reported = False
elif len(text_get) >= self.log_full_number:
if self.log_full_error:
raise LogIsFull(
'The log page ({}) is full! Please delete the old files '
'reported.'.format(another_page.title()))
            pywikibot.output(
                'The log page ({}) is full! Please delete the old files '
                'reported. Skip!'.format(another_page.title()))
# Don't report, but continue with the check
# (we don't know if this is the first time we check this file
# or not)
else:
# Adding the log
another_page.put(text_get + rep_text, summary=com, force=True,
minor=False)
pywikibot.output('...Reported...')
return reported
def takesettings(self) -> None:
"""Function to take the settings from the wiki."""
settings_page = i18n.translate(self.site, PAGE_WITH_SETTINGS)
try:
if not settings_page:
self.settings_data = None
else:
page = pywikibot.Page(self.site, settings_page)
self.settings_data = []
try:
testo = page.get()
number = 1
for m in SETTINGS_REGEX.finditer(testo):
name = str(m.group(1))
find_tipe = str(m.group(2))
find = str(m.group(3))
imagechanges = str(m.group(4))
summary = str(m.group(5))
head = str(m.group(6))
text = str(m.group(7))
mexcatched = str(m.group(8))
tupla = [number, name, find_tipe, find, imagechanges,
summary, head, text, mexcatched]
self.settings_data += [tupla]
number += 1
if not self.settings_data:
pywikibot.output(
"You've set wrongly your settings, please take a "
'look to the relative page. (run without them)')
self.settings_data = None
except NoPageError:
pywikibot.output("The settings' page doesn't exist!")
self.settings_data = None
except Error:
pywikibot.output(
                'Problems with loading the settings; running without them.')
self.settings_data = None
self.some_problem = False
if not self.settings_data:
self.settings_data = None
# Real-Time page loaded
if self.settings_data:
pywikibot.output('>> Loaded the real-time page... <<')
else:
pywikibot.output('>> No additional settings found! <<')
def load_licenses(self) -> List[pywikibot.Page]:
"""Load the list of the licenses."""
cat_name = i18n.translate(self.site, CATEGORY_WITH_LICENSES)
if not cat_name:
raise TranslationError(
'No allowed licenses category provided in '
'"CATEGORY_WITH_LICENSES" dict for your project!')
pywikibot.output('\nLoading the allowed licenses...\n')
cat = pywikibot.Category(self.site, cat_name)
list_licenses = list(cat.articles())
if self.site.code == 'commons':
no_licenses_to_skip = pywikibot.Category(self.site,
'License-related tags')
for license_given in no_licenses_to_skip.articles():
if license_given in list_licenses:
list_licenses.remove(license_given)
pywikibot.output('')
# Add the licenses set in the default page as licenses to check
if self.page_allowed:
try:
page_allowed_text = pywikibot.Page(self.site,
self.page_allowed).get()
except (NoPageError, IsRedirectPageError):
page_allowed_text = ''
for name_license in self.load(page_allowed_text):
page_license = pywikibot.Page(self.site, name_license)
if page_license not in list_licenses:
# the list has wiki-pages
list_licenses.append(page_license)
return list_licenses
def mini_template_check(self, template) -> bool:
"""Check if template is in allowed licenses or in licenses to skip."""
        # the list_licenses are loaded in the __init__
        # (not to load them multiple times)
if template in self.list_licenses:
self.license_selected = template.title(with_ns=False)
self.seems_ok = True
            # let the last "fake" license be detected normally
self.license_found = self.license_selected
return True
if template in self.hiddentemplates:
            # if the white template is not in the image's description,
            # we don't care
try:
self.all_licenses.remove(template)
except ValueError:
return False
else:
self.white_templates_found = True
return False
def template_in_list(self) -> None:
"""
Check if template is in list.
The problem is the calls to the Mediawiki system because they can be
pretty slow. While searching in a list of objects is really fast, so
first of all let's see if we can find something in the info that we
already have, then make a deeper check.
"""
for template in self.licenses_found:
if self.mini_template_check(template):
break
if not self.license_found:
for template in self.licenses_found:
if template.isRedirectPage():
template = template.getRedirectTarget()
if self.mini_template_check(template):
break
def smart_detection(self) -> Tuple[str, bool]:
"""
Detect templates.
The bot instead of checking if there's a simple template in the
image's description, checks also if that template is a license or
something else. In this sense this type of check is smart.
"""
self.seems_ok = False
self.license_found = None
self.white_templates_found = False
regex_find_licenses = re.compile(
r'(?<!\{)\{\{(?:[Tt]emplate:|)([^{]+?)[|\n<}]', re.DOTALL)
regex_are_licenses = re.compile(
r'(?<!\{)\{\{(?:[Tt]emplate:|)([^{]+?)\}\}', re.DOTALL)
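        # Illustrative example: in '{{GFDL|migration=relicense}}' the first
        # regex captures 'GFDL'; the (?<!\{) lookbehind keeps it from
        # matching inside {{{parameter}}} constructs.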
while True:
self.load_hidden_templates()
self.licenses_found = self.image.templates()
templates_in_the_image_raw = regex_find_licenses.findall(
self.image_check_text)
if not self.licenses_found and templates_in_the_image_raw:
                # {{nameTemplate|something <- this is not a template; make
                # sure we haven't caught something like that.
licenses_test = regex_are_licenses.findall(
self.image_check_text)
if not self.licenses_found and licenses_test:
raise Error(
"Invalid or broken templates found in the image's "
'page {}!'.format(self.image))
self.all_licenses = []
if not self.list_licenses:
raise TranslationError(
'No allowed licenses found in "CATEGORY_WITH_LICENSES" '
'category for your project!')
            # Keep only the templates actually present in the image's
            # description
for template_selected in templates_in_the_image_raw:
tp = pywikibot.Page(self.site, template_selected)
for template_real in self.licenses_found:
if (tp.title(as_url=True, with_ns=False).lower()
== template_real.title(as_url=True,
with_ns=False).lower()):
if template_real not in self.all_licenses:
self.all_licenses.append(template_real)
break
if self.licenses_found:
self.template_in_list()
if not self.license_found and self.all_licenses:
self.all_licenses = [
template.getRedirectTarget()
if template.isRedirectPage() else template
for template in self.all_licenses if template.exists()]
if self.all_licenses:
self.license_found = self.all_licenses[0].title()
# If it has "some_problem" it must check the additional settings.
self.some_problem = False
if self.settings_data:
# use additional settings
self.find_additional_problems()
if self.some_problem:
if self.mex_used in self.image_check_text:
pywikibot.output('File already fixed. Skipping.')
else:
pywikibot.output(
"The file's description for {} contains {}..."
.format(self.image_name, self.name_used))
if self.mex_used.lower() == 'default':
self.mex_used = self.unvertext
if self.imagestatus_used:
reported = True
else:
reported = self.report_image(self.image_name)
if reported:
self.report(self.mex_used, self.image_name, self.text_used,
self.head_used, None,
self.imagestatus_used, self.summary_used)
else:
pywikibot.output('Skipping the file...')
self.some_problem = False
else:
if not self.seems_ok and self.license_found:
rep_text_license_fake = ((self.list_entry
+ "seems to have a ''fake license'',"
' license detected:'
' <nowiki>%s</nowiki>') %
(self.image_name, self.license_found))
print_with_time_zone(
'{} seems to have a fake license: {}, reporting...'
.format(self.image_name, self.license_found))
self.report_image(self.image_name,
rep_text=rep_text_license_fake,
addings=False)
elif self.license_found:
pywikibot.output('[[%s]] seems ok, license found: {{%s}}...'
% (self.image_name, self.license_found))
return (self.license_found, self.white_templates_found)
def load(self, raw) -> List[str]:
"""Load a list of objects from a string using regex."""
list_loaded = []
        # Extract each quoted string with a regex and collect the unique
        # values in a list
regl = r"(\"|\')(.*?)\1(?:,|\])"
pl = re.compile(regl)
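        # Illustrative example: a raw string like ["{{GFDL}}", '{{PD}}']
        # yields ['{{GFDL}}', '{{PD}}']; the replace() below unescapes
        # doubled backslashes coming from the wiki page.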
for xl in pl.finditer(raw):
word = xl.group(2).replace('\\\\', '\\')
if word not in list_loaded:
list_loaded.append(word)
return list_loaded
def skip_images(self, skip_number, limit) -> bool:
"""Given a number of files, skip the first -number- files."""
        # If there are more images to skip than images to check, cap the
        # skip count at the limit
if skip_number == 0:
pywikibot.output('\t\t>> No files to skip...<<')
return False
skip_number = min(skip_number, limit)
        # Print a starting message only if no images have been skipped
if not self.skip_list:
pywikibot.output(
i18n.translate(
'en',
'Skipping the first {{PLURAL:num|file|%(num)s files}}:\n',
{'num': skip_number}))
# If we still have pages to skip:
if len(self.skip_list) < skip_number:
pywikibot.output('Skipping {}...'.format(self.image_name))
self.skip_list.append(self.image_name)
if skip_number == 1:
pywikibot.output('')
return True
pywikibot.output('')
return False
@staticmethod
def wait(generator, wait_time) -> Generator[pywikibot.FilePage, None,
None]:
"""
Skip the images uploaded before x seconds.
Let the users to fix the image's problem alone in the first x seconds.
"""
print_with_time_zone(
'Skipping the files uploaded less than {} seconds ago..'
.format(wait_time))
for page in generator:
image = pywikibot.FilePage(page)
try:
timestamp = image.latest_file_info.timestamp
except PageRelatedError:
continue
now = pywikibot.Timestamp.utcnow()
delta = now - timestamp
if delta.total_seconds() > wait_time:
yield image
            else:
                if delta.days > 0:
                    pywikibot.warning(
                        'Skipping {}, uploaded {} days ago..'
                        .format(image.title(), delta.days))
                else:
                    pywikibot.warning(
                        'Skipping {}, uploaded {} seconds ago..'
                        .format(image.title(), delta.seconds))
def is_tagged(self) -> bool:
"""Understand if a file is already tagged or not."""
# TODO: enhance and use textlib.MultiTemplateMatchBuilder
# Is the image already tagged? If yes, no need to double-check, skip
no_license = i18n.translate(self.site, TXT_FIND)
if not no_license:
raise TranslationError(
'No no-license templates provided in "TXT_FIND" dict '
'for your project!')
for i in no_license:
            # If the entry contains '{{' use a regex, otherwise don't
            # (without the '{{' it may not be a template and the regex
            # would be wrong)
if '{{' in i:
regex_pattern = re.compile(
r'\{\{(?:template)?%s ?(?:\||\r?\n|\}|<|/) ?'
% i.split('{{')[1].replace(' ', '[ _]'), re.I)
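                # The pattern tolerates a 'template' prefix, spaces or
                # underscores in the name, and any of '|', newline, '}',
                # '<' or '/' right after it, case-insensitively.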
result = regex_pattern.findall(self.image_check_text)
if result:
return True
elif i.lower() in self.image_check_text:
return True
return False
def find_additional_problems(self) -> None:
"""Extract additional settings from configuration page."""
# In every tuple there's a setting configuration
for tupla in self.settings_data:
name = tupla[1]
find_tipe = tupla[2]
find = tupla[3]
find_list = self.load(find)
imagechanges = tupla[4]
if imagechanges.lower() == 'false':
imagestatus = False
elif imagechanges.lower() == 'true':
imagestatus = True
else:
pywikibot.error('Imagechanges set wrongly!')
self.settings_data = None
break
summary = tupla[5]
head_2 = tupla[6]
if head_2.count('==') == 2:
head_2 = re.findall(r'\s*== *(.+?) *==\s*', head_2)[0]
text = tupla[7] % self.image_name
mex_catched = tupla[8]
for k in find_list:
if find_tipe.lower() == 'findonly':
search_results = re.findall(r'{}'.format(k.lower()),
self.image_check_text.lower())
if search_results:
if search_results[0] == self.image_check_text.lower():
self.some_problem = True
self.text_used = text
self.head_used = head_2
self.imagestatus_used = imagestatus
self.name_used = name
self.summary_used = summary
self.mex_used = mex_catched
break
elif find_tipe.lower() == 'find':
if re.findall(r'{}'.format(k.lower()),
self.image_check_text.lower()):
self.some_problem = True
self.text_used = text
self.head_used = head_2
self.imagestatus_used = imagestatus
self.name_used = name
self.summary_used = summary
self.mex_used = mex_catched
continue
def check_step(self) -> None:
"""Check a single file page."""
        # 'something' holds the minimal requirements for an image
        # description; if one of these matches, no tagging will take place
        # (if there aren't other issues).
        # The MIT license is ok on the Italian Wikipedia, so leave this
        # here as well.
        # Don't put '}}' here, please: it's useless and can cause problems.
something = ['{{']
# Allowed extensions
try:
allowed_formats = self.site.siteinfo.get(
'fileextensions', get_default=False)
except KeyError:
allowed_formats = []
else:
allowed_formats = [item['ext'].lower() for item in allowed_formats]
brackets = False
delete = False
notification = None
# get the extension from the image's name
extension = self.image_name.split('.')[-1]
# Load the notification messages
hidden_template_notification = i18n.translate(
self.site, HIDDEN_TEMPALTE_NOTIFICATION)
self.unvertext = i18n.translate(self.site, N_TXT)
di = i18n.translate(self.site, DELETE_IMMEDIATELY)
        # The header of the unknown-extension message.
dih = i18n.twtranslate(self.site, 'checkimages-unknown-extension-head')
        # Text that will be added if the bot finds an unknown extension.
din = i18n.twtranslate(self.site,
'checkimages-unknown-extension-msg') + ' ~~~~'
        # Header that the bot will add if the image doesn't have a license.
nh = i18n.twtranslate(self.site, 'checkimages-no-license-head')
        # Summary for the immediate deletion.
dels = i18n.twtranslate(self.site, 'checkimages-deletion-comment')
nn = i18n.translate(self.site, NOTHING_NOTIFICATION)
smwl = i18n.translate(self.site, SECOND_MESSAGE_WITHOUT_LICENSE)
try:
self.image_check_text = self.image.get()
except NoPageError:
pywikibot.output('Skipping {} because it has been deleted.'
.format(self.image_name))
return
except IsRedirectPageError:
pywikibot.output("Skipping {} because it's a redirect."
.format(self.image_name))
return
        # Strip the sections in which templates cannot be loaded
regex_nowiki = re.compile(r'<nowiki>(.*?)</nowiki>', re.DOTALL)
regex_pre = re.compile(r'<pre>(.*?)</pre>', re.DOTALL)
self.image_check_text = regex_nowiki.sub('', self.image_check_text)
self.image_check_text = regex_pre.sub('', self.image_check_text)
        # Delete the useless templates from the description (before adding
        # anything to the image the original text will be reloaded, don't
        # worry).
if self.is_tagged():
print_with_time_zone('{} is already tagged.'
.format(self.image_name))
return
        # 'something' is the list with '{{', the MIT License and so on.
for a_word in something:
if a_word in self.image_check_text:
# There's a template, probably a license
brackets = True
        # Is the extension allowed? (is it an image or e.g. a .xls file?)
if allowed_formats and extension.lower() not in allowed_formats:
delete = True
(license_found, hidden_template_found) = self.smart_detection()
# Here begins the check block.
if brackets and license_found:
return
if delete:
pywikibot.output('{} is not a file!'.format(self.image_name))
if not di:
pywikibot.output('No localized message given for '
"'DELETE_IMMEDIATELY'. Skipping.")
return
# Some formatting for delete immediately template
dels = dels % {'adding': di}
di = '\n' + di
# Modify summary text
config.default_edit_summary = dels
canctext = di % extension
notification = din % {'file': self.image.title(as_link=True,
textlink=True)}
head = dih
self.report(canctext, self.image_name, notification, head)
return
if not self.image_check_text.strip(): # empty image description
            pywikibot.output(
                "The file's description for {} does not contain a license "
                'template!'.format(self.image_name))
if hidden_template_found and hidden_template_notification:
notification = hidden_template_notification % self.image_name
elif nn:
notification = nn % self.image_name
head = nh
self.report(self.unvertext, self.image_name, notification, head,
smwl)
return
pywikibot.output('{} has only text and not the specific '
'license...'.format(self.image_name))
if hidden_template_found and hidden_template_notification:
notification = hidden_template_notification % self.image_name
elif nn:
notification = nn % self.image_name
head = nh
self.report(self.unvertext, self.image_name, notification, head, smwl)
def main(*args: str) -> bool:
"""
Process command line arguments and invoke bot.
If args is an empty list, sys.argv is used.
:param args: command line arguments
"""
# Command line configurable parameters
    repeat = True  # Restart after having checked all the images?
    limit = 80  # How many images to check?
    time_sleep = 30  # How long to sleep after each check?
    skip_number = 0  # How many images to skip before checking?
    wait_time = 0  # How long to sleep before each check?
commons_active = False # Is there an image with the same name at commons?
normal = False # Check the new images or use another generator?
url_used = False # Use the url-related function instead of the new-pages
regex_gen = False # Use the regex generator
duplicates_active = False # Use the duplicate option
duplicates_report = False # Use the duplicate-report option
max_user_notify = None
sendemail_active = False # Use the send-email
log_full_error = True # Raise an error when the log is full
generator = None
unknown = [] # unknown parameters
local_args = pywikibot.handle_args(args)
site = pywikibot.Site()
    # The local parameters are handled below.
for arg in local_args:
option, _, value = arg.partition(':')
if option == '-limit':
limit = int(value or pywikibot.input(
'How many files do you want to check?'))
elif option == '-sleep':
time_sleep = int(value or pywikibot.input(
'How many seconds do you want runs to be apart?'))
elif option == '-break':
repeat = False
elif option == '-nologerror':
log_full_error = False
elif option == '-commons':
commons_active = True
elif option == '-duplicatesreport':
duplicates_report = True
elif option == '-duplicates':
duplicates_active = True
duplicates_rollback = int(value or 1)
elif option == '-maxusernotify':
max_user_notify = int(value or pywikibot.input(
'What should be the maximum number of notifications per user '
'per check?'))
elif option == '-sendemail':
sendemail_active = True
elif option == '-skip':
skip_number = int(value or pywikibot.input(
'How many files do you want to skip?'))
elif option == '-wait':
wait_time = int(value or pywikibot.input(
                'How long do you want to wait before checking the '
                'files?'))
elif option == '-start':
first_page_title = value or pywikibot.input(
'From which page do you want to start?')
namespaces = tuple(
ns + ':' for ns in site.namespace(Namespace.FILE, True))
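            # e.g. ('File:', 'Image:', ...) on most projects, so the start
            # title may be given with or without the namespace prefix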
if first_page_title.startswith(namespaces):
first_page_title = first_page_title.split(':', 1)[1]
generator = site.allimages(start=first_page_title)
repeat = False
elif option == '-page':
regex_page_name = value or pywikibot.input(
'Which page do you want to use for the regex?')
repeat = False
regex_gen = True
elif option == '-url':
regex_page_url = value or pywikibot.input(
'Which url do you want to use for the regex?')
url_used = True
repeat = False
regex_gen = True
elif option == '-regex':
regexp_to_use = value or pywikibot.input(
'Which regex do you want to use?')
generator = 'regex'
repeat = False
elif option == '-cat':
cat_name = value or pywikibot.input('In which category do I work?')
cat = pywikibot.Category(site, 'Category:' + cat_name)
generator = cat.articles(namespaces=[6])
repeat = False
elif option == '-ref':
ref_name = value or pywikibot.input(
'The references of what page should I parse?')
ref = pywikibot.Page(site, ref_name)
generator = ref.getReferences(namespaces=[6])
repeat = False
else:
unknown.append(arg)
if not generator:
normal = True
    # Ensure that the bot is localized and the right command args are given
if site.code not in PROJECT_INSERTED:
additional_text = ('Your project is not supported by this script.\n'
'To allow your project in the script you have to '
'add a localization into the script and add your '
'project to the "PROJECT_INSERTED" list!')
else:
additional_text = ''
if suggest_help(unknown_parameters=unknown,
additional_text=additional_text):
return False
# Reading the log of the new images if another generator is not given.
if normal:
if limit == 1:
pywikibot.output('Retrieving the latest file for checking...')
else:
pywikibot.output('Retrieving the latest {} files for checking...'
.format(limit))
while True:
        # Defining the main class.
bot = CheckImagesBot(site, sendemail_active=sendemail_active,
duplicates_report=duplicates_report,
log_full_error=log_full_error,
max_user_notify=max_user_notify)
if normal:
generator = pg.NewimagesPageGenerator(total=limit, site=site)
# if url_used and regex_gen, get the source for the generator
if url_used and regex_gen:
text_regex = site.getUrl(regex_page_url, no_hostname=True)
        # Not a url but a wiki page as "source" for the regex
elif regex_gen:
page = pywikibot.Page(site, regex_page_name)
try:
text_regex = page.get()
except NoPageError:
pywikibot.output("{} doesn't exist!".format(page.title()))
text_regex = '' # No source, so the bot will quit later.
    # If the generator is the regex one, build our own generator from a url
    # or page plus a regex.
if generator == 'regex' and regex_gen:
generator = bot.regex_generator(regexp_to_use, text_regex)
bot.takesettings()
if wait_time > 0:
generator = bot.wait(generator, wait_time)
for image in generator:
# Setting the image for the main class
bot.set_parameters(image)
if skip_number and bot.skip_images(skip_number, limit):
continue
# Check on commons if there's already an image with the same name
if commons_active and site.family.name != 'commons':
if not bot.check_image_on_commons():
continue
# Check if there are duplicates of the image on the project
if duplicates_active:
if not bot.check_image_duplicated(duplicates_rollback):
continue
bot.check_step()
if repeat:
            pywikibot.output('Waiting for {} seconds...'.format(time_sleep))
pywikibot.sleep(time_sleep)
else:
break
return True
if __name__ == '__main__':
start = time.time()
ret = False
try:
ret = main()
except KeyboardInterrupt:
ret = True
finally:
if ret is not False:
final = time.time()
delta = int(final - start)
pywikibot.output('Execution time: {} seconds\n'.format(delta))
| wikimedia/pywikibot-core | scripts/checkimages.py | Python | mit | 76,106 |
# (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from ansible import constants as C
from ansible.plugins.callback import CallbackBase
from ansible.utils.color import colorize, hostcolor
class CallbackModule(CallbackBase):
'''
This is the default callback interface, which simply prints messages
to stdout when new callback events are received.
'''
CALLBACK_VERSION = 2.0
CALLBACK_TYPE = 'stdout'
CALLBACK_NAME = 'default'
def __init__(self):
self._play = None
self._last_task_banner = None
super(CallbackModule, self).__init__()
def v2_runner_on_failed(self, result, ignore_errors=False):
if self._play.strategy == 'free' and self._last_task_banner != result._task._uuid:
self._print_task_banner(result._task)
delegated_vars = result._result.get('_ansible_delegated_vars', None)
if 'exception' in result._result:
if self._display.verbosity < 3:
# extract just the actual error message from the exception text
error = result._result['exception'].strip().split('\n')[-1]
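                # e.g. only the final "SomeError: message" line of a
                # multi-line traceback is shown at low verbosity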
msg = "An exception occurred during task execution. To see the full traceback, use -vvv. The error was: %s" % error
else:
msg = "An exception occurred during task execution. The full traceback is:\n" + result._result['exception']
self._display.display(msg, color=C.COLOR_ERROR)
self._handle_warnings(result._result)
if result._task.loop and 'results' in result._result:
self._process_items(result)
else:
if delegated_vars:
self._display.display("fatal: [%s -> %s]: FAILED! => %s" % (result._host.get_name(), delegated_vars['ansible_host'], self._dump_results(result._result)), color=C.COLOR_ERROR)
else:
self._display.display("fatal: [%s]: FAILED! => %s" % (result._host.get_name(), self._dump_results(result._result)), color=C.COLOR_ERROR)
if ignore_errors:
self._display.display("...ignoring", color=C.COLOR_SKIP)
def v2_runner_on_ok(self, result):
if self._play.strategy == 'free' and self._last_task_banner != result._task._uuid:
self._print_task_banner(result._task)
        self._clean_results(result._result, result._task.action)
        delegated_vars = result._result.get('_ansible_delegated_vars', None)
if result._task.action in ('include', 'include_role'):
return
elif result._result.get('changed', False):
if delegated_vars:
msg = "changed: [%s -> %s]" % (result._host.get_name(), delegated_vars['ansible_host'])
else:
msg = "changed: [%s]" % result._host.get_name()
color = C.COLOR_CHANGED
else:
if delegated_vars:
msg = "ok: [%s -> %s]" % (result._host.get_name(), delegated_vars['ansible_host'])
else:
msg = "ok: [%s]" % result._host.get_name()
color = C.COLOR_OK
self._handle_warnings(result._result)
if result._task.loop and 'results' in result._result:
self._process_items(result)
else:
            if (self._display.verbosity > 0 or '_ansible_verbose_always' in result._result) and '_ansible_verbose_override' not in result._result:
msg += " => %s" % (self._dump_results(result._result),)
self._display.display(msg, color=color)
def v2_runner_on_skipped(self, result):
if C.DISPLAY_SKIPPED_HOSTS:
if self._play.strategy == 'free' and self._last_task_banner != result._task._uuid:
self._print_task_banner(result._task)
if result._task.loop and 'results' in result._result:
self._process_items(result)
else:
msg = "skipping: [%s]" % result._host.get_name()
                if (self._display.verbosity > 0 or '_ansible_verbose_always' in result._result) and '_ansible_verbose_override' not in result._result:
msg += " => %s" % self._dump_results(result._result)
self._display.display(msg, color=C.COLOR_SKIP)
def v2_runner_on_unreachable(self, result):
if self._play.strategy == 'free' and self._last_task_banner != result._task._uuid:
self._print_task_banner(result._task)
delegated_vars = result._result.get('_ansible_delegated_vars', None)
if delegated_vars:
self._display.display("fatal: [%s -> %s]: UNREACHABLE! => %s" % (result._host.get_name(), delegated_vars['ansible_host'], self._dump_results(result._result)), color=C.COLOR_UNREACHABLE)
else:
self._display.display("fatal: [%s]: UNREACHABLE! => %s" % (result._host.get_name(), self._dump_results(result._result)), color=C.COLOR_UNREACHABLE)
def v2_playbook_on_no_hosts_matched(self):
self._display.display("skipping: no hosts matched", color=C.COLOR_SKIP)
def v2_playbook_on_no_hosts_remaining(self):
self._display.banner("NO MORE HOSTS LEFT")
def v2_playbook_on_task_start(self, task, is_conditional):
if self._play.strategy != 'free':
self._print_task_banner(task)
def _print_task_banner(self, task):
        # args can be specified as no_log in several places: in the task or
        # in the argument spec. We can check whether the task is no_log, but
        # the argument spec can't be checked because it is only run on the
        # target machine and we haven't run it there yet at this point.
#
# So we give people a config option to affect display of the args so
# that they can secure this if they feel that their stdout is insecure
# (shoulder surfing, logging stdout straight to a file, etc).
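        # Illustrative example (hypothetical task): with
        # display_args_to_stdout enabled, the banner may read
        # "TASK [install pkg name=nginx, state=present]" instead of just
        # "TASK [install pkg]".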
args = ''
if not task.no_log and C.DISPLAY_ARGS_TO_STDOUT:
args = u', '.join(u'%s=%s' % a for a in task.args.items())
args = u' %s' % args
self._display.banner(u"TASK [%s%s]" % (task.get_name().strip(), args))
if self._display.verbosity >= 2:
path = task.get_path()
if path:
self._display.display(u"task path: %s" % path, color=C.COLOR_DEBUG)
self._last_task_banner = task._uuid
def v2_playbook_on_cleanup_task_start(self, task):
self._display.banner("CLEANUP TASK [%s]" % task.get_name().strip())
def v2_playbook_on_handler_task_start(self, task):
self._display.banner("RUNNING HANDLER [%s]" % task.get_name().strip())
def v2_playbook_on_play_start(self, play):
name = play.get_name().strip()
if not name:
msg = u"PLAY"
else:
msg = u"PLAY [%s]" % name
self._play = play
self._display.banner(msg)
def v2_on_file_diff(self, result):
if result._task.loop and 'results' in result._result:
for res in result._result['results']:
if 'diff' in res and res['diff'] and res.get('changed', False):
diff = self._get_diff(res['diff'])
if diff:
self._display.display(diff)
elif 'diff' in result._result and result._result['diff'] and result._result.get('changed', False):
diff = self._get_diff(result._result['diff'])
if diff:
self._display.display(diff)
def v2_runner_item_on_ok(self, result):
delegated_vars = result._result.get('_ansible_delegated_vars', None)
if result._task.action in ('include', 'include_role'):
return
elif result._result.get('changed', False):
msg = 'changed'
color = C.COLOR_CHANGED
else:
msg = 'ok'
color = C.COLOR_OK
if delegated_vars:
msg += ": [%s -> %s]" % (result._host.get_name(), delegated_vars['ansible_host'])
else:
msg += ": [%s]" % result._host.get_name()
msg += " => (item=%s)" % (self._get_item(result._result),)
        if (self._display.verbosity > 0 or '_ansible_verbose_always' in result._result) and '_ansible_verbose_override' not in result._result:
msg += " => %s" % self._dump_results(result._result)
self._display.display(msg, color=color)
def v2_runner_item_on_failed(self, result):
delegated_vars = result._result.get('_ansible_delegated_vars', None)
if 'exception' in result._result:
if self._display.verbosity < 3:
# extract just the actual error message from the exception text
error = result._result['exception'].strip().split('\n')[-1]
msg = "An exception occurred during task execution. To see the full traceback, use -vvv. The error was: %s" % error
else:
msg = "An exception occurred during task execution. The full traceback is:\n" + result._result['exception']
self._display.display(msg, color=C.COLOR_ERROR)
msg = "failed: "
if delegated_vars:
msg += "[%s -> %s]" % (result._host.get_name(), delegated_vars['ansible_host'])
else:
msg += "[%s]" % (result._host.get_name())
self._handle_warnings(result._result)
self._display.display(msg + " (item=%s) => %s" % (self._get_item(result._result), self._dump_results(result._result)), color=C.COLOR_ERROR)
def v2_runner_item_on_skipped(self, result):
if C.DISPLAY_SKIPPED_HOSTS:
msg = "skipping: [%s] => (item=%s) " % (result._host.get_name(), self._get_item(result._result))
            if (self._display.verbosity > 0 or '_ansible_verbose_always' in result._result) and '_ansible_verbose_override' not in result._result:
msg += " => %s" % self._dump_results(result._result)
self._display.display(msg, color=C.COLOR_SKIP)
def v2_playbook_on_include(self, included_file):
msg = 'included: %s for %s' % (included_file._filename, ", ".join([h.name for h in included_file._hosts]))
self._display.display(msg, color=C.COLOR_SKIP)
def v2_playbook_on_stats(self, stats):
self._display.banner("PLAY RECAP")
hosts = sorted(stats.processed.keys())
for h in hosts:
t = stats.summarize(h)
self._display.display(u"%s : %s %s %s %s" % (
hostcolor(h, t),
colorize(u'ok', t['ok'], C.COLOR_OK),
colorize(u'changed', t['changed'], C.COLOR_CHANGED),
colorize(u'unreachable', t['unreachable'], C.COLOR_UNREACHABLE),
colorize(u'failed', t['failures'], C.COLOR_ERROR)),
screen_only=True
)
self._display.display(u"%s : %s %s %s %s" % (
hostcolor(h, t, False),
colorize(u'ok', t['ok'], None),
colorize(u'changed', t['changed'], None),
colorize(u'unreachable', t['unreachable'], None),
colorize(u'failed', t['failures'], None)),
log_only=True
)
self._display.display("", screen_only=True)
# print custom stats
if C.SHOW_CUSTOM_STATS and stats.custom:
self._display.banner("CUSTOM STATS: ")
# per host
            # TODO: come up with 'pretty format'
for k in sorted(stats.custom.keys()):
if k == '_run':
continue
self._display.display('\t%s: %s' % (k, self._dump_results(stats.custom[k], indent=1).replace('\n','')))
# print per run custom stats
if '_run' in stats.custom:
self._display.display("", screen_only=True)
self._display.display('\tRUN: %s' % self._dump_results(stats.custom['_run'], indent=1).replace('\n',''))
self._display.display("", screen_only=True)
def v2_playbook_on_start(self, playbook):
if self._display.verbosity > 1:
from os.path import basename
self._display.banner("PLAYBOOK: %s" % basename(playbook._file_name))
if self._display.verbosity > 3:
if self._options is not None:
for option in dir(self._options):
if option.startswith('_') or option in ['read_file', 'ensure_value', 'read_module']:
continue
val = getattr(self._options,option)
if val:
self._display.vvvv('%s: %s' % (option,val))
def v2_runner_retry(self, result):
task_name = result.task_name or result._task
msg = "FAILED - RETRYING: %s (%d retries left)." % (task_name, result._result['retries'] - result._result['attempts'])
        if (self._display.verbosity > 2 or '_ansible_verbose_always' in result._result) and '_ansible_verbose_override' not in result._result:
msg += "Result was: %s" % self._dump_results(result._result)
self._display.display(msg, color=C.COLOR_DEBUG)
| bobobox/ansible | lib/ansible/plugins/callback/default.py | Python | gpl-3.0 | 13,893 |
"Example of application main using import."
import mymodule
import mypkg
from mypkg import pkg_mod
print(__doc__)
print(mymodule.fun())
print(mypkg.f1())
print(pkg_mod.f2()) | keobox/yap101 | main.py | Python | mit | 175 |
# -*- coding: utf-8 -*-
"""Test dbtoyaml and yamltodb using pagila schema
See http://cvs.pgfoundry.org/cgi-bin/cvsweb.cgi/dbsamples/pagila/
pagila-schema.sql?rev=1.8
"""
from difflib import unified_diff
from pyrseas.testutils import DbMigrateTestCase
class PagilaTestCase(DbMigrateTestCase):
def setUp(self):
super(DbMigrateTestCase, self).setUp()
self.add_public_schema(self.srcdb)
self.add_public_schema(self.db)
@classmethod
def tearDown(cls):
cls.remove_tempfiles('pagila')
cls.remove_tempfiles('empty')
def test_pagila(self):
if self.db.version < 90600:
self.skipTest('Only available on PG 9.6 and later')
# Create the source schema
self.execute_script(__file__, 'pagila-schema.sql')
# Run pg_dump against source database
srcdump = self.tempfile_path('pagila-src.dump')
self.run_pg_dump(srcdump, True)
# Create source YAML file
srcyaml = self.tempfile_path('pagila-src.yaml')
self.create_yaml(srcyaml, True)
# Run pg_dump/dbtoyaml against empty target database
emptydump = self.tempfile_path('empty.dump')
self.run_pg_dump(emptydump)
emptyyaml = self.tempfile_path('empty.yaml')
self.create_yaml(emptyyaml)
# Migrate the target database
targsql = self.tempfile_path('pagila.sql')
self.migrate_target(srcyaml, targsql)
# Run pg_dump against target database
targdump = self.tempfile_path('pagila.dump')
self.run_pg_dump(targdump)
# Create target YAML file
targyaml = self.tempfile_path('pagila.yaml')
self.create_yaml(targyaml)
# diff pagila-src.dump against pagila.dump
# order of triggers requires special handling
adds = []
subs = []
for line in unified_diff(self.lines(srcdump), self.lines(targdump)):
if line == '--- \n' or line == '+++ \n' or line.startswith('@@'):
continue
if line[:1] == '+':
adds.append(line[1:-1])
elif line[:1] == '-':
subs.append(line[1:-1])
subs = sorted(subs)
for i, line in enumerate(sorted(adds)):
assert line == subs[i]
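        # i.e. every line added in the diff must have an identical removed
        # counterpart, so the dumps may differ only in ordering (such as
        # trigger definitions emitted in a different sequence), never in
        # content.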
# diff pagila-src.yaml against pagila.yaml
assert self.lines(srcyaml) == self.lines(targyaml)
# Undo the changes
self.migrate_target(emptyyaml, targsql)
# Workaround problem with privileges on schema public
self.db.execute("GRANT ALL ON SCHEMA public TO postgres")
self.db.conn.commit()
# Run pg_dump against target database
self.run_pg_dump(targdump)
self.db.execute("REVOKE ALL ON SCHEMA public FROM postgres")
self.db.conn.commit()
# Create target YAML file
self.create_yaml(targyaml)
# diff empty.dump against pagila.dump
assert self.lines(emptydump) == self.lines(targdump)
# diff empty.yaml against pagila.yaml
assert self.lines(emptyyaml) == self.lines(targyaml)
| perseas/Pyrseas | tests/functional/test_pagila.py | Python | bsd-3-clause | 3,085 |
######
##
## The Python3 code
## is generated from ATS source by atscc2py3
## The starting compilation time is: 2016-7-21: 12h:55m
##
######
######
#ATSextcode_beg()
######
######
from ats2pypre_basics_cats import *
######
from ats2pypre_integer_cats import *
from ats2pypre_bool_cats import *
######
######
#ATSextcode_end()
######
def _ats2pypre_stream_patsfun_1__closurerize(env0, env1):
def _ats2pypre_stream_patsfun_1__cfun(cenv): return _ats2pypre_stream_patsfun_1(cenv[1], cenv[2])
return (_ats2pypre_stream_patsfun_1__cfun, env0, env1)
def _ats2pypre_stream_patsfun_3__closurerize(env0, env1):
def _ats2pypre_stream_patsfun_3__cfun(cenv): return _ats2pypre_stream_patsfun_3(cenv[1], cenv[2])
return (_ats2pypre_stream_patsfun_3__cfun, env0, env1)
def _ats2pypre_stream_patsfun_6__closurerize(env0, env1):
def _ats2pypre_stream_patsfun_6__cfun(cenv): return _ats2pypre_stream_patsfun_6(cenv[1], cenv[2])
return (_ats2pypre_stream_patsfun_6__cfun, env0, env1)
def _ats2pypre_stream_patsfun_8__closurerize(env0):
def _ats2pypre_stream_patsfun_8__cfun(cenv): return _ats2pypre_stream_patsfun_8(cenv[1])
return (_ats2pypre_stream_patsfun_8__cfun, env0)
def _ats2pypre_stream_patsfun_10__closurerize(env0):
def _ats2pypre_stream_patsfun_10__cfun(cenv): return _ats2pypre_stream_patsfun_10(cenv[1])
return (_ats2pypre_stream_patsfun_10__cfun, env0)
def _ats2pypre_stream_patsfun_12__closurerize(env0, env1):
def _ats2pypre_stream_patsfun_12__cfun(cenv): return _ats2pypre_stream_patsfun_12(cenv[1], cenv[2])
return (_ats2pypre_stream_patsfun_12__cfun, env0, env1)
def ats2pypre_stream_map_cloref(arg0, arg1):
tmpret0 = None
funlab_py = None
tmplab_py = None
#__patsflab_stream_map_cloref
tmpret0 = [0, _ats2pypre_stream_patsfun_1__closurerize(arg0, arg1)]
return tmpret0
def _ats2pypre_stream_patsfun_1(env0, env1):
tmpret1 = None
tmp2 = None
tmp3 = None
tmp4 = None
tmp5 = None
tmp6 = None
funlab_py = None
tmplab_py = None
mbranch_1 = None
def __atstmplab0():
nonlocal env0, env1
nonlocal tmpret1, tmp2, tmp3, tmp4, tmp5, tmp6
nonlocal funlab_py, tmplab_py
nonlocal mbranch_1
tmplab_py = 0
if(ATSCKptriscons(tmp2)): tmplab_py = 4 ; return#__atstmplab3
__atstmplab1()
return
def __atstmplab1():
nonlocal env0, env1
nonlocal tmpret1, tmp2, tmp3, tmp4, tmp5, tmp6
nonlocal funlab_py, tmplab_py
nonlocal mbranch_1
tmplab_py = 0
tmpret1 = None
return
def __atstmplab2():
nonlocal env0, env1
nonlocal tmpret1, tmp2, tmp3, tmp4, tmp5, tmp6
nonlocal funlab_py, tmplab_py
nonlocal mbranch_1
tmplab_py = 0
__atstmplab3()
return
def __atstmplab3():
nonlocal env0, env1
nonlocal tmpret1, tmp2, tmp3, tmp4, tmp5, tmp6
nonlocal funlab_py, tmplab_py
nonlocal mbranch_1
tmplab_py = 0
tmp3 = tmp2[0]
tmp4 = tmp2[1]
tmp5 = env1[0](env1, tmp3)
tmp6 = ats2pypre_stream_map_cloref(tmp4, env1)
tmpret1 = (tmp5, tmp6)
return
mbranch_1 = { 1: __atstmplab0, 2: __atstmplab1, 3: __atstmplab2, 4: __atstmplab3 }
#__patsflab__ats2pypre_stream_patsfun_1
ATSPMVlazyval_eval(env0); tmp2 = env0[1]
#ATScaseofseq_beg
tmplab_py = 1
while(1):
mbranch_1.get(tmplab_py)()
if (tmplab_py == 0): break
#ATScaseofseq_end
return tmpret1
def ats2pypre_stream_filter_cloref(arg0, arg1):
tmpret7 = None
funlab_py = None
tmplab_py = None
#__patsflab_stream_filter_cloref
tmpret7 = [0, _ats2pypre_stream_patsfun_3__closurerize(arg0, arg1)]
return tmpret7
def _ats2pypre_stream_patsfun_3(env0, env1):
tmpret8 = None
tmp9 = None
tmp10 = None
tmp11 = None
tmp12 = None
tmp13 = None
tmp14 = None
funlab_py = None
tmplab_py = None
mbranch_1 = None
def __atstmplab4():
nonlocal env0, env1
nonlocal tmpret8, tmp9, tmp10, tmp11, tmp12, tmp13, tmp14
nonlocal funlab_py, tmplab_py
nonlocal mbranch_1
tmplab_py = 0
if(ATSCKptriscons(tmp9)): tmplab_py = 4 ; return#__atstmplab7
__atstmplab5()
return
def __atstmplab5():
nonlocal env0, env1
nonlocal tmpret8, tmp9, tmp10, tmp11, tmp12, tmp13, tmp14
nonlocal funlab_py, tmplab_py
nonlocal mbranch_1
tmplab_py = 0
tmpret8 = None
return
def __atstmplab6():
nonlocal env0, env1
nonlocal tmpret8, tmp9, tmp10, tmp11, tmp12, tmp13, tmp14
nonlocal funlab_py, tmplab_py
nonlocal mbranch_1
tmplab_py = 0
__atstmplab7()
return
def __atstmplab7():
nonlocal env0, env1
nonlocal tmpret8, tmp9, tmp10, tmp11, tmp12, tmp13, tmp14
nonlocal funlab_py, tmplab_py
nonlocal mbranch_1
tmplab_py = 0
tmp10 = tmp9[0]
tmp11 = tmp9[1]
tmp12 = env1[0](env1, tmp10)
if (tmp12):
tmp13 = ats2pypre_stream_filter_cloref(tmp11, env1)
tmpret8 = (tmp10, tmp13)
else:
tmp14 = ats2pypre_stream_filter_cloref(tmp11, env1)
ATSPMVlazyval_eval(tmp14); tmpret8 = tmp14[1]
#endif
return
mbranch_1 = { 1: __atstmplab4, 2: __atstmplab5, 3: __atstmplab6, 4: __atstmplab7 }
#__patsflab__ats2pypre_stream_patsfun_3
ATSPMVlazyval_eval(env0); tmp9 = env0[1]
#ATScaseofseq_beg
tmplab_py = 1
while(1):
mbranch_1.get(tmplab_py)()
if (tmplab_py == 0): break
#ATScaseofseq_end
return tmpret8
def ats2pypre_stream_tabulate_cloref(arg0):
tmpret15 = None
funlab_py = None
tmplab_py = None
#__patsflab_stream_tabulate_cloref
tmpret15 = _ats2pypre_stream_aux_5(arg0, 0)
return tmpret15
def _ats2pypre_stream_aux_5(env0, arg0):
tmpret16 = None
funlab_py = None
tmplab_py = None
#__patsflab__ats2pypre_stream_aux_5
tmpret16 = [0, _ats2pypre_stream_patsfun_6__closurerize(env0, arg0)]
return tmpret16
def _ats2pypre_stream_patsfun_6(env0, env1):
tmpret17 = None
tmp18 = None
tmp19 = None
tmp20 = None
funlab_py = None
tmplab_py = None
#__patsflab__ats2pypre_stream_patsfun_6
tmp18 = env0[0](env0, env1)
tmp20 = ats2pypre_add_int1_int1(env1, 1)
tmp19 = _ats2pypre_stream_aux_5(env0, tmp20)
tmpret17 = (tmp18, tmp19)
return tmpret17
def ats2pypre_stream2cloref_exn(arg0):
tmpret21 = None
tmp22 = None
funlab_py = None
tmplab_py = None
#__patsflab_stream2cloref_exn
tmp22 = ats2pypre_ref(arg0)
tmpret21 = _ats2pypre_stream_patsfun_8__closurerize(tmp22)
return tmpret21
def _ats2pypre_stream_patsfun_8(env0):
tmpret23 = None
tmp24 = None
tmp25 = None
tmp26 = None
tmp27 = None
funlab_py = None
tmplab_py = None
#__patsflab__ats2pypre_stream_patsfun_8
tmp24 = ats2pypre_ref_get_elt(env0)
ATSPMVlazyval_eval(tmp24); tmp25 = tmp24[1]
if(ATSCKptrisnull(tmp25)): ATSINScaseof_fail("/home/hwxi/Research/ATS-Postiats-contrib/contrib/libatscc/DATS/stream.dats: 1532(line=114, offs=5) -- 1556(line=114, offs=29)");
tmp26 = tmp25[0]
tmp27 = tmp25[1]
ats2pypre_ref_set_elt(env0, tmp27)
tmpret23 = tmp26
return tmpret23
def ats2pypre_stream2cloref_opt(arg0):
tmpret29 = None
tmp30 = None
funlab_py = None
tmplab_py = None
#__patsflab_stream2cloref_opt
tmp30 = ats2pypre_ref(arg0)
tmpret29 = _ats2pypre_stream_patsfun_10__closurerize(tmp30)
return tmpret29
def _ats2pypre_stream_patsfun_10(env0):
tmpret31 = None
tmp32 = None
tmp33 = None
tmp34 = None
tmp35 = None
funlab_py = None
tmplab_py = None
mbranch_1 = None
def __atstmplab8():
nonlocal env0
nonlocal tmpret31, tmp32, tmp33, tmp34, tmp35
nonlocal funlab_py, tmplab_py
nonlocal mbranch_1
tmplab_py = 0
if(ATSCKptriscons(tmp33)): tmplab_py = 4 ; return#__atstmplab11
__atstmplab9()
return
def __atstmplab9():
nonlocal env0
nonlocal tmpret31, tmp32, tmp33, tmp34, tmp35
nonlocal funlab_py, tmplab_py
nonlocal mbranch_1
tmplab_py = 0
tmpret31 = None
return
def __atstmplab10():
nonlocal env0
nonlocal tmpret31, tmp32, tmp33, tmp34, tmp35
nonlocal funlab_py, tmplab_py
nonlocal mbranch_1
tmplab_py = 0
__atstmplab11()
return
def __atstmplab11():
nonlocal env0
nonlocal tmpret31, tmp32, tmp33, tmp34, tmp35
nonlocal funlab_py, tmplab_py
nonlocal mbranch_1
tmplab_py = 0
tmp34 = tmp33[0]
tmp35 = tmp33[1]
ats2pypre_ref_set_elt(env0, tmp35)
tmpret31 = (tmp34, )
return
mbranch_1 = { 1: __atstmplab8, 2: __atstmplab9, 3: __atstmplab10, 4: __atstmplab11 }
#__patsflab__ats2pypre_stream_patsfun_10
tmp32 = ats2pypre_ref_get_elt(env0)
ATSPMVlazyval_eval(tmp32); tmp33 = tmp32[1]
#ATScaseofseq_beg
tmplab_py = 1
while(1):
mbranch_1.get(tmplab_py)()
if (tmplab_py == 0): break
#ATScaseofseq_end
return tmpret31
def ats2pypre_stream2cloref_last(arg0, arg1):
tmpret37 = None
tmp38 = None
tmp39 = None
funlab_py = None
tmplab_py = None
#__patsflab_stream2cloref_last
tmp38 = ats2pypre_ref(arg0)
tmp39 = ats2pypre_ref(arg1)
tmpret37 = _ats2pypre_stream_patsfun_12__closurerize(tmp38, tmp39)
return tmpret37
def _ats2pypre_stream_patsfun_12(env0, env1):
tmpret40 = None
tmp41 = None
tmp42 = None
tmp43 = None
tmp44 = None
funlab_py = None
tmplab_py = None
mbranch_1 = None
def __atstmplab12():
nonlocal env0, env1
nonlocal tmpret40, tmp41, tmp42, tmp43, tmp44
nonlocal funlab_py, tmplab_py
nonlocal mbranch_1
tmplab_py = 0
if(ATSCKptriscons(tmp42)): tmplab_py = 4 ; return#__atstmplab15
__atstmplab13()
return
def __atstmplab13():
nonlocal env0, env1
nonlocal tmpret40, tmp41, tmp42, tmp43, tmp44
nonlocal funlab_py, tmplab_py
nonlocal mbranch_1
tmplab_py = 0
tmpret40 = ats2pypre_ref_get_elt(env1)
return
def __atstmplab14():
nonlocal env0, env1
nonlocal tmpret40, tmp41, tmp42, tmp43, tmp44
nonlocal funlab_py, tmplab_py
nonlocal mbranch_1
tmplab_py = 0
__atstmplab15()
return
def __atstmplab15():
nonlocal env0, env1
nonlocal tmpret40, tmp41, tmp42, tmp43, tmp44
nonlocal funlab_py, tmplab_py
nonlocal mbranch_1
tmplab_py = 0
tmp43 = tmp42[0]
tmp44 = tmp42[1]
ats2pypre_ref_set_elt(env0, tmp44)
ats2pypre_ref_set_elt(env1, tmp43)
tmpret40 = tmp43
return
mbranch_1 = { 1: __atstmplab12, 2: __atstmplab13, 3: __atstmplab14, 4: __atstmplab15 }
#__patsflab__ats2pypre_stream_patsfun_12
tmp41 = ats2pypre_ref_get_elt(env0)
ATSPMVlazyval_eval(tmp41); tmp42 = tmp41[1]
#ATScaseofseq_beg
tmplab_py = 1
while(1):
mbranch_1.get(tmplab_py)()
if (tmplab_py == 0): break
#ATScaseofseq_end
return tmpret40
######
##
## end-of-compilation-unit
##
######
| githwxi/ATS-Postiats-frozen | projects/SMALL/openshift-flask-2016-07-20/libatscc2py3/ats2pypre_stream_dats.py | Python | mit | 10,563 |
# Copyright (C) 2012 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import copy
import logging
import os
import re
import signal
import sys
import subprocess
import threading
import time
from multiprocessing.pool import ThreadPool
from webkitpy.common.system.executive import ScriptError
from webkitpy.layout_tests.breakpad.dump_reader_multipart import DumpReaderAndroid
from webkitpy.layout_tests.models import test_run_results
from webkitpy.layout_tests.port import base
from webkitpy.layout_tests.port import linux
from webkitpy.layout_tests.port import driver
from webkitpy.layout_tests.port import factory
from webkitpy.layout_tests.port import server_process
from webkitpy.common.system.profiler import SingleFileOutputProfiler
_log = logging.getLogger(__name__)
# The root directory for test resources, which has the same structure as the
# source root directory of Chromium.
# This path is defined in Chromium's base/test/test_support_android.cc.
DEVICE_SOURCE_ROOT_DIR = '/data/local/tmp/'
# The layout tests directory on device, which has two usages:
# 1. as a virtual path in file urls that will be bridged to HTTP.
# 2. pointing to some files that are pushed to the device for tests that
# don't work on file-over-http (e.g. blob protocol tests).
DEVICE_WEBKIT_BASE_DIR = DEVICE_SOURCE_ROOT_DIR + 'third_party/WebKit/'
DEVICE_LAYOUT_TESTS_DIR = DEVICE_WEBKIT_BASE_DIR + 'LayoutTests/'
SCALING_GOVERNORS_PATTERN = "/sys/devices/system/cpu/cpu*/cpufreq/scaling_governor"
KPTR_RESTRICT_PATH = "/proc/sys/kernel/kptr_restrict"
# All the test cases are still served to the test runner through file protocol,
# but we use a file-to-http feature to bridge the file request to host's http
# server to get the real test files and corresponding resources.
# See webkit/support/platform_support_android.cc for the other side of this bridge.
PERF_TEST_PATH_PREFIX = '/all-perf-tests'
LAYOUT_TEST_PATH_PREFIX = '/all-tests'
# All ports the Android forwarder to forward.
# 8000, 8080 and 8443 are for http/https tests.
# 8880 and 9323 are for websocket tests
# (see http_server.py, apache_http_server.py and websocket_server.py).
FORWARD_PORTS = '8000 8080 8443 8880 9323'
MS_TRUETYPE_FONTS_DIR = '/usr/share/fonts/truetype/msttcorefonts/'
MS_TRUETYPE_FONTS_PACKAGE = 'ttf-mscorefonts-installer'
# Timeout in seconds to wait for starting/stopping the driver.
DRIVER_START_STOP_TIMEOUT_SECS = 10
HOST_FONT_FILES = [
[[MS_TRUETYPE_FONTS_DIR], 'Arial.ttf', MS_TRUETYPE_FONTS_PACKAGE],
[[MS_TRUETYPE_FONTS_DIR], 'Arial_Bold.ttf', MS_TRUETYPE_FONTS_PACKAGE],
[[MS_TRUETYPE_FONTS_DIR], 'Arial_Bold_Italic.ttf', MS_TRUETYPE_FONTS_PACKAGE],
[[MS_TRUETYPE_FONTS_DIR], 'Arial_Italic.ttf', MS_TRUETYPE_FONTS_PACKAGE],
[[MS_TRUETYPE_FONTS_DIR], 'Comic_Sans_MS.ttf', MS_TRUETYPE_FONTS_PACKAGE],
[[MS_TRUETYPE_FONTS_DIR], 'Comic_Sans_MS_Bold.ttf', MS_TRUETYPE_FONTS_PACKAGE],
[[MS_TRUETYPE_FONTS_DIR], 'Courier_New.ttf', MS_TRUETYPE_FONTS_PACKAGE],
[[MS_TRUETYPE_FONTS_DIR], 'Courier_New_Bold.ttf', MS_TRUETYPE_FONTS_PACKAGE],
[[MS_TRUETYPE_FONTS_DIR], 'Courier_New_Bold_Italic.ttf', MS_TRUETYPE_FONTS_PACKAGE],
[[MS_TRUETYPE_FONTS_DIR], 'Courier_New_Italic.ttf', MS_TRUETYPE_FONTS_PACKAGE],
[[MS_TRUETYPE_FONTS_DIR], 'Georgia.ttf', MS_TRUETYPE_FONTS_PACKAGE],
[[MS_TRUETYPE_FONTS_DIR], 'Georgia_Bold.ttf', MS_TRUETYPE_FONTS_PACKAGE],
[[MS_TRUETYPE_FONTS_DIR], 'Georgia_Bold_Italic.ttf', MS_TRUETYPE_FONTS_PACKAGE],
[[MS_TRUETYPE_FONTS_DIR], 'Georgia_Italic.ttf', MS_TRUETYPE_FONTS_PACKAGE],
[[MS_TRUETYPE_FONTS_DIR], 'Impact.ttf', MS_TRUETYPE_FONTS_PACKAGE],
[[MS_TRUETYPE_FONTS_DIR], 'Trebuchet_MS.ttf', MS_TRUETYPE_FONTS_PACKAGE],
[[MS_TRUETYPE_FONTS_DIR], 'Trebuchet_MS_Bold.ttf', MS_TRUETYPE_FONTS_PACKAGE],
[[MS_TRUETYPE_FONTS_DIR], 'Trebuchet_MS_Bold_Italic.ttf', MS_TRUETYPE_FONTS_PACKAGE],
[[MS_TRUETYPE_FONTS_DIR], 'Trebuchet_MS_Italic.ttf', MS_TRUETYPE_FONTS_PACKAGE],
[[MS_TRUETYPE_FONTS_DIR], 'Times_New_Roman.ttf', MS_TRUETYPE_FONTS_PACKAGE],
[[MS_TRUETYPE_FONTS_DIR], 'Times_New_Roman_Bold.ttf', MS_TRUETYPE_FONTS_PACKAGE],
[[MS_TRUETYPE_FONTS_DIR], 'Times_New_Roman_Bold_Italic.ttf', MS_TRUETYPE_FONTS_PACKAGE],
[[MS_TRUETYPE_FONTS_DIR], 'Times_New_Roman_Italic.ttf', MS_TRUETYPE_FONTS_PACKAGE],
[[MS_TRUETYPE_FONTS_DIR], 'Verdana.ttf', MS_TRUETYPE_FONTS_PACKAGE],
[[MS_TRUETYPE_FONTS_DIR], 'Verdana_Bold.ttf', MS_TRUETYPE_FONTS_PACKAGE],
[[MS_TRUETYPE_FONTS_DIR], 'Verdana_Bold_Italic.ttf', MS_TRUETYPE_FONTS_PACKAGE],
[[MS_TRUETYPE_FONTS_DIR], 'Verdana_Italic.ttf', MS_TRUETYPE_FONTS_PACKAGE],
# The Microsoft font EULA
[['/usr/share/doc/ttf-mscorefonts-installer/'], 'READ_ME!.gz', MS_TRUETYPE_FONTS_PACKAGE],
# Other fonts: Arabic, CJK, Indic, Thai, etc.
[['/usr/share/fonts/truetype/ttf-dejavu/'], 'DejaVuSans.ttf', 'ttf-dejavu'],
[['/usr/share/fonts/truetype/kochi/'], 'kochi-mincho.ttf', 'ttf-kochi-mincho'],
[['/usr/share/fonts/truetype/ttf-indic-fonts-core/'], 'lohit_hi.ttf', 'ttf-indic-fonts-core'],
[['/usr/share/fonts/truetype/ttf-indic-fonts-core/'], 'lohit_ta.ttf', 'ttf-indic-fonts-core'],
[['/usr/share/fonts/truetype/ttf-indic-fonts-core/'], 'MuktiNarrow.ttf', 'ttf-indic-fonts-core'],
[['/usr/share/fonts/truetype/thai/', '/usr/share/fonts/truetype/tlwg/'], 'Garuda.ttf', 'fonts-tlwg-garuda'],
[['/usr/share/fonts/truetype/ttf-indic-fonts-core/', '/usr/share/fonts/truetype/ttf-punjabi-fonts/'], 'lohit_pa.ttf', 'ttf-indic-fonts-core'],
]
# Test resources that need to be accessed as files directly.
# Each item can be the relative path of a directory or a file.
TEST_RESOURCES_TO_PUSH = [
# Blob tests need to access files directly.
'editing/pasteboard/resources',
'fast/files/resources',
'http/tests/local/resources',
'http/tests/local/formdata/resources',
# User style URLs are accessed as local files in webkit_support.
'http/tests/security/resources/cssStyle.css',
# Media tests need to access audio/video as files.
'media/content',
'compositing/resources/video.mp4',
]
MD5SUM_DEVICE_FILE_NAME = 'md5sum_bin'
MD5SUM_HOST_FILE_NAME = 'md5sum_bin_host'
MD5SUM_DEVICE_PATH = '/data/local/tmp/' + MD5SUM_DEVICE_FILE_NAME
# Information required when running layout tests using content_shell as the test runner.
class ContentShellDriverDetails():
def device_cache_directory(self):
return self.device_directory() + 'cache/'
def device_fonts_directory(self):
return self.device_directory() + 'fonts/'
def device_forwarder_path(self):
return self.device_directory() + 'forwarder'
def device_fifo_directory(self):
return '/data/data/' + self.package_name() + '/files/'
def apk_name(self):
return 'apks/ContentShell.apk'
def package_name(self):
return 'org.chromium.content_shell_apk'
def activity_name(self):
return self.package_name() + '/.ContentShellActivity'
def library_name(self):
return 'libcontent_shell_content_view.so'
def additional_resources(self):
return ['content_resources.pak', 'content_shell.pak', 'shell_resources.pak']
def command_line_file(self):
return '/data/local/tmp/content-shell-command-line'
def device_crash_dumps_directory(self):
return '/data/local/tmp/content-shell-crash-dumps'
def additional_command_line_flags(self, use_breakpad):
flags = ['--encode-binary']
if use_breakpad:
flags.extend(['--enable-crash-reporter', '--crash-dumps-dir=%s' % self.device_crash_dumps_directory()])
return flags
def device_directory(self):
return DEVICE_SOURCE_ROOT_DIR + 'content_shell/'
# The AndroidCommands class encapsulates commands to communicate with an attached device.
class AndroidCommands(object):
_adb_command_path = None
_adb_command_path_options = []
def __init__(self, executive, device_serial, debug_logging):
self._executive = executive
self._device_serial = device_serial
self._debug_logging = debug_logging
# Local public methods.
def file_exists(self, full_path):
assert full_path.startswith('/')
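        # 'ls -d' prints the path itself (not directory contents) when it exists,
        # for both files and directories, so the comparison below works for either.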
return self.run(['shell', 'ls', '-d', full_path]).strip() == full_path
def push(self, host_path, device_path, ignore_error=False):
return self.run(['push', host_path, device_path], ignore_error=ignore_error)
def pull(self, device_path, host_path, ignore_error=False):
return self.run(['pull', device_path, host_path], ignore_error=ignore_error)
def mkdir(self, device_path, chmod=None):
self.run(['shell', 'mkdir', '-p', device_path])
if chmod:
self.run(['shell', 'chmod', chmod, device_path])
def restart_adb(self):
pids = self.extract_pids('adbd')
if pids:
output = self.run(['shell', 'kill', '-' + str(signal.SIGTERM)] + pids)
self.run(['wait-for-device'])
def restart_as_root(self):
output = self.run(['root'])
if 'adbd is already running as root' in output:
return
        elif 'restarting adbd as root' not in output:
self._log_error('Unrecognized output from adb root: %s' % output)
self.run(['wait-for-device'])
def extract_pids(self, process_name):
pids = []
output = self.run(['shell', 'ps'])
for line in output.splitlines():
data = line.split()
try:
if process_name in data[-1]: # name is in the last column
if process_name == data[-1]:
pids.insert(0, data[1]) # PID is in the second column
else:
pids.append(data[1])
except IndexError:
pass
return pids
def run(self, command, ignore_error=False):
self._log_debug('Run adb command: ' + str(command))
if ignore_error:
error_handler = self._executive.ignore_error
else:
error_handler = None
result = self._executive.run_command(self.adb_command() + command, error_handler=error_handler, debug_logging=self._debug_logging)
# We limit the length to avoid outputting too verbose commands, such as "adb logcat".
self._log_debug('Run adb result: ' + result[:80])
return result
def get_serial(self):
return self._device_serial
def adb_command(self):
return [AndroidCommands.adb_command_path(self._executive, self._debug_logging), '-s', self._device_serial]
@staticmethod
def set_adb_command_path_options(paths):
AndroidCommands._adb_command_path_options = paths
@staticmethod
def adb_command_path(executive, debug_logging):
if AndroidCommands._adb_command_path:
return AndroidCommands._adb_command_path
assert AndroidCommands._adb_command_path_options, 'No commands paths have been set to look for the "adb" command.'
command_path = None
command_version = None
for path_option in AndroidCommands._adb_command_path_options:
path_version = AndroidCommands._determine_adb_version(path_option, executive, debug_logging)
if not path_version:
continue
            if command_version is not None and path_version < command_version:
continue
command_path = path_option
command_version = path_version
assert command_path, 'Unable to locate the "adb" command. Are you using an Android checkout of Chromium?'
AndroidCommands._adb_command_path = command_path
return command_path
# Local private methods.
def _log_error(self, message):
_log.error('[%s] %s' % (self._device_serial, message))
def _log_info(self, message):
_log.info('[%s] %s' % (self._device_serial, message))
def _log_debug(self, message):
if self._debug_logging:
_log.debug('[%s] %s' % (self._device_serial, message))
@staticmethod
def _determine_adb_version(adb_command_path, executive, debug_logging):
        re_version = re.compile(r'^.*version ([\d.]+)')
try:
output = executive.run_command([adb_command_path, 'version'], error_handler=executive.ignore_error,
debug_logging=debug_logging)
except OSError:
return None
result = re_version.match(output)
if not output or not result:
return None
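        # Example: "Android Debug Bridge version 1.0.32" parses to [1, 0, 32].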
return [int(n) for n in result.group(1).split('.')]
# A class to encapsulate device status and information, such as the AndroidCommands
# instances and whether the device has been set up.
class AndroidDevices(object):
    # Minimum battery percentage a device must have in order to be used for
    # running the layout tests.
MINIMUM_BATTERY_PERCENTAGE = 30
def __init__(self, executive, default_device=None, debug_logging=False):
self._usable_devices = []
self._default_device = default_device
self._prepared_devices = []
self._debug_logging = debug_logging
def prepared_devices(self):
return self._prepared_devices
def usable_devices(self, executive):
if self._usable_devices:
return self._usable_devices
if self._default_device:
self._usable_devices = [AndroidCommands(executive, self._default_device, self._debug_logging)]
return self._usable_devices
# Example "adb devices" command output:
# List of devices attached
# 0123456789ABCDEF device
        re_device = re.compile(r'^([a-zA-Z0-9_:.-]+)\tdevice$', re.MULTILINE)
result = executive.run_command([AndroidCommands.adb_command_path(executive, debug_logging=self._debug_logging), 'devices'],
error_handler=executive.ignore_error, debug_logging=self._debug_logging)
devices = re_device.findall(result)
if not devices:
return []
for device_serial in sorted(devices):
commands = AndroidCommands(executive, device_serial, self._debug_logging)
if self._battery_level_for_device(commands) < AndroidDevices.MINIMUM_BATTERY_PERCENTAGE:
_log.warning('Device with serial "%s" skipped because it has less than %d percent battery.'
% (commands.get_serial(), AndroidDevices.MINIMUM_BATTERY_PERCENTAGE))
continue
if not self._is_device_screen_on(commands):
_log.warning('Device with serial "%s" skipped because the screen must be on.' % commands.get_serial())
continue
self._usable_devices.append(commands)
return self._usable_devices
def get_device(self, executive, device_index):
devices = self.usable_devices(executive)
if device_index >= len(devices):
raise AssertionError('Device index exceeds number of usable devices.')
return devices[device_index]
def is_device_prepared(self, device_serial):
return device_serial in self._prepared_devices
def set_device_prepared(self, device_serial):
self._prepared_devices.append(device_serial)
# Private methods
def _battery_level_for_device(self, commands):
battery_status = commands.run(['shell', 'dumpsys', 'battery'])
if 'Error' in battery_status or "Can't find service: battery" in battery_status:
_log.warning('Unable to read the battery level from device with serial "%s".' % commands.get_serial())
return 0
        return int(re.findall(r'level: (\d+)', battery_status)[0])
def _is_device_screen_on(self, commands):
power_status = commands.run(['shell', 'dumpsys', 'power'])
return 'mScreenOn=true' in power_status or 'mScreenOn=SCREEN_ON_BIT' in power_status or 'Display Power: state=ON' in power_status
class AndroidPort(base.Port):
port_name = 'android'
# Avoid initializing the adb path [worker count]+1 times by storing it as a static member.
_adb_path = None
    SUPPORTED_VERSIONS = ('android',)
FALLBACK_PATHS = {'icecreamsandwich': ['android'] + linux.LinuxPort.latest_platform_fallback_path()}
# Android has aac and mp3 codecs built in.
PORT_HAS_AUDIO_CODECS_BUILT_IN = True
BUILD_REQUIREMENTS_URL = 'https://www.chromium.org/developers/how-tos/android-build-instructions'
def __init__(self, host, port_name, **kwargs):
super(AndroidPort, self).__init__(host, port_name, **kwargs)
self._operating_system = 'android'
self._version = 'icecreamsandwich'
self._host_port = factory.PortFactory(host).get('chromium', **kwargs)
self._server_process_constructor = self._android_server_process_constructor
if not self.get_option('disable_breakpad'):
self._dump_reader = DumpReaderAndroid(host, self._build_path())
if self.driver_name() != self.CONTENT_SHELL_NAME:
raise AssertionError('Layout tests on Android only support content_shell as the driver.')
self._driver_details = ContentShellDriverDetails()
# Initialize the AndroidDevices class which tracks available devices.
default_device = None
if hasattr(self._options, 'adb_device') and len(self._options.adb_device):
default_device = self._options.adb_device
self._debug_logging = self.get_option('android_logging')
self._devices = AndroidDevices(self._executive, default_device, self._debug_logging)
# Tell AndroidCommands where to search for the "adb" command.
AndroidCommands.set_adb_command_path_options(['adb',
self.path_from_chromium_base('third_party', 'android_tools', 'sdk', 'platform-tools', 'adb')])
prepared_devices = self.get_option('prepared_devices', [])
for serial in prepared_devices:
self._devices.set_device_prepared(serial)
def default_smoke_test_only(self):
return True
# Local public methods.
def path_to_forwarder(self):
return self._build_path('forwarder')
def path_to_md5sum(self):
return self._build_path(MD5SUM_DEVICE_FILE_NAME)
def path_to_md5sum_host(self):
return self._build_path(MD5SUM_HOST_FILE_NAME)
def additional_driver_flag(self):
return super(AndroidPort, self).additional_driver_flag() + self._driver_details.additional_command_line_flags(use_breakpad=not self.get_option('disable_breakpad'))
def default_timeout_ms(self):
# Android platform has less computing power than desktop platforms.
# Using 10 seconds allows us to pass most slow tests which are not
# marked as slow tests on desktop platforms.
return 10 * 1000
def driver_stop_timeout(self):
# The driver doesn't respond to closing stdin, so we might as well stop the driver immediately.
return 0.0
def default_child_processes(self):
usable_devices = self._devices.usable_devices(self._executive)
if not usable_devices:
raise test_run_results.TestRunException(test_run_results.NO_DEVICES_EXIT_STATUS, "Unable to find any attached Android devices.")
return len(usable_devices)
def max_drivers_per_process(self):
# Android falls over when we try to run multiple content_shells per worker.
# See https://codereview.chromium.org/1158323009/
return 1
def check_wdiff(self, logging=True):
return self._host_port.check_wdiff(logging)
def check_build(self, needs_http, printer):
exit_status = super(AndroidPort, self).check_build(needs_http, printer)
if exit_status:
return exit_status
result = self._check_file_exists(self.path_to_md5sum(), 'md5sum utility')
result = self._check_file_exists(self.path_to_md5sum_host(), 'md5sum host utility') and result
result = self._check_file_exists(self.path_to_forwarder(), 'forwarder utility') and result
if not result:
            # There is a race condition in adb (at least up to version 4.3) on Linux that causes it to go offline periodically.
            # We set the processor affinity for any running adb process to attempt to work around this.
            # See crbug.com/268450.
if self.host.platform.is_linux():
pids = self._executive.running_pids(lambda name: 'adb' in name)
if not pids:
# Apparently adb is not running, which is unusual. Running any adb command should start it.
self._executive.run_command(['adb', 'devices'])
pids = self._executive.running_pids(lambda name: 'adb' in name)
if not pids:
_log.error("The adb daemon does not appear to be running.")
return False
for pid in pids:
self._executive.run_command(['taskset', '-p', '-c', '0', str(pid)])
if not result:
_log.error('For complete Android build requirements, please see:')
_log.error('')
_log.error(' https://www.chromium.org/developers/how-tos/android-build-instructions')
return test_run_results.UNEXPECTED_ERROR_EXIT_STATUS
return self._check_devices(printer)
def _check_devices(self, printer):
# Printer objects aren't threadsafe, so we need to protect calls to them.
lock = threading.Lock()
pool = None
# Push the executables and other files to the devices; doing this now
# means we can do this in parallel in the manager process and not mix
# this in with starting and stopping workers.
def setup_device(worker_number):
d = self.create_driver(worker_number)
serial = d._android_commands.get_serial()
def log_safely(msg, throttled=True):
if throttled:
callback = printer.write_throttled_update
else:
callback = printer.write_update
lock.acquire()
try:
callback("[%s] %s" % (serial, msg))
finally:
lock.release()
log_safely("preparing device", throttled=False)
try:
d._setup_test(log_safely)
log_safely("device prepared", throttled=False)
except (ScriptError, driver.DeviceFailure) as e:
lock.acquire()
_log.warning("[%s] failed to prepare_device: %s" % (serial, str(e)))
lock.release()
except KeyboardInterrupt:
if pool:
pool.terminate()
# FIXME: It would be nice if we knew how many workers we needed.
num_workers = self.default_child_processes()
num_child_processes = int(self.get_option('child_processes'))
if num_child_processes:
num_workers = min(num_workers, num_child_processes)
if num_workers > 1:
pool = ThreadPool(num_workers)
try:
pool.map(setup_device, range(num_workers))
except KeyboardInterrupt:
pool.terminate()
raise
else:
setup_device(0)
if not self._devices.prepared_devices():
_log.error('Could not prepare any devices for testing.')
return test_run_results.NO_DEVICES_EXIT_STATUS
return test_run_results.OK_EXIT_STATUS
def setup_test_run(self):
super(AndroidPort, self).setup_test_run()
# By setting this on the options object, we can propagate the list
# of prepared devices to the workers (it is read in __init__()).
if self._devices._prepared_devices:
self._options.prepared_devices = self._devices.prepared_devices()
else:
# We were called with --no-build, so assume the devices are up to date.
self._options.prepared_devices = [d.get_serial() for d in self._devices.usable_devices(self.host.executive)]
def num_workers(self, requested_num_workers):
return min(len(self._options.prepared_devices), requested_num_workers)
def check_sys_deps(self, needs_http):
for (font_dirs, font_file, package) in HOST_FONT_FILES:
exists = False
for font_dir in font_dirs:
font_path = font_dir + font_file
if self._check_file_exists(font_path, '', logging=False):
exists = True
break
if not exists:
_log.error('You are missing %s under %s. Try installing %s. See build instructions.' % (font_file, font_dirs, package))
return test_run_results.SYS_DEPS_EXIT_STATUS
return test_run_results.OK_EXIT_STATUS
def requires_http_server(self):
"""Chromium Android runs tests on devices, and uses the HTTP server to
serve the actual layout tests to the test driver."""
return True
def start_http_server(self, additional_dirs, number_of_drivers):
additional_dirs[PERF_TEST_PATH_PREFIX] = self.perf_tests_dir()
additional_dirs[LAYOUT_TEST_PATH_PREFIX] = self.layout_tests_dir()
super(AndroidPort, self).start_http_server(additional_dirs, number_of_drivers)
def create_driver(self, worker_number, no_timeout=False):
return ChromiumAndroidDriver(self, worker_number, pixel_tests=self.get_option('pixel_tests'),
driver_details=self._driver_details,
android_devices=self._devices,
# Force no timeout to avoid test driver timeouts before NRWT.
no_timeout=True)
def driver_cmd_line(self):
# Override to return the actual test driver's command line.
return self.create_driver(0)._android_driver_cmd_line(self.get_option('pixel_tests'), [])
def clobber_old_port_specific_results(self):
if not self.get_option('disable_breakpad'):
self._dump_reader.clobber_old_results()
# Overridden protected methods.
def _build_path(self, *comps):
return self._host_port._build_path(*comps)
def _build_path_with_target(self, target, *comps):
return self._host_port._build_path_with_target(target, *comps)
def path_to_apache(self):
return self._host_port.path_to_apache()
def path_to_apache_config_file(self):
return self._host_port.path_to_apache_config_file()
def _path_to_driver(self, target=None):
return self._build_path_with_target(target, self._driver_details.apk_name())
def _path_to_helper(self):
return None
def _path_to_image_diff(self):
return self._host_port._path_to_image_diff()
def _path_to_wdiff(self):
return self._host_port._path_to_wdiff()
def _shut_down_http_server(self, pid):
return self._host_port._shut_down_http_server(pid)
def _driver_class(self):
return ChromiumAndroidDriver
# Local private methods.
@staticmethod
def _android_server_process_constructor(port, server_name, cmd_line, env=None, logging=False):
# We need universal_newlines=True, because 'adb shell' for some unknown reason
# does newline conversion of unix-style LF into win-style CRLF (and we need
# to convert that back). This can cause headaches elsewhere because
# server_process' stdout and stderr are now unicode file-like objects,
# not binary file-like objects like all of the other ports are.
# FIXME: crbug.com/496983.
return server_process.ServerProcess(port, server_name, cmd_line, env,
universal_newlines=True, treat_no_data_as_crash=True, logging=logging)
class AndroidPerf(SingleFileOutputProfiler):
_cached_perf_host_path = None
_have_searched_for_perf_host = False
def __init__(self, host, executable_path, output_dir, android_commands, symfs_path, kallsyms_path, identifier=None):
super(AndroidPerf, self).__init__(host, executable_path, output_dir, "data", identifier)
self._android_commands = android_commands
self._perf_process = None
self._symfs_path = symfs_path
self._kallsyms_path = kallsyms_path
def check_configuration(self):
# Check that perf is installed
if not self._android_commands.file_exists('/system/bin/perf'):
_log.error("Cannot find /system/bin/perf on device %s" % self._android_commands.get_serial())
return False
# Check that the device is a userdebug build (or at least has the necessary libraries).
if self._android_commands.run(['shell', 'getprop', 'ro.build.type']).strip() != 'userdebug':
_log.error("Device %s is not flashed with a userdebug build of Android" % self._android_commands.get_serial())
return False
# FIXME: Check that the binary actually is perf-able (has stackframe pointers)?
# objdump -s a function and make sure it modifies the fp?
# Instruct users to rebuild after export GYP_DEFINES="profiling=1 $GYP_DEFINES"
return True
def print_setup_instructions(self):
_log.error("""
perf on android requires a 'userdebug' build of Android, see:
http://source.android.com/source/building-devices.html"
The perf command can be built from:
https://android.googlesource.com/platform/external/linux-tools-perf/
and requires libefl, libebl, libdw, and libdwfl available in:
https://android.googlesource.com/platform/external/elfutils/
The test driver must be built with profiling=1, make sure you've done:
export GYP_DEFINES="profiling=1 $GYP_DEFINES"
update-webkit --chromium-android
build-webkit --chromium-android
Googlers should read:
http://goto.google.com/cr-android-perf-howto
""")
def attach_to_pid(self, pid):
        assert pid
        assert self._perf_process is None
# FIXME: This can't be a fixed timeout!
cmd = self._android_commands.adb_command() + ['shell', 'perf', 'record', '-g', '-p', pid, 'sleep', 30]
self._perf_process = self._host.executive.popen(cmd)
def _perf_version_string(self, perf_path):
try:
return self._host.executive.run_command([perf_path, '--version'])
        except Exception:
return None
def _find_perfhost_binary(self):
perfhost_version = self._perf_version_string('perfhost_linux')
if perfhost_version:
return 'perfhost_linux'
perf_version = self._perf_version_string('perf')
if perf_version:
return 'perf'
return None
def _perfhost_path(self):
if self._have_searched_for_perf_host:
return self._cached_perf_host_path
self._have_searched_for_perf_host = True
self._cached_perf_host_path = self._find_perfhost_binary()
return self._cached_perf_host_path
def _first_ten_lines_of_profile(self, perf_output):
match = re.search("^#[^\n]*\n((?: [^\n]*\n){1,10})", perf_output, re.MULTILINE)
return match.group(1) if match else None
def profile_after_exit(self):
perf_exitcode = self._perf_process.wait()
if perf_exitcode != 0:
_log.debug("Perf failed (exit code: %i), can't process results." % perf_exitcode)
return
self._android_commands.pull('/data/perf.data', self._output_path)
perfhost_path = self._perfhost_path()
perfhost_report_command = [
'report',
'--input', self._output_path,
'--symfs', self._symfs_path,
'--kallsyms', self._kallsyms_path,
]
if perfhost_path:
perfhost_args = [perfhost_path] + perfhost_report_command + ['--call-graph', 'none']
perf_output = self._host.executive.run_command(perfhost_args)
# We could save off the full -g report to a file if users found that useful.
_log.debug(self._first_ten_lines_of_profile(perf_output))
else:
_log.debug("""
Failed to find perfhost_linux binary, can't process samples from the device.
perfhost_linux can be built from:
https://android.googlesource.com/platform/external/linux-tools-perf/
Alternatively, modern versions of perf (available via apt-get install goobuntu-kernel-tools-common)
may be able to process the perf.data files from the device.
Googlers should read:
http://goto.google.com/cr-android-perf-howto
for instructions on installing pre-built copies of perfhost_linux
http://crbug.com/165250 discusses making these pre-built binaries externally available.
""")
        perfhost_display_path = perfhost_path if perfhost_path else 'perfhost_linux'
        _log.debug("To view the full profile, run:")
        _log.debug(' '.join([perfhost_display_path] + perfhost_report_command))
class ChromiumAndroidDriver(driver.Driver):
def __init__(self, port, worker_number, pixel_tests, driver_details, android_devices, no_timeout=False):
super(ChromiumAndroidDriver, self).__init__(port, worker_number, pixel_tests, no_timeout)
self._in_fifo_path = driver_details.device_fifo_directory() + 'stdin.fifo'
self._out_fifo_path = driver_details.device_fifo_directory() + 'test.fifo'
self._err_fifo_path = driver_details.device_fifo_directory() + 'stderr.fifo'
self._read_stdout_process = None
self._read_stderr_process = None
self._forwarder_process = None
self._original_governors = {}
self._original_kptr_restrict = None
self._android_devices = android_devices
self._android_commands = android_devices.get_device(port._executive, worker_number)
self._driver_details = driver_details
self._debug_logging = self._port._debug_logging
self._created_cmd_line = False
self._device_failed = False
# FIXME: If we taught ProfileFactory about "target" devices we could
# just use the logic in Driver instead of duplicating it here.
if self._port.get_option("profile"):
# FIXME: This should be done once, instead of per-driver!
symfs_path = self._find_or_create_symfs()
kallsyms_path = self._update_kallsyms_cache(symfs_path)
# FIXME: We should pass this some sort of "Bridge" object abstraction around ADB instead of a path/device pair.
self._profiler = AndroidPerf(self._port.host, self._port._path_to_driver(), self._port.results_directory(),
self._android_commands, symfs_path, kallsyms_path)
# FIXME: This is a layering violation and should be moved to Port.check_sys_deps
# once we have an abstraction around an adb_path/device_serial pair to make it
# easy to make these class methods on AndroidPerf.
if not self._profiler.check_configuration():
self._profiler.print_setup_instructions()
sys.exit(1)
else:
self._profiler = None
def __del__(self):
self._teardown_performance()
self._clean_up_cmd_line()
super(ChromiumAndroidDriver, self).__del__()
def _update_kallsyms_cache(self, output_dir):
kallsyms_name = "%s-kallsyms" % self._android_commands.get_serial()
kallsyms_cache_path = self._port.host.filesystem.join(output_dir, kallsyms_name)
self._android_commands.restart_as_root()
saved_kptr_restrict = self._android_commands.run(['shell', 'cat', KPTR_RESTRICT_PATH]).strip()
self._android_commands.run(['shell', 'echo', '0', '>', KPTR_RESTRICT_PATH])
_log.debug("Updating kallsyms file (%s) from device" % kallsyms_cache_path)
self._android_commands.pull("/proc/kallsyms", kallsyms_cache_path)
self._android_commands.run(['shell', 'echo', saved_kptr_restrict, '>', KPTR_RESTRICT_PATH])
return kallsyms_cache_path
def _find_or_create_symfs(self):
environment = self._port.host.copy_current_environment()
env = environment.to_dictionary()
fs = self._port.host.filesystem
if 'ANDROID_SYMFS' in env:
symfs_path = env['ANDROID_SYMFS']
else:
symfs_path = fs.join(self._port.results_directory(), 'symfs')
_log.debug("ANDROID_SYMFS not set, using %s" % symfs_path)
# find the installed path, and the path of the symboled built library
# FIXME: We should get the install path from the device!
symfs_library_path = fs.join(symfs_path, "data/app-lib/%s-1/%s" % (self._driver_details.package_name(), self._driver_details.library_name()))
built_library_path = self._port._build_path('lib', self._driver_details.library_name())
assert(fs.exists(built_library_path))
# FIXME: Ideally we'd check the sha1's first and make a soft-link instead of copying (since we probably never care about windows).
_log.debug("Updating symfs library (%s) from built copy (%s)" % (symfs_library_path, built_library_path))
fs.maybe_make_directory(fs.dirname(symfs_library_path))
fs.copyfile(built_library_path, symfs_library_path)
return symfs_path
def _setup_md5sum_and_push_data_if_needed(self, log_callback):
self._md5sum_path = self._port.path_to_md5sum()
if not self._android_commands.file_exists(MD5SUM_DEVICE_PATH):
if not self._android_commands.push(self._md5sum_path, MD5SUM_DEVICE_PATH):
self._abort('Could not push md5sum to device')
self._push_executable(log_callback)
self._push_fonts(log_callback)
self._push_test_resources(log_callback)
def _setup_test(self, log_callback):
# FIXME: Move this routine and its subroutines off of the AndroidDriver
# class and onto AndroidCommands or some other helper class, so that we
# can initialize the device without needing to create a driver.
if self._android_devices.is_device_prepared(self._android_commands.get_serial()):
return
self._android_commands.restart_adb()
self._android_commands.restart_as_root()
self._setup_md5sum_and_push_data_if_needed(log_callback)
self._setup_performance()
# Required by webkit_support::GetWebKitRootDirFilePath().
# Other directories will be created automatically by adb push.
self._android_commands.mkdir(DEVICE_SOURCE_ROOT_DIR + 'chrome')
        # Allow the test driver full read and write access to the directory on the
        # device, as well as to the FIFOs. We'll need a world-writable directory.
self._android_commands.mkdir(self._driver_details.device_directory(), chmod='777')
self._android_commands.mkdir(self._driver_details.device_fifo_directory(), chmod='777')
# Make sure that the disk cache on the device resets to a clean state.
self._android_commands.run(['shell', 'rm', '-r', self._driver_details.device_cache_directory()])
# Mark this device as having been set up.
self._android_devices.set_device_prepared(self._android_commands.get_serial())
def _log_error(self, message):
_log.error('[%s] %s' % (self._android_commands.get_serial(), message))
def _log_warning(self, message):
_log.warning('[%s] %s' % (self._android_commands.get_serial(), message))
def _log_debug(self, message):
if self._debug_logging:
_log.debug('[%s] %s' % (self._android_commands.get_serial(), message))
def _abort(self, message):
self._device_failed = True
raise driver.DeviceFailure('[%s] %s' % (self._android_commands.get_serial(), message))
@staticmethod
def _extract_hashes_from_md5sum_output(md5sum_output):
assert md5sum_output
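        # Each md5sum output line is "<hash>  <path>"; keep only the hash column.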
return [line.split(' ')[0] for line in md5sum_output]
def _files_match(self, host_file, device_file):
assert self._port.host.filesystem.exists(host_file)
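        # Compare md5 hashes computed on the device and on the host; any mismatch
        # (including a missing device file) means the file needs to be pushed.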
device_hashes = self._extract_hashes_from_md5sum_output(
self._port.host.executive.popen(self._android_commands.adb_command() + ['shell', MD5SUM_DEVICE_PATH, device_file],
stdout=subprocess.PIPE).stdout)
host_hashes = self._extract_hashes_from_md5sum_output(
self._port.host.executive.popen(args=['%s_host' % self._md5sum_path, host_file],
stdout=subprocess.PIPE).stdout)
return host_hashes and device_hashes == host_hashes
def _push_file_if_needed(self, host_file, device_file, log_callback):
basename = self._port.host.filesystem.basename(host_file)
log_callback("checking %s" % basename)
if not self._files_match(host_file, device_file):
log_callback("pushing %s" % basename)
self._android_commands.push(host_file, device_file)
def _push_executable(self, log_callback):
self._push_file_if_needed(self._port.path_to_forwarder(), self._driver_details.device_forwarder_path(), log_callback)
for resource in self._driver_details.additional_resources():
self._push_file_if_needed(self._port._build_path(resource), self._driver_details.device_directory() + resource, log_callback)
self._push_file_if_needed(self._port._build_path('android_main_fonts.xml'), self._driver_details.device_directory() + 'android_main_fonts.xml', log_callback)
self._push_file_if_needed(self._port._build_path('android_fallback_fonts.xml'), self._driver_details.device_directory() + 'android_fallback_fonts.xml', log_callback)
log_callback("checking apk")
if self._files_match(self._port._build_path('apks', 'ContentShell.apk'),
'/data/app/org.chromium.content_shell_apk-1.apk'):
return
log_callback("uninstalling apk")
self._android_commands.run(['uninstall', self._driver_details.package_name()])
driver_host_path = self._port._path_to_driver()
log_callback("installing apk")
install_result = self._android_commands.run(['install', driver_host_path])
if install_result.find('Success') == -1:
self._abort('Failed to install %s onto device: %s' % (driver_host_path, install_result))
def _push_fonts(self, log_callback):
path_to_ahem_font = self._port._build_path('AHEM____.TTF')
self._push_file_if_needed(path_to_ahem_font, self._driver_details.device_fonts_directory() + 'AHEM____.TTF', log_callback)
for (host_dirs, font_file, package) in HOST_FONT_FILES:
for host_dir in host_dirs:
host_font_path = host_dir + font_file
if self._port._check_file_exists(host_font_path, '', logging=False):
self._push_file_if_needed(host_font_path, self._driver_details.device_fonts_directory() + font_file, log_callback)
def _push_test_resources(self, log_callback):
for resource in TEST_RESOURCES_TO_PUSH:
self._push_file_if_needed(self._port.layout_tests_dir() + '/' + resource, DEVICE_LAYOUT_TESTS_DIR + resource, log_callback)
def _get_last_stacktrace(self):
tombstones = self._android_commands.run(['shell', 'ls', '-n', '/data/tombstones/tombstone_*'])
if not tombstones or tombstones.startswith('/data/tombstones/tombstone_*: No such file or directory'):
self._log_error('The driver crashed, but no tombstone found!')
return ''
if tombstones.startswith('/data/tombstones/tombstone_*: Permission denied'):
# FIXME: crbug.com/321489 ... figure out why this happens.
self._log_error('The driver crashed, but we could not read the tombstones!')
return ''
tombstones = tombstones.rstrip().split('\n')
last_tombstone = None
for tombstone in tombstones:
# Format of fields:
# 0 1 2 3 4 5 6
# permission uid gid size date time filename
# -rw------- 1000 1000 45859 2011-04-13 06:00 tombstone_00
fields = tombstone.split()
if len(fields) != 7:
self._log_warning("unexpected line in tombstone output, skipping: '%s'" % tombstone)
continue
if not last_tombstone or fields[4] + fields[5] >= last_tombstone[4] + last_tombstone[5]:
last_tombstone = fields
else:
break
if not last_tombstone:
self._log_error('The driver crashed, but we could not find any valid tombstone!')
return ''
# Use Android tool vendor/google/tools/stack to convert the raw
# stack trace into a human readable format, if needed.
# It takes a long time, so don't do it here.
return '%s\n%s' % (' '.join(last_tombstone),
self._android_commands.run(['shell', 'cat', '/data/tombstones/' + last_tombstone[6]]))
def _get_logcat(self):
return self._android_commands.run(['logcat', '-d', '-v', 'threadtime'])
def _setup_performance(self):
# Disable CPU scaling and drop ram cache to reduce noise in tests
if not self._original_governors:
governor_files = self._android_commands.run(['shell', 'ls', SCALING_GOVERNORS_PATTERN])
if governor_files.find('No such file or directory') == -1:
for file in governor_files.split():
self._original_governors[file] = self._android_commands.run(['shell', 'cat', file]).strip()
self._android_commands.run(['shell', 'echo', 'performance', '>', file])
def _teardown_performance(self):
for file, original_content in self._original_governors.items():
self._android_commands.run(['shell', 'echo', original_content, '>', file])
self._original_governors = {}
def _get_crash_log(self, stdout, stderr, newer_than):
if not stdout:
stdout = ''
stdout += '********* [%s] Logcat:\n%s' % (self._android_commands.get_serial(), self._get_logcat())
if not stderr:
stderr = ''
stderr += '********* [%s] Tombstone file:\n%s' % (self._android_commands.get_serial(), self._get_last_stacktrace())
if not self._port.get_option('disable_breakpad'):
crashes = self._pull_crash_dumps_from_device()
for crash in crashes:
stderr += '********* [%s] breakpad minidump %s:\n%s' % (self._port.host.filesystem.basename(crash), self._android_commands.get_serial(), self._port._dump_reader._get_stack_from_dump(crash))
# The parent method expects stdout and stderr to be byte streams, but
# since adb shell does newline conversion, we used universal_newlines
# when launching the processes, and hence our stdout and stderr are
# text objects that need to be encoded back into bytes.
return super(ChromiumAndroidDriver, self)._get_crash_log(
stdout.encode('utf8', 'replace'),
stderr.encode('utf8', 'replace'),
newer_than)
def cmd_line(self, pixel_tests, per_test_args):
# The returned command line is used to start _server_process. In our case, it's an interactive 'adb shell'.
# The command line passed to the driver process is returned by _driver_cmd_line() instead.
return self._android_commands.adb_command() + ['shell']
def _android_driver_cmd_line(self, pixel_tests, per_test_args):
return driver.Driver.cmd_line(self, pixel_tests, per_test_args)
@staticmethod
def _loop_with_timeout(condition, timeout_secs):
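        # Busy-polls the condition (without sleeping) until it holds or the timeout expires.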
deadline = time.time() + timeout_secs
while time.time() < deadline:
if condition():
return True
return False
def _all_pipes_created(self):
return (self._android_commands.file_exists(self._in_fifo_path) and
self._android_commands.file_exists(self._out_fifo_path) and
self._android_commands.file_exists(self._err_fifo_path))
def _remove_all_pipes(self):
for file in [self._in_fifo_path, self._out_fifo_path, self._err_fifo_path]:
self._android_commands.run(['shell', 'rm', file])
return (not self._android_commands.file_exists(self._in_fifo_path) and
not self._android_commands.file_exists(self._out_fifo_path) and
not self._android_commands.file_exists(self._err_fifo_path))
def start(self, pixel_tests, per_test_args, deadline):
# We override the default start() so that we can call _android_driver_cmd_line()
# instead of cmd_line().
new_cmd_line = self._android_driver_cmd_line(pixel_tests, per_test_args)
# Since _android_driver_cmd_line() is different than cmd_line() we need to provide
# our own mechanism for detecting when the process should be stopped.
if self._current_cmd_line is None:
self._current_android_cmd_line = None
if new_cmd_line != self._current_android_cmd_line:
self.stop()
self._current_android_cmd_line = new_cmd_line
super(ChromiumAndroidDriver, self).start(pixel_tests, per_test_args, deadline)
def _start(self, pixel_tests, per_test_args):
if not self._android_devices.is_device_prepared(self._android_commands.get_serial()):
raise driver.DeviceFailure("%s is not prepared in _start()" % self._android_commands.get_serial())
for retries in range(3):
try:
if self._start_once(pixel_tests, per_test_args):
return
except ScriptError as e:
self._abort('ScriptError("%s") in _start()' % str(e))
self._log_error('Failed to start the content_shell application. Retries=%d. Log:%s' % (retries, self._get_logcat()))
self.stop()
time.sleep(2)
self._abort('Failed to start the content_shell application multiple times. Giving up.')
def _start_once(self, pixel_tests, per_test_args):
super(ChromiumAndroidDriver, self)._start(pixel_tests, per_test_args, wait_for_ready=False)
self._log_debug('Starting forwarder')
self._forwarder_process = self._port._server_process_constructor(
self._port, 'Forwarder', self._android_commands.adb_command() + ['shell', '%s -no-spawn-daemon %s' % (self._driver_details.device_forwarder_path(), FORWARD_PORTS)])
self._forwarder_process.start()
deadline = time.time() + DRIVER_START_STOP_TIMEOUT_SECS
if not self._wait_for_server_process_output(self._forwarder_process, deadline, 'Forwarding device port'):
return False
self._android_commands.run(['logcat', '-c'])
cmd_line_file_path = self._driver_details.command_line_file()
original_cmd_line_file_path = cmd_line_file_path + '.orig'
if self._android_commands.file_exists(cmd_line_file_path) and not self._android_commands.file_exists(original_cmd_line_file_path):
# We check for both the normal path and the backup because we do not want to step
# on the backup. Otherwise, we'd clobber the backup whenever we changed the
# command line during the run.
self._android_commands.run(['shell', 'mv', cmd_line_file_path, original_cmd_line_file_path])
self._android_commands.run(['shell', 'echo'] + self._android_driver_cmd_line(pixel_tests, per_test_args) + ['>', self._driver_details.command_line_file()])
self._created_cmd_line = True
self._android_commands.run(['shell', 'rm', '-rf', self._driver_details.device_crash_dumps_directory()])
self._android_commands.mkdir(self._driver_details.device_crash_dumps_directory(), chmod='777')
start_result = self._android_commands.run(['shell', 'am', 'start', '-e', 'RunInSubThread', '-n', self._driver_details.activity_name()])
if start_result.find('Exception') != -1:
self._log_error('Failed to start the content_shell application. Exception:\n' + start_result)
return False
if not ChromiumAndroidDriver._loop_with_timeout(self._all_pipes_created, DRIVER_START_STOP_TIMEOUT_SECS):
return False
        # Read back the shell prompt to ensure the adb shell is ready.
deadline = time.time() + DRIVER_START_STOP_TIMEOUT_SECS
self._server_process.start()
self._read_prompt(deadline)
self._log_debug('Interactive shell started')
# Start a process to read from the stdout fifo of the test driver and print to stdout.
self._log_debug('Redirecting stdout to ' + self._out_fifo_path)
self._read_stdout_process = self._port._server_process_constructor(
self._port, 'ReadStdout', self._android_commands.adb_command() + ['shell', 'cat', self._out_fifo_path])
self._read_stdout_process.start()
# Start a process to read from the stderr fifo of the test driver and print to stdout.
self._log_debug('Redirecting stderr to ' + self._err_fifo_path)
self._read_stderr_process = self._port._server_process_constructor(
self._port, 'ReadStderr', self._android_commands.adb_command() + ['shell', 'cat', self._err_fifo_path])
self._read_stderr_process.start()
self._log_debug('Redirecting stdin to ' + self._in_fifo_path)
self._server_process.write('cat >%s\n' % self._in_fifo_path)
# Combine the stdout and stderr pipes into self._server_process.
self._server_process.replace_outputs(self._read_stdout_process._proc.stdout, self._read_stderr_process._proc.stdout)
def deadlock_detector(processes, normal_startup_event):
if not ChromiumAndroidDriver._loop_with_timeout(lambda: normal_startup_event.is_set(), DRIVER_START_STOP_TIMEOUT_SECS):
# If normal_startup_event is not set in time, the main thread must be blocked at
# reading/writing the fifo. Kill the fifo reading/writing processes to let the
# main thread escape from the deadlocked state. After that, the main thread will
# treat this as a crash.
self._log_error('Deadlock detected. Processes killed.')
for i in processes:
i.kill()
# Start a thread to kill the pipe reading/writing processes on deadlock of the fifos during startup.
normal_startup_event = threading.Event()
threading.Thread(name='DeadlockDetector', target=deadlock_detector,
args=([self._server_process, self._read_stdout_process, self._read_stderr_process], normal_startup_event)).start()
# The test driver might crash during startup or when the deadlock detector hits
# a deadlock and kills the fifo reading/writing processes.
if not self._wait_for_server_process_output(self._server_process, deadline, '#READY'):
return False
# Inform the deadlock detector that the startup is successful without deadlock.
normal_startup_event.set()
self._log_debug("content_shell is ready")
return True
def _pid_from_android_ps_output(self, ps_output, package_name):
# ps output seems to be fixed width, we only care about the name and the pid
# u0_a72 21630 125 947920 59364 ffffffff 400beee4 S org.chromium.native_test
for line in ps_output.split('\n'):
            if line.find(package_name) != -1:
match = re.match(r'\S+\s+(\d+)', line)
return int(match.group(1))
def _pid_on_target(self):
# FIXME: There must be a better way to do this than grepping ps output!
ps_output = self._android_commands.run(['shell', 'ps'])
return self._pid_from_android_ps_output(ps_output, self._driver_details.package_name())
def stop(self):
if not self._device_failed:
# Do not try to stop the application if there's something wrong with the device; adb may hang.
# FIXME: crbug.com/305040. Figure out if it's really hanging (and why).
self._android_commands.run(['shell', 'am', 'force-stop', self._driver_details.package_name()])
if self._read_stdout_process:
self._read_stdout_process.kill()
self._read_stdout_process = None
if self._read_stderr_process:
self._read_stderr_process.kill()
self._read_stderr_process = None
super(ChromiumAndroidDriver, self).stop()
if self._forwarder_process:
self._forwarder_process.kill()
self._forwarder_process = None
if self._android_devices.is_device_prepared(self._android_commands.get_serial()):
if not ChromiumAndroidDriver._loop_with_timeout(self._remove_all_pipes, DRIVER_START_STOP_TIMEOUT_SECS):
self._abort('Failed to remove fifo files. May be locked.')
self._clean_up_cmd_line()
def _pull_crash_dumps_from_device(self):
result = []
if not self._android_commands.file_exists(self._driver_details.device_crash_dumps_directory()):
return result
dumps = self._android_commands.run(['shell', 'ls', self._driver_details.device_crash_dumps_directory()])
for dump in dumps.splitlines():
device_dump = '%s/%s' % (self._driver_details.device_crash_dumps_directory(), dump)
local_dump = self._port._filesystem.join(self._port._dump_reader.crash_dumps_directory(), dump)
# FIXME: crbug.com/321489. Figure out why these commands would fail ...
err = self._android_commands.run(['shell', 'chmod', '777', device_dump])
if not err:
self._android_commands.pull(device_dump, local_dump)
if not err:
self._android_commands.run(['shell', 'rm', '-f', device_dump])
if self._port._filesystem.exists(local_dump):
result.append(local_dump)
return result
def _clean_up_cmd_line(self):
if not self._created_cmd_line:
return
cmd_line_file_path = self._driver_details.command_line_file()
original_cmd_line_file_path = cmd_line_file_path + '.orig'
if self._android_commands.file_exists(original_cmd_line_file_path):
self._android_commands.run(['shell', 'mv', original_cmd_line_file_path, cmd_line_file_path])
elif self._android_commands.file_exists(cmd_line_file_path):
self._android_commands.run(['shell', 'rm', cmd_line_file_path])
self._created_cmd_line = False
def _command_from_driver_input(self, driver_input):
command = super(ChromiumAndroidDriver, self)._command_from_driver_input(driver_input)
if command.startswith('/'):
fs = self._port._filesystem
# FIXME: what happens if command lies outside of the layout_tests_dir on the host?
relative_test_filename = fs.relpath(command, fs.dirname(self._port.layout_tests_dir()))
command = DEVICE_WEBKIT_BASE_DIR + relative_test_filename
return command
def _read_prompt(self, deadline):
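        # Scan stdout for a shell prompt, which ends in '# ' (root) or '$ ' (non-root).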
last_char = ''
while True:
current_char = self._server_process.read_stdout(deadline, 1)
if current_char == ' ':
if last_char in ('#', '$'):
return
last_char = current_char
| highweb-project/highweb-webcl-html5spec | third_party/WebKit/Tools/Scripts/webkitpy/layout_tests/port/android.py | Python | bsd-3-clause | 61,133 |
import numpy
import h5py
import cPickle as pickle
class Serializer(object):
def __init__(self, parent, socket = None):
self._parent = parent
self._socket = socket
if(socket):
import threading
self.lock = threading.Lock()
else:
            # Use an internal server if there's no socket
self._server = Server(None)
def call(self, data):
if(self._socket):
with self.lock:
self.send(data)
return self.recv()
else:
if(data['func'] == 'attrs'):
ret, _ = self._serialize(self._server.handleRPC(data),[],data['fileName'],data['path'])
return self._deserialize(ret)
else:
ret, _ = self._serialize(self._server.handleRPC(data),[],None,None)
return self._deserialize(ret)
def recv(self):
data = pickle.loads(self._socket.recv())
ret = self._deserialize(data)
return ret
def _deserialize(self, data):
if(isinstance(data, dict)):
if('className' in data):
if(data['className'] == "Dataset"):
data = Dataset(self._parent, data['fileName'], data['path'])
elif(data['className'] == "Group"):
data = Group(self._parent, data['fileName'], data['path'])
elif(data['className'] == "Attributes"):
data = Attributes(self._parent, data['fileName'], data['path'])
elif(data['className'] == "SoftLink"):
data = h5py.SoftLink(data['path'])
elif(data['className'] == "ExternalLink"):
data = h5py.ExternalLink(data['fileName'],data['path'])
elif(data['className'] == "exception"):
exc_type = data['exc_type']
exc_value = data['exc_value']
raise exc_type(exc_value)
elif(data['className'] == "ndarray" and self._socket):
d = self._socket.recv()
data = numpy.frombuffer(buffer(d), dtype=data['dtype']).reshape(data['shape'])
elif(data['className'] == "File"):
pass
else:
raise RuntimeError('Unknown class: %s' % data['className'])
else:
# We need to sort to be able to receive any possible arrays
# in the correct order
for k in sorted(data.keys()):
data[k] = self._deserialize(data[k])
elif isinstance(data, list) or isinstance(data, tuple):
ldata = [None]*len(data)
for i in range(len(data)):
ldata[i] = self._deserialize(data[i])
data = type(data)(ldata)
return data
def send(self,data, fileName = None, path = None):
data, arrays = self._serialize(data, [], fileName, path)
flags = 0
if(len(arrays)):
import zmq
flags = zmq.SNDMORE
self._socket.send(pickle.dumps(data), flags)
for i in range(len(arrays)):
# When sending the last array change the flag back
if(i == len(arrays) -1):
flags = 0
self._socket.send(arrays[i], flags)
def _serialize(self, data, arrays, fileName, path):
if type(data) is h5py.Dataset:
data = dict(
className = "Dataset",
fileName = data.file.filename,
path = data.name
)
elif type(data) is h5py.Group:
data = dict(
className = "Group",
fileName = data.file.filename,
path = data.name
)
elif type(data) is h5py.AttributeManager:
data = dict(
className = "Attributes",
fileName = fileName,
path = path,
)
elif type(data) is h5py.File:
data = dict(
className = "File",
fileName = data.file.filename,
path = ''
)
elif type(data) is h5proxy.ExternalLink:
data = dict(
className = "ExternalLink",
fileName = data.filename,
path = data.path
)
elif type(data) is h5proxy.SoftLink:
data = dict(
className = "SoftLink",
path = data.path
)
elif isinstance(data, numpy.ndarray) and self._socket:
arrays.append(data)
data = dict(
className = "ndarray",
dtype = data.dtype,
shape = data.shape
)
elif isinstance(data, dict):
# We need to sort to be able to receive any possible arrays
# in the correct order
for k in sorted(data.keys()):
data[k], arrays = self._serialize(data[k], arrays, fileName, path)
elif isinstance(data, list) or isinstance(data, tuple):
ldata = [None]*len(data)
for i in range(len(data)):
ldata[i], arrays = self._serialize(data[i], arrays, fileName, path)
data = type(data)(ldata)
return data, arrays
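# These imports live at the bottom of the module, presumably to avoid a
# circular dependency between this module and h5proxy.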
from .h5proxy import Dataset,Group,File,Attributes, SoftLink, ExternalLink
import h5proxy
from .server import Server
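# Minimal usage sketch (hypothetical wiring; assumes a zmq REQ socket whose
# peer runs a matching Server):
#
#   import zmq
#   ctx = zmq.Context()
#   sock = ctx.socket(zmq.REQ)
#   sock.connect('tcp://localhost:5555')
#   serializer = Serializer(parent=None, socket=sock)
#   attrs = serializer.call({'func': 'attrs', 'fileName': 'data.h5', 'path': '/'})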
| FilipeMaia/h5proxy | h5proxy/serializer.py | Python | bsd-2-clause | 5,488 |
import sys
import numpy as np
import unittest as ut
import espressomd
import espressomd.observables
import espressomd.lb
from espressomd import utils
import tests_common
@ut.skipIf(
not espressomd.has_features('LB_GPU') or espressomd.has_features('SHANCHEN'),
"LB_GPU not compiled in or SHANCHEN activated, can not check functionality.")
class TestCylindricalLBObservable(ut.TestCase):
"""
    Testcase for the cylindrical LB profile observables (flux density and velocity).
"""
system = espressomd.System(box_l=(10,10,10))
system.time_step = 0.01
system.cell_system.skin = 0.4
positions = []
params = {
'ids': range(10),
'center': [5.0, 5.0, 5.0], # center of the histogram
'axis': 'y',
'n_r_bins': 10, # number of bins in r
'n_phi_bins': 2, # -*- in phi
'n_z_bins': 2, # -*- in z
'min_r': 0.0,
'min_phi': -np.pi,
'min_z': -5.0,
'max_r': 5.0,
'max_phi': np.pi,
'max_z': 5.0,
}
    @classmethod
    def setUpClass(cls):
        cls.lbf_gpu = espressomd.lb.LBFluidGPU(agrid=1.0, fric=1.0, dens=1.0, visc=1.0, tau=0.01)
        cls.lbf_cpu = espressomd.lb.LBFluid(agrid=1.0, fric=1.0, dens=1.0, visc=1.0, tau=0.01)
def tearDown(self):
del self.positions[:]
def swap_axis(self, arr, axis):
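        # The test data is constructed in a z-aligned convention; rotate it onto
        # the requested axis.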
if axis == 'x':
arr = np.dot(tests_common.rotation_matrix(
[0, 1, 0], np.pi / 2.0), arr)
elif axis == 'y':
arr = np.dot(tests_common.rotation_matrix(
[1, 0, 0], -np.pi / 2.0), arr)
return arr
def swap_axis_inverse(self, arr, axis):
if axis == 'x':
arr = np.dot(tests_common.rotation_matrix(
[0, 1, 0], -np.pi / 2.0), arr)
elif axis == 'y':
arr = np.dot(tests_common.rotation_matrix(
[1, 0, 0], np.pi / 2.0), arr)
return arr
def pol_coords(self):
positions = np.zeros((len(self.positions), 3))
for i, p in enumerate(self.positions):
tmp = p - np.array(self.params['center'])
tmp = self.swap_axis_inverse(tmp, self.params['axis'])
positions[i, :] = tests_common.transform_pos_from_cartesian_to_polar_coordinates(
tmp)
return positions
def set_particles(self):
self.system.part.clear()
self.system.part.add(pos=self.positions)
def set_fluid_velocity(self):
del self.positions[:]
# Choose the cartesian velocities such that each particle gets the same
# v_r, v_phi and v_z, respectively.
self.v_r = .75
self.v_phi = 2.5
self.v_z = 1.5
node_positions = np.arange(-4.5, 5.0, 1.0)
for i, value in enumerate(node_positions):
position = np.array(
[node_positions[i], node_positions[i], node_positions[i]])
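            # Invert v_r = (x*v_x + y*v_y)/r and v_phi = (x*v_y - y*v_x)/r**2
            # (with r = sqrt(x**2 + y**2)) to recover the cartesian components.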
v_y = (position[0] * np.sqrt(position[0]**2.0 + position[1]**2.0) * self.v_phi +
position[1] * self.v_r) / np.sqrt(position[0]**2.0 + position[1]**2.0)
v_x = (self.v_r * np.sqrt(position[0]**2.0 + position[1] **
2.0) - position[1] * v_y) / position[0]
velocity = np.array([v_x, v_y, self.v_z])
velocity = self.swap_axis(velocity, self.params['axis'])
position = self.swap_axis(position, self.params['axis'])
position += np.array(self.params['center'])
self.positions.append(position)
self.lbf[np.array(position, dtype=int)].velocity = velocity
def set_fluid_velocity_on_all_nodes(self):
self.system.part.clear()
self.v_r = .75
self.v_phi = 2.5
self.v_z = 1.5
node_positions = np.arange(-4.5, 5.0, 1.0)
for x in node_positions:
for y in node_positions:
for z in node_positions:
position = np.array([x, y, z])
v_y = (position[0] * np.sqrt(position[0]**2.0 + position[1]**2.0) * self.v_phi +
position[1] * self.v_r) / np.sqrt(position[0]**2.0 + position[1]**2.0)
v_x = (self.v_r * np.sqrt(position[0]**2.0 + position[1] **
2.0) - position[1] * v_y) / position[0]
velocity = np.array([v_x, v_y, self.v_z])
velocity = self.swap_axis(velocity, self.params['axis'])
position = self.swap_axis(position, self.params['axis'])
position += np.array(self.params['center'])
self.positions.append(position)
self.lbf[np.array(position, dtype=int)].velocity = velocity
def normalize_with_bin_volume(self, histogram):
bin_volume = tests_common.get_cylindrical_bin_volume(
self.params['n_r_bins'],
self.params['n_phi_bins'],
self.params['n_z_bins'],
self.params['min_r'],
self.params['max_r'],
self.params['min_phi'],
self.params['max_phi'],
self.params['min_z'],
self.params['max_z'])
# Normalization
for i in range(self.params['n_r_bins']):
histogram[i, :, :] /= bin_volume[i]
return histogram
def LB_fluxdensity_profile_test(self):
self.set_fluid_velocity()
self.set_particles()
# Set up the Observable.
p = espressomd.observables.CylindricalLBFluxDensityProfileAtParticlePositions(
**self.params)
core_hist = np.array(
p.calculate()).reshape(
self.params['n_r_bins'],
self.params['n_phi_bins'],
self.params['n_z_bins'],
3)
core_hist_v_r = core_hist[:, :, :, 0]
core_hist_v_phi = core_hist[:, :, :, 1]
core_hist_v_z = core_hist[:, :, :, 2]
self.pol_positions = self.pol_coords()
np_hist, _ = np.histogramdd(self.pol_positions, bins=(self.params['n_r_bins'],
self.params['n_phi_bins'],
self.params['n_z_bins']),
range=[(self.params['min_r'],
self.params['max_r']),
(self.params['min_phi'],
self.params['max_phi']),
(self.params['min_z'],
self.params['max_z'])])
np_hist = self.normalize_with_bin_volume(np_hist)
np.testing.assert_array_almost_equal(np_hist * self.v_r, core_hist_v_r)
np.testing.assert_array_almost_equal(
np_hist * self.v_phi, core_hist_v_phi)
np.testing.assert_array_almost_equal(np_hist * self.v_z, core_hist_v_z)
def LB_velocity_profile_at_particle_positions_test(self):
self.set_fluid_velocity()
self.set_particles()
# Set up the Observable.
p = espressomd.observables.CylindricalLBVelocityProfileAtParticlePositions(
**self.params)
core_hist = np.array(
p.calculate()).reshape(
self.params['n_r_bins'],
self.params['n_phi_bins'],
self.params['n_z_bins'],
3)
core_hist_v_r = core_hist[:, :, :, 0]
core_hist_v_phi = core_hist[:, :, :, 1]
core_hist_v_z = core_hist[:, :, :, 2]
self.pol_positions = self.pol_coords()
np_hist, _ = np.histogramdd(self.pol_positions, bins=(self.params['n_r_bins'],
self.params['n_phi_bins'],
self.params['n_z_bins']),
range=[(self.params['min_r'],
self.params['max_r']),
(self.params['min_phi'],
self.params['max_phi']),
(self.params['min_z'],
self.params['max_z'])])
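        # Normalize every non-empty bin to 1 so the histogram acts as an
        # indicator; the expected profile is then indicator * velocity component.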
for x in np.nditer(np_hist, op_flags=['readwrite']):
if x[...] > 0.0:
x[...] /= x[...]
np.testing.assert_array_almost_equal(np_hist * self.v_r, core_hist_v_r)
np.testing.assert_array_almost_equal(np_hist * self.v_phi, core_hist_v_phi)
np.testing.assert_array_almost_equal(np_hist * self.v_z, core_hist_v_z)
def LB_velocity_profile_test(self):
self.set_fluid_velocity_on_all_nodes()
# Set up the Observable.
local_params = self.params.copy()
del local_params['ids']
local_params['sampling_delta_x'] = 1
local_params['sampling_delta_y'] = 1
local_params['sampling_delta_z'] = 1
local_params['sampling_offset_x'] = 0.5
local_params['sampling_offset_y'] = 0.5
local_params['sampling_offset_z'] = 0.5
local_params['allow_empty_bins'] = True
p = espressomd.observables.CylindricalLBVelocityProfile(
**local_params)
core_hist = np.array(
p.calculate()).reshape(
self.params['n_r_bins'],
self.params['n_phi_bins'],
self.params['n_z_bins'],
3)
core_hist_v_r = core_hist[:, :, :, 0]
core_hist_v_phi = core_hist[:, :, :, 1]
core_hist_v_z = core_hist[:, :, :, 2]
self.pol_positions = self.pol_coords()
np_hist, _ = np.histogramdd(self.pol_positions, bins=(self.params['n_r_bins'],
self.params['n_phi_bins'],
self.params['n_z_bins']),
range=[(self.params['min_r'],
self.params['max_r']),
(self.params['min_phi'],
self.params['max_phi']),
(self.params['min_z'],
self.params['max_z'])])
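        # As above: normalize every non-empty bin to one (occupancy mask).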
for x in np.nditer(np_hist, op_flags=['readwrite']):
if x[...] > 0.0:
x[...] /= x[...]
np.testing.assert_array_almost_equal(np_hist * self.v_r, core_hist_v_r)
np.testing.assert_array_almost_equal(np_hist * self.v_phi, core_hist_v_phi)
np.testing.assert_array_almost_equal(np_hist * self.v_z, core_hist_v_z)
def test_x_axis(self):
self.params['axis'] = 'x'
self.lbf = self.lbf_gpu
self.system.actors.add(self.lbf)
self.LB_fluxdensity_profile_test()
self.LB_velocity_profile_at_particle_positions_test()
self.LB_velocity_profile_test()
self.system.actors.remove(self.lbf)
self.lbf = self.lbf_cpu
self.system.actors.add(self.lbf)
self.LB_fluxdensity_profile_test()
self.LB_velocity_profile_test()
self.LB_velocity_profile_at_particle_positions_test()
self.system.actors.remove(self.lbf)
def test_y_axis(self):
self.params['axis'] = 'y'
self.lbf = self.lbf_gpu
self.system.actors.add(self.lbf)
self.LB_fluxdensity_profile_test()
self.LB_velocity_profile_at_particle_positions_test()
self.LB_velocity_profile_test()
self.system.actors.remove(self.lbf)
self.lbf = self.lbf_cpu
self.system.actors.add(self.lbf)
self.LB_fluxdensity_profile_test()
self.LB_velocity_profile_test()
self.LB_velocity_profile_at_particle_positions_test()
self.system.actors.remove(self.lbf)
def test_z_axis(self):
self.params['axis'] = 'z'
self.lbf = self.lbf_gpu
self.system.actors.add(self.lbf)
self.LB_fluxdensity_profile_test()
self.LB_velocity_profile_at_particle_positions_test()
self.LB_velocity_profile_test()
self.system.actors.remove(self.lbf)
self.lbf = self.lbf_cpu
self.system.actors.add(self.lbf)
self.LB_fluxdensity_profile_test()
self.LB_velocity_profile_test()
self.LB_velocity_profile_at_particle_positions_test()
self.system.actors.remove(self.lbf)
if __name__ == "__main__":
suite = ut.TestSuite()
suite.addTests(ut.TestLoader().loadTestsFromTestCase(
TestCylindricalLBObservable))
result = ut.TextTestRunner(verbosity=4).run(suite)
sys.exit(not result.wasSuccessful())
| KonradBreitsprecher/espresso | testsuite/observable_cylindricalLB.py | Python | gpl-3.0 | 12,645 |
# coding=utf-8
import re
from typing import Optional, List
from PyQt5.QtCore import QObject, QRegularExpression, pyqtProperty, pyqtSlot
from PyQt5.QtCore import QVariant
from PyQt5.QtCore import pyqtSignal
from PyQt5.QtGui import QColor, QFont
from PyQt5.QtGui import QTextCharFormat
from PyQt5.QtQml import QJSValue
from PyQt5.QtQuick import QQuickItem
from ifj2017.ide.code.expression import SyntaxHighlighter, HighlightRule
from ifj2017.ide.settings import SEARCH_FORMAT
__author__ = "Son Hai Nguyen"
__copyright__ = "Copyright 2017, /dej/uran/dom team"
__credits__ = ["Josef Kolář", "Son Hai Nguyen", "Martin Omacht", "Robert Navrátil"]
__license__ = "GNU GPL Version 3"
class ExpSyntaxHighlighter(QObject):
"""
    Class which wraps SyntaxHighlighter and exposes only the target
"""
searchMatchedLinesChanged = pyqtSignal(QVariant, arguments=["lines"])
def __init__(self, parent: Optional[QObject] = None):
super().__init__(parent)
self._syntax_highlighter = SyntaxHighlighter(self)
self._base_font = None
self._search_matched_lines = []
def _setupFormat(self, color: QColor, fontSettings: QFont, colorIsForeground: bool = True) -> QTextCharFormat:
pattern_format = QTextCharFormat()
if color and colorIsForeground:
pattern_format.setForeground(color)
if color and (not colorIsForeground):
pattern_format.setBackground(color)
pattern_format.setFontItalic(fontSettings.italic())
pattern_format.setFontWeight(fontSettings.bold())
return pattern_format
@pyqtSlot(list, QJSValue, QFont)
def addHighlightMultiColorRule(self, patterns: List[str], color: QJSValue,
fontSettings: QFont) -> None:
"""
Adds highlight rule to syntax highlighter
        :param patterns: Regexp patterns to be matched
        :param color: Foreground color of matched text
        :param fontSettings: Determines font weight and italic
"""
pattern_format = list()
self._base_font = fontSettings
for single_color in color.toVariant():
pattern_format.append(self._setupFormat(QColor(single_color), fontSettings))
for single_pattern in patterns:
self._syntax_highlighter.addHighlightRule(
HighlightRule(pattern_format, QRegularExpression(single_pattern))
)
@pyqtSlot(list, QColor, QFont)
def addHighlightSingleColorRule(self, patterns: List[str], color: QColor, fontSettings: QFont) -> None:
"""
Adds highlight rule to syntax highlighter
        :param patterns: Regexp patterns to be matched
        :param color: Foreground color of matched text
        :param fontSettings: Determines font weight and italic
"""
pattern_format = self._setupFormat(color, fontSettings)
self._base_font = fontSettings
for single_pattern in patterns:
self._syntax_highlighter.addHighlightRule(
HighlightRule(pattern_format, QRegularExpression(single_pattern))
)
@pyqtSlot(str)
def setSearchPattern(self, pattern: str) -> None:
pattern_format = self._setupFormat(QColor(SEARCH_FORMAT), self._base_font, False)
if pattern:
self._syntax_highlighter.setSearchRule(
HighlightRule(pattern_format, QRegularExpression(re.escape(pattern), QRegularExpression.CaseInsensitiveOption))
)
else:
self._syntax_highlighter.setSearchRule(None)
self._syntax_highlighter.rehighlight()
self._search_matched_lines = self._syntax_highlighter.searchMatchedLines()
self.searchMatchedLinesChanged.emit(QVariant(self._search_matched_lines))
@pyqtProperty(QVariant, notify=searchMatchedLinesChanged)
def searchMatchedLines(self):
return QVariant(self._search_matched_lines)
@pyqtProperty(QQuickItem)
def target(self) -> None:
return self._syntax_highlighter.target
@target.setter
def target(self, v: QQuickItem) -> None:
self._syntax_highlighter.target = v
| thejoeejoee/VUT-FIT-IFJ-2017-toolkit | ifj2017/ide/code/expression/exp_syntax_highlighter.py | Python | gpl-3.0 | 4,135 |
from dolfin import *
from dolfin_adjoint import *
if not hasattr(dolfin, "FunctionAssigner"):
info_red("Need dolfin.FunctionAssigner for this test.")
import sys
sys.exit(0)
mesh = UnitIntervalMesh(2)
V = VectorFunctionSpace(mesh, "CG", 2)
P = FunctionSpace(mesh, "CG", 1)
Z = MixedFunctionSpace([V, P])
def main(u, p):
assigner_u = FunctionAssigner(Z.sub(0), V)
assigner_p = FunctionAssigner(Z.sub(1), P)
z = Function(Z, name="Output")
assigner_u.assign(z.sub(0), u)
assigner_p.assign(z.sub(1), p)
return z
if __name__ == "__main__":
u = interpolate(Constant((1,)), V, name="Velocity")
p = interpolate(Expression("x[0] + 1.0"), P, name="Pressure")
z = main(u, p)
A = tuple(p.vector())
B = tuple(Function(z.sub(1)).vector())
assert A == B # Check for some dolfin bugs that have been fixed
assert adjglobals.adjointer.equation_count == 5
success = replay_dolfin(tol=0.0, stop=True)
assert success
form = lambda z: inner(z, z)*dx
J = Functional(form(z), name="a")
m = FunctionControl("Pressure")
Jm = assemble(form(z))
dJdm = compute_gradient(J, m, forget=False)
eps = 0.0001
dJdm_fd = Function(P)
for i in range(P.dim()):
p_ptb = Function(p)
p_ptb.vector()[i] += eps
z_ptb = main(u, p_ptb)
J_ptb = assemble(form(z_ptb))
dJdm_fd.vector()[i] = (J_ptb - Jm)/eps
print "dJdm_fd: ", list(dJdm_fd.vector())
dJdm_tlm_result = Function(P)
dJdm_tlm = compute_gradient_tlm(J, m, forget=False)
for i in range(P.dim()):
test_vec = Function(P)
test_vec.vector()[i] = 1.0
dJdm_tlm_result.vector()[i] = dJdm_tlm.inner(test_vec.vector())
print "dJdm_tlm: ", list(dJdm_tlm_result.vector())
def Jhat(p):
z = main(u, p)
return assemble(form(z))
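    # taylor_test checks that the Taylor remainder |Jhat(m + h) - Jhat(m) - dJdm.h|
    # converges at (roughly) second order in h; minconv > 1.8 confirms the gradient.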
minconv = taylor_test(Jhat, m, Jm, dJdm, seed=1.0e-3)
assert minconv > 1.8
minconv = taylor_test(Jhat, m, Jm, dJdm_tlm, seed=1.0e-3)
assert minconv > 1.8
| pf4d/dolfin-adjoint | tests_dolfin/function_assigner/function_assigner.py | Python | lgpl-3.0 | 2,022 |
from pythonthegathering import ManaPool, spell
pool = ManaPool()
@spell('WBB')
def boop(x):
print(x)
pool.tap('plains').tap('swamp').tap('swamp')
boop('boop', mana_pool=pool, mana_pay={'W': 1, 'B': 2})
| linky00/pythonthegathering | test.py | Python | mit | 209 |
# -*- encoding: utf-8 -*-
##############################################################################
#
# Daniel Campos (danielcampos@avanzosc.es) Date: 29/09/2014
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see http://www.gnu.org/licenses/.
#
##############################################################################
from . import mrp_subproduct
from . import mrp_production
| InakiZabala/odoomrp-wip | mrp_byproduct_operations/models/__init__.py | Python | agpl-3.0 | 982 |
"""Implementation of the WebSocket protocol.
`WebSockets <http://dev.w3.org/html5/websockets/>`_ allow for bidirectional
communication between the browser and server.
.. warning::
The WebSocket protocol was recently finalized as `RFC 6455
<http://tools.ietf.org/html/rfc6455>`_ and is not yet supported in
all browsers. Refer to http://caniuse.com/websockets for details
on compatibility. In addition, during development the protocol
went through several incompatible versions, and some browsers only
support older versions. By default this module only supports the
latest version of the protocol, but optional support for an older
version (known as "draft 76" or "hixie-76") can be enabled by
overriding `WebSocketHandler.allow_draft76` (see that method's
documentation for caveats).
"""
from __future__ import absolute_import, division, print_function, with_statement
# Author: Jacob Kristhammar, 2010
import array
import base64
import collections
import functools
import hashlib
import os
import struct
import time
import tornado.escape
import tornado.web
from tornado.concurrent import Future
from tornado.escape import utf8, native_str
from tornado import httpclient
from tornado.ioloop import IOLoop
from tornado.log import gen_log, app_log
from tornado.netutil import Resolver
from tornado import simple_httpclient
from tornado.util import bytes_type, unicode_type
try:
xrange # py2
except NameError:
xrange = range # py3
class WebSocketHandler(tornado.web.RequestHandler):
"""Subclass this class to create a basic WebSocket handler.
Override `on_message` to handle incoming messages, and use
`write_message` to send messages to the client. You can also
override `open` and `on_close` to handle opened and closed
connections.
See http://dev.w3.org/html5/websockets/ for details on the
JavaScript interface. The protocol is specified at
http://tools.ietf.org/html/rfc6455.
    Here is an example WebSocket handler that echoes all received messages
    back to the client::
class EchoWebSocket(websocket.WebSocketHandler):
def open(self):
print "WebSocket opened"
def on_message(self, message):
self.write_message(u"You said: " + message)
def on_close(self):
print "WebSocket closed"
WebSockets are not standard HTTP connections. The "handshake" is
HTTP, but after the handshake, the protocol is
message-based. Consequently, most of the Tornado HTTP facilities
are not available in handlers of this type. The only communication
methods available to you are `write_message()`, `ping()`, and
`close()`. Likewise, your request handler class should implement
`open()` method rather than ``get()`` or ``post()``.
If you map the handler above to ``/websocket`` in your application, you can
invoke it in JavaScript with::
var ws = new WebSocket("ws://localhost:8888/websocket");
ws.onopen = function() {
ws.send("Hello, world");
};
ws.onmessage = function (evt) {
alert(evt.data);
};
This script pops up an alert box that says "You said: Hello, world".
"""
def __init__(self, application, request, **kwargs):
tornado.web.RequestHandler.__init__(self, application, request,
**kwargs)
self.stream = request.connection.stream
self.ws_connection = None
def _execute(self, transforms, *args, **kwargs):
self.open_args = args
self.open_kwargs = kwargs
# Websocket only supports GET method
if self.request.method != 'GET':
self.stream.write(tornado.escape.utf8(
"HTTP/1.1 405 Method Not Allowed\r\n\r\n"
))
self.stream.close()
return
# Upgrade header should be present and should be equal to WebSocket
if self.request.headers.get("Upgrade", "").lower() != 'websocket':
self.stream.write(tornado.escape.utf8(
"HTTP/1.1 400 Bad Request\r\n\r\n"
"Can \"Upgrade\" only to \"WebSocket\"."
))
self.stream.close()
return
# Connection header should be upgrade. Some proxy servers/load balancers
# might mess with it.
headers = self.request.headers
connection = map(lambda s: s.strip().lower(), headers.get("Connection", "").split(","))
if 'upgrade' not in connection:
self.stream.write(tornado.escape.utf8(
"HTTP/1.1 400 Bad Request\r\n\r\n"
"\"Connection\" must be \"Upgrade\"."
))
self.stream.close()
return
# The difference between version 8 and 13 is that in 8 the
# client sends a "Sec-Websocket-Origin" header and in 13 it's
# simply "Origin".
if self.request.headers.get("Sec-WebSocket-Version") in ("7", "8", "13"):
self.ws_connection = WebSocketProtocol13(self)
self.ws_connection.accept_connection()
elif (self.allow_draft76() and
"Sec-WebSocket-Version" not in self.request.headers):
self.ws_connection = WebSocketProtocol76(self)
self.ws_connection.accept_connection()
else:
self.stream.write(tornado.escape.utf8(
"HTTP/1.1 426 Upgrade Required\r\n"
"Sec-WebSocket-Version: 8\r\n\r\n"))
self.stream.close()
def write_message(self, message, binary=False):
"""Sends the given message to the client of this Web Socket.
The message may be either a string or a dict (which will be
encoded as json). If the ``binary`` argument is false, the
message will be sent as utf8; in binary mode any byte string
is allowed.
"""
if isinstance(message, dict):
message = tornado.escape.json_encode(message)
self.ws_connection.write_message(message, binary=binary)
def select_subprotocol(self, subprotocols):
"""Invoked when a new WebSocket requests specific subprotocols.
``subprotocols`` is a list of strings identifying the
subprotocols proposed by the client. This method may be
overridden to return one of those strings to select it, or
``None`` to not select a subprotocol. Failure to select a
subprotocol does not automatically abort the connection,
although clients may close the connection if none of their
proposed subprotocols was selected.
"""
return None
def open(self):
"""Invoked when a new WebSocket is opened.
The arguments to `open` are extracted from the `tornado.web.URLSpec`
regular expression, just like the arguments to
`tornado.web.RequestHandler.get`.
"""
pass
def on_message(self, message):
"""Handle incoming messages on the WebSocket
This method must be overridden.
"""
raise NotImplementedError
def ping(self, data):
"""Send ping frame to the remote end."""
self.ws_connection.write_ping(data)
def on_pong(self, data):
"""Invoked when the response to a ping frame is received."""
pass
def on_close(self):
"""Invoked when the WebSocket is closed."""
pass
def close(self):
"""Closes this Web Socket.
Once the close handshake is successful the socket will be closed.
"""
self.ws_connection.close()
def allow_draft76(self):
"""Override to enable support for the older "draft76" protocol.
The draft76 version of the websocket protocol is disabled by
default due to security concerns, but it can be enabled by
overriding this method to return True.
Connections using the draft76 protocol do not support the
``binary=True`` flag to `write_message`.
Support for the draft76 protocol is deprecated and will be
removed in a future version of Tornado.
"""
return False
def get_websocket_scheme(self):
"""Return the url scheme used for this request, either "ws" or "wss".
This is normally decided by HTTPServer, but applications
may wish to override this if they are using an SSL proxy
that does not provide the X-Scheme header as understood
by HTTPServer.
Note that this is only used by the draft76 protocol.
"""
return "wss" if self.request.protocol == "https" else "ws"
def async_callback(self, callback, *args, **kwargs):
"""Obsolete - catches exceptions from the wrapped function.
        This function is normally unnecessary thanks to
        `tornado.stack_context`.
"""
return self.ws_connection.async_callback(callback, *args, **kwargs)
def _not_supported(self, *args, **kwargs):
raise Exception("Method not supported for Web Sockets")
def on_connection_close(self):
if self.ws_connection:
self.ws_connection.on_connection_close()
self.ws_connection = None
self.on_close()
for method in ["write", "redirect", "set_header", "send_error", "set_cookie",
"set_status", "flush", "finish"]:
setattr(WebSocketHandler, method, WebSocketHandler._not_supported)
class WebSocketProtocol(object):
"""Base class for WebSocket protocol versions.
"""
def __init__(self, handler):
self.handler = handler
self.request = handler.request
self.stream = handler.stream
self.client_terminated = False
self.server_terminated = False
def async_callback(self, callback, *args, **kwargs):
"""Wrap callbacks with this if they are used on asynchronous requests.
Catches exceptions properly and closes this WebSocket if an exception
is uncaught.
"""
if args or kwargs:
callback = functools.partial(callback, *args, **kwargs)
def wrapper(*args, **kwargs):
try:
return callback(*args, **kwargs)
except Exception:
app_log.error("Uncaught exception in %s",
self.request.path, exc_info=True)
self._abort()
return wrapper
def on_connection_close(self):
self._abort()
def _abort(self):
"""Instantly aborts the WebSocket connection by closing the socket"""
self.client_terminated = True
self.server_terminated = True
self.stream.close() # forcibly tear down the connection
self.close() # let the subclass cleanup
class WebSocketProtocol76(WebSocketProtocol):
"""Implementation of the WebSockets protocol, version hixie-76.
This class provides basic functionality to process WebSockets requests as
specified in
http://tools.ietf.org/html/draft-hixie-thewebsocketprotocol-76
"""
def __init__(self, handler):
WebSocketProtocol.__init__(self, handler)
self.challenge = None
self._waiting = None
def accept_connection(self):
try:
self._handle_websocket_headers()
except ValueError:
gen_log.debug("Malformed WebSocket request received")
self._abort()
return
scheme = self.handler.get_websocket_scheme()
# draft76 only allows a single subprotocol
subprotocol_header = ''
subprotocol = self.request.headers.get("Sec-WebSocket-Protocol", None)
if subprotocol:
selected = self.handler.select_subprotocol([subprotocol])
if selected:
assert selected == subprotocol
subprotocol_header = "Sec-WebSocket-Protocol: %s\r\n" % selected
# Write the initial headers before attempting to read the challenge.
# This is necessary when using proxies (such as HAProxy), which
# need to see the Upgrade headers before passing through the
# non-HTTP traffic that follows.
self.stream.write(tornado.escape.utf8(
"HTTP/1.1 101 WebSocket Protocol Handshake\r\n"
"Upgrade: WebSocket\r\n"
"Connection: Upgrade\r\n"
"Server: TornadoServer/%(version)s\r\n"
"Sec-WebSocket-Origin: %(origin)s\r\n"
"Sec-WebSocket-Location: %(scheme)s://%(host)s%(uri)s\r\n"
"%(subprotocol)s"
"\r\n" % (dict(
version=tornado.version,
origin=self.request.headers["Origin"],
scheme=scheme,
host=self.request.host,
uri=self.request.uri,
subprotocol=subprotocol_header))))
self.stream.read_bytes(8, self._handle_challenge)
def challenge_response(self, challenge):
"""Generates the challenge response that's needed in the handshake
The challenge parameter should be the raw bytes as sent from the
client.
"""
key_1 = self.request.headers.get("Sec-Websocket-Key1")
key_2 = self.request.headers.get("Sec-Websocket-Key2")
try:
part_1 = self._calculate_part(key_1)
part_2 = self._calculate_part(key_2)
except ValueError:
raise ValueError("Invalid Keys/Challenge")
return self._generate_challenge_response(part_1, part_2, challenge)
def _handle_challenge(self, challenge):
try:
challenge_response = self.challenge_response(challenge)
except ValueError:
gen_log.debug("Malformed key data in WebSocket request")
self._abort()
return
self._write_response(challenge_response)
def _write_response(self, challenge):
self.stream.write(challenge)
self.async_callback(self.handler.open)(*self.handler.open_args, **self.handler.open_kwargs)
self._receive_message()
def _handle_websocket_headers(self):
"""Verifies all invariant- and required headers
If a header is missing or have an incorrect value ValueError will be
raised
"""
fields = ("Origin", "Host", "Sec-Websocket-Key1",
"Sec-Websocket-Key2")
if not all(map(lambda f: self.request.headers.get(f), fields)):
raise ValueError("Missing/Invalid WebSocket headers")
def _calculate_part(self, key):
"""Processes the key headers and calculates their key value.
        Raises ValueError when fed an invalid key."""
# pyflakes complains about variable reuse if both of these lines use 'c'
number = int(''.join(c for c in key if c.isdigit()))
spaces = len([c2 for c2 in key if c2.isspace()])
try:
key_number = number // spaces
except (ValueError, ZeroDivisionError):
raise ValueError
return struct.pack(">I", key_number)
def _generate_challenge_response(self, part_1, part_2, part_3):
m = hashlib.md5()
m.update(part_1)
m.update(part_2)
m.update(part_3)
return m.digest()
def _receive_message(self):
self.stream.read_bytes(1, self._on_frame_type)
def _on_frame_type(self, byte):
frame_type = ord(byte)
if frame_type == 0x00:
self.stream.read_until(b"\xff", self._on_end_delimiter)
elif frame_type == 0xff:
self.stream.read_bytes(1, self._on_length_indicator)
else:
self._abort()
def _on_end_delimiter(self, frame):
if not self.client_terminated:
self.async_callback(self.handler.on_message)(
frame[:-1].decode("utf-8", "replace"))
if not self.client_terminated:
self._receive_message()
def _on_length_indicator(self, byte):
if ord(byte) != 0x00:
self._abort()
return
self.client_terminated = True
self.close()
def write_message(self, message, binary=False):
"""Sends the given message to the client of this Web Socket."""
if binary:
raise ValueError(
"Binary messages not supported by this version of websockets")
if isinstance(message, unicode_type):
message = message.encode("utf-8")
assert isinstance(message, bytes_type)
self.stream.write(b"\x00" + message + b"\xff")
def write_ping(self, data):
"""Send ping frame."""
raise ValueError("Ping messages not supported by this version of websockets")
def close(self):
"""Closes the WebSocket connection."""
if not self.server_terminated:
if not self.stream.closed():
self.stream.write("\xff\x00")
self.server_terminated = True
if self.client_terminated:
if self._waiting is not None:
self.stream.io_loop.remove_timeout(self._waiting)
self._waiting = None
self.stream.close()
elif self._waiting is None:
self._waiting = self.stream.io_loop.add_timeout(
time.time() + 5, self._abort)
class WebSocketProtocol13(WebSocketProtocol):
"""Implementation of the WebSocket protocol from RFC 6455.
This class supports versions 7 and 8 of the protocol in addition to the
final version 13.
"""
def __init__(self, handler, mask_outgoing=False):
WebSocketProtocol.__init__(self, handler)
self.mask_outgoing = mask_outgoing
self._final_frame = False
self._frame_opcode = None
self._masked_frame = None
self._frame_mask = None
self._frame_length = None
self._fragmented_message_buffer = None
self._fragmented_message_opcode = None
self._waiting = None
def accept_connection(self):
try:
self._handle_websocket_headers()
self._accept_connection()
except ValueError:
gen_log.debug("Malformed WebSocket request received", exc_info=True)
self._abort()
return
def _handle_websocket_headers(self):
"""Verifies all invariant- and required headers
If a header is missing or have an incorrect value ValueError will be
raised
"""
fields = ("Host", "Sec-Websocket-Key", "Sec-Websocket-Version")
if not all(map(lambda f: self.request.headers.get(f), fields)):
raise ValueError("Missing/Invalid WebSocket headers")
@staticmethod
def compute_accept_value(key):
"""Computes the value for the Sec-WebSocket-Accept header,
given the value for Sec-WebSocket-Key.
"""
sha1 = hashlib.sha1()
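        # Per RFC 6455, the accept value is base64(SHA1(key + GUID)); e.g. the
        # RFC's sample key "dGhlIHNhbXBsZSBub25jZQ==" yields
        # "s3pPLMBiTxaQ9kYGzzhZRbK+xOo=".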
sha1.update(utf8(key))
sha1.update(b"258EAFA5-E914-47DA-95CA-C5AB0DC85B11") # Magic value
return native_str(base64.b64encode(sha1.digest()))
def _challenge_response(self):
return WebSocketProtocol13.compute_accept_value(
self.request.headers.get("Sec-Websocket-Key"))
def _accept_connection(self):
subprotocol_header = ''
subprotocols = self.request.headers.get("Sec-WebSocket-Protocol", '')
subprotocols = [s.strip() for s in subprotocols.split(',')]
if subprotocols:
selected = self.handler.select_subprotocol(subprotocols)
if selected:
assert selected in subprotocols
subprotocol_header = "Sec-WebSocket-Protocol: %s\r\n" % selected
self.stream.write(tornado.escape.utf8(
"HTTP/1.1 101 Switching Protocols\r\n"
"Upgrade: websocket\r\n"
"Connection: Upgrade\r\n"
"Sec-WebSocket-Accept: %s\r\n"
"%s"
"\r\n" % (self._challenge_response(), subprotocol_header)))
self.async_callback(self.handler.open)(*self.handler.open_args, **self.handler.open_kwargs)
self._receive_frame()
def _write_frame(self, fin, opcode, data):
if fin:
finbit = 0x80
else:
finbit = 0
frame = struct.pack("B", finbit | opcode)
l = len(data)
if self.mask_outgoing:
mask_bit = 0x80
else:
mask_bit = 0
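        # Payload length encoding per RFC 6455: 7 bits for lengths < 126,
        # a 16-bit extended length for <= 0xFFFF, otherwise a 64-bit length.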
if l < 126:
frame += struct.pack("B", l | mask_bit)
elif l <= 0xFFFF:
frame += struct.pack("!BH", 126 | mask_bit, l)
else:
frame += struct.pack("!BQ", 127 | mask_bit, l)
if self.mask_outgoing:
mask = os.urandom(4)
data = mask + self._apply_mask(mask, data)
frame += data
self.stream.write(frame)
def write_message(self, message, binary=False):
"""Sends the given message to the client of this Web Socket."""
if binary:
opcode = 0x2
else:
opcode = 0x1
message = tornado.escape.utf8(message)
assert isinstance(message, bytes_type)
self._write_frame(True, opcode, message)
def write_ping(self, data):
"""Send ping frame."""
assert isinstance(data, bytes_type)
self._write_frame(True, 0x9, data)
def _receive_frame(self):
self.stream.read_bytes(2, self._on_frame_start)
def _on_frame_start(self, data):
header, payloadlen = struct.unpack("BB", data)
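        # First header byte: FIN flag (0x80), reserved bits (0x70), opcode (0x0f).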
self._final_frame = header & 0x80
reserved_bits = header & 0x70
self._frame_opcode = header & 0xf
self._frame_opcode_is_control = self._frame_opcode & 0x8
if reserved_bits:
# client is using as-yet-undefined extensions; abort
self._abort()
return
self._masked_frame = bool(payloadlen & 0x80)
payloadlen = payloadlen & 0x7f
if self._frame_opcode_is_control and payloadlen >= 126:
# control frames must have payload < 126
self._abort()
return
if payloadlen < 126:
self._frame_length = payloadlen
if self._masked_frame:
self.stream.read_bytes(4, self._on_masking_key)
else:
self.stream.read_bytes(self._frame_length, self._on_frame_data)
elif payloadlen == 126:
self.stream.read_bytes(2, self._on_frame_length_16)
elif payloadlen == 127:
self.stream.read_bytes(8, self._on_frame_length_64)
def _on_frame_length_16(self, data):
self._frame_length = struct.unpack("!H", data)[0]
if self._masked_frame:
self.stream.read_bytes(4, self._on_masking_key)
else:
self.stream.read_bytes(self._frame_length, self._on_frame_data)
def _on_frame_length_64(self, data):
self._frame_length = struct.unpack("!Q", data)[0]
if self._masked_frame:
self.stream.read_bytes(4, self._on_masking_key)
else:
self.stream.read_bytes(self._frame_length, self._on_frame_data)
def _on_masking_key(self, data):
self._frame_mask = data
self.stream.read_bytes(self._frame_length, self._on_masked_frame_data)
def _apply_mask(self, mask, data):
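        # XOR every payload byte with the 4-byte mask, cycling through the
        # mask (RFC 6455 requires client-to-server frames to be masked).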
mask = array.array("B", mask)
unmasked = array.array("B", data)
for i in xrange(len(data)):
unmasked[i] = unmasked[i] ^ mask[i % 4]
if hasattr(unmasked, 'tobytes'):
# tostring was deprecated in py32. It hasn't been removed,
# but since we turn on deprecation warnings in our tests
# we need to use the right one.
return unmasked.tobytes()
else:
return unmasked.tostring()
def _on_masked_frame_data(self, data):
self._on_frame_data(self._apply_mask(self._frame_mask, data))
def _on_frame_data(self, data):
if self._frame_opcode_is_control:
# control frames may be interleaved with a series of fragmented
# data frames, so control frames must not interact with
# self._fragmented_*
if not self._final_frame:
# control frames must not be fragmented
self._abort()
return
opcode = self._frame_opcode
elif self._frame_opcode == 0: # continuation frame
if self._fragmented_message_buffer is None:
# nothing to continue
self._abort()
return
self._fragmented_message_buffer += data
if self._final_frame:
opcode = self._fragmented_message_opcode
data = self._fragmented_message_buffer
self._fragmented_message_buffer = None
else: # start of new data message
if self._fragmented_message_buffer is not None:
# can't start new message until the old one is finished
self._abort()
return
if self._final_frame:
opcode = self._frame_opcode
else:
self._fragmented_message_opcode = self._frame_opcode
self._fragmented_message_buffer = data
if self._final_frame:
self._handle_message(opcode, data)
if not self.client_terminated:
self._receive_frame()
def _handle_message(self, opcode, data):
if self.client_terminated:
return
if opcode == 0x1:
# UTF-8 data
try:
decoded = data.decode("utf-8")
except UnicodeDecodeError:
self._abort()
return
self.async_callback(self.handler.on_message)(decoded)
elif opcode == 0x2:
# Binary data
self.async_callback(self.handler.on_message)(data)
elif opcode == 0x8:
# Close
self.client_terminated = True
self.close()
elif opcode == 0x9:
# Ping
self._write_frame(True, 0xA, data)
elif opcode == 0xA:
# Pong
self.async_callback(self.handler.on_pong)(data)
else:
self._abort()
def close(self):
"""Closes the WebSocket connection."""
if not self.server_terminated:
if not self.stream.closed():
self._write_frame(True, 0x8, b"")
self.server_terminated = True
if self.client_terminated:
if self._waiting is not None:
self.stream.io_loop.remove_timeout(self._waiting)
self._waiting = None
self.stream.close()
elif self._waiting is None:
# Give the client a few seconds to complete a clean shutdown,
# otherwise just close the connection.
self._waiting = self.stream.io_loop.add_timeout(
self.stream.io_loop.time() + 5, self._abort)
class WebSocketClientConnection(simple_httpclient._HTTPConnection):
"""WebSocket client connection."""
def __init__(self, io_loop, request):
self.connect_future = Future()
self.read_future = None
self.read_queue = collections.deque()
self.key = base64.b64encode(os.urandom(16))
scheme, sep, rest = request.url.partition(':')
scheme = {'ws': 'http', 'wss': 'https'}[scheme]
request.url = scheme + sep + rest
request.headers.update({
'Upgrade': 'websocket',
'Connection': 'Upgrade',
'Sec-WebSocket-Key': self.key,
'Sec-WebSocket-Version': '13',
})
super(WebSocketClientConnection, self).__init__(
io_loop, None, request, lambda: None, lambda response: None,
104857600, Resolver(io_loop=io_loop))
def _on_close(self):
self.on_message(None)
def _handle_1xx(self, code):
assert code == 101
assert self.headers['Upgrade'].lower() == 'websocket'
assert self.headers['Connection'].lower() == 'upgrade'
accept = WebSocketProtocol13.compute_accept_value(self.key)
assert self.headers['Sec-Websocket-Accept'] == accept
self.protocol = WebSocketProtocol13(self, mask_outgoing=True)
self.protocol._receive_frame()
if self._timeout is not None:
self.io_loop.remove_timeout(self._timeout)
self._timeout = None
self.connect_future.set_result(self)
def write_message(self, message, binary=False):
"""Sends a message to the WebSocket server."""
self.protocol.write_message(message, binary)
def read_message(self, callback=None):
"""Reads a message from the WebSocket server.
Returns a future whose result is the message, or None
if the connection is closed. If a callback argument
is given it will be called with the future when it is
ready.
"""
assert self.read_future is None
future = Future()
if self.read_queue:
future.set_result(self.read_queue.popleft())
else:
self.read_future = future
if callback is not None:
self.io_loop.add_future(future, callback)
return future
def on_message(self, message):
if self.read_future is not None:
self.read_future.set_result(message)
self.read_future = None
else:
self.read_queue.append(message)
def on_pong(self, data):
pass
def websocket_connect(url, io_loop=None, callback=None):
"""Client-side websocket support.
Takes a url and returns a Future whose result is a
`WebSocketClientConnection`.
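    Illustrative usage (a sketch, assuming a ``gen.coroutine`` context)::
        conn = yield websocket_connect("ws://localhost:8888/websocket")
        conn.write_message("hello")
        msg = yield conn.read_message()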
"""
if io_loop is None:
io_loop = IOLoop.current()
request = httpclient.HTTPRequest(url)
request = httpclient._RequestProxy(
request, httpclient.HTTPRequest._DEFAULTS)
conn = WebSocketClientConnection(io_loop, request)
if callback is not None:
io_loop.add_future(conn.connect_future, callback)
return conn.connect_future
| hiphopsmurf/bitcoin-secured | online/build/tornado/tornado/websocket.py | Python | mit | 29,915 |
#!/usr/bin/env python
# -*- encoding: utf-8; py-indent-offset: 4 -*-
# +------------------------------------------------------------------+
# | ____ _ _ __ __ _ __ |
# | / ___| |__ ___ ___| | __ | \/ | |/ / |
# | | | | '_ \ / _ \/ __| |/ / | |\/| | ' / |
# | | |___| | | | __/ (__| < | | | | . \ |
# | \____|_| |_|\___|\___|_|\_\___|_| |_|_|\_\ |
# | |
# | Copyright Mathias Kettner 2014 mk@mathias-kettner.de |
# +------------------------------------------------------------------+
#
# This file is part of Check_MK.
# The official homepage is at http://mathias-kettner.de/check_mk.
#
# check_mk is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by
# the Free Software Foundation in version 2. check_mk is distributed
# in the hope that it will be useful, but WITHOUT ANY WARRANTY; with-
# out even the implied warranty of MERCHANTABILITY or FITNESS FOR A
# PARTICULAR PURPOSE. See the GNU General Public License for more de-
# tails. You should have received a copy of the GNU General Public
# License along with GNU Make; see the file COPYING. If not, write
# to the Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
# Boston, MA 02110-1301 USA.
# TODO:
#
# Notes for future rewrite:
#
# - Make clear which functions return values and which write out values
# render_*, add_*, write_* (e.g. icon() -> outputs directly,
#   render_icon() -> returns icon)
#
# - Order of arguments:
#   e.g. icon(help, icon) -> change and make help optional?
#
# - Fix names of message() show_error() show_warning()
#
# - change naming of html.attrencode() to html.render()
#
# - General rules:
# 1. values of type str that are passed as arguments or
# return values or are stored in datastructures must not contain
# non-Ascii characters! UTF-8 encoding must just be used in
# the last few CPU cycles before outputting. Conversion from
# input to str or unicode must happen as early as possible,
# directly when reading from file or URL.
#
# - identify internal helper methods and prefix them with "_"
#
# - Split HTML handling (page generating) code and generic request
# handling (vars, cookies, ...) up into separate classes to make
# the different tasks clearer. For example a RequestHandler()
# and a HTMLGenerator() or similar.
import time
import os
import urllib
import random
import re
import __builtin__
import signal
from collections import deque
try:
import simplejson as json
except ImportError:
import json
from cmk.exceptions import MKGeneralException, MKException
from lib import MKUserError
# TODO: REMOVE (JUST FOR TESTING)
#__builtin__._ = lambda x: x
# Information about uri
class InvalidUserInput(Exception):
def __init__(self, varname, text):
self.varname = varname
self.text = text
class RequestTimeout(MKException):
pass
#.
# .--HTML----------------------------------------------------------------.
# | _ _ _____ __ __ _ |
# | | | | |_ _| \/ | | |
# | | |_| | | | | |\/| | | |
# | | _ | | | | | | | |___ |
# | |_| |_| |_| |_| |_|_____| |
# | |
# +----------------------------------------------------------------------+
# | This is a simple class which wraps a string provided by the caller |
# | to make html.attrencode() know that this string should not be |
# | encoded, html.attrencode() will then return the unmodified value. |
# | |
# | This way we can implement encodings while still allowing HTML code |
# | processing for some special cases. This is useful when one needs |
# | to print out HTML tables in messages or help texts. |
# | |
# | The class now implements all relevant string comparison methods. |
# | The HTMLGenerator render_tag() function returns a HTML object. |
# '----------------------------------------------------------------------'
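# Example (sketch): html.attrencode(HTML("<b>bold</b>")) returns the markup
# unescaped, while html.attrencode("<b>bold</b>") would escape the tags.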
class HTML(object):
def __init__(self, value):
super(HTML, self).__init__()
if isinstance(value, HTML):
self.value = value.value
else:
self.value = value
def __str__(self):
return self.value
def __add__(self, other):
if isinstance(other, HTML):
return self.value + other.value
else:
return self.value + other
def __radd__(self, other):
if isinstance(other, HTML):
return other.value + self.value
else:
return other + self.value
def __iadd__(self, other):
self.value = self.__add__(other)
return self
def __lt__(self, other):
if isinstance(other, HTML):
return self.value < other.value
else:
return self.value < other
    def __le__(self, other):
if isinstance(other, HTML):
return self.value <= other.value
else:
return self.value <= other
def __eq__(self, other):
if isinstance(other, HTML):
return self.value == other.value
else:
return self.value == other
def __ne__(self, other):
if isinstance(other, HTML):
return self.value != other.value
else:
return self.value != other
def __gt__(self, other):
if isinstance(other, HTML):
return self.value > other.value
else:
return self.value > other
def __ge__(self, other):
if isinstance(other, HTML):
return self.value >= other.value
else:
return self.value >= other
def __len__(self):
return len(self.value)
__builtin__.HTML = HTML
# .--OutputFunnel--------------------------------------------------------.
# | ___ _ _ _____ _ |
# | / _ \ _ _| |_ _ __ _ _| |_| ___| _ _ __ _ __ ___| | |
# | | | | | | | | __| '_ \| | | | __| |_ | | | | '_ \| '_ \ / _ \ | |
# | | |_| | |_| | |_| |_) | |_| | |_| _|| |_| | | | | | | | __/ | |
# | \___/ \__,_|\__| .__/ \__,_|\__|_| \__,_|_| |_|_| |_|\___|_| |
# | |_| |
# +----------------------------------------------------------------------+
# | Provides the write functionality. The method lowlevel_write needs to |
# | to be overwritten in the specific subclass! |
# '----------------------------------------------------------------------'
class OutputFunnel(object):
def __init__(self):
self.plugged = False
self.plugged_text = ""
# Accepts str and unicode objects only!
# The plugged functionality can be used for debugging.
def write(self, text):
if isinstance(text, HTML):
text = text.value
if type(text) not in [str, unicode]: # also possible: type Exception!
raise MKGeneralException(_('Write accepts str and unicode objects only!'))
if self.plugged:
self.plugged_text += text
else:
# encode when really writing out the data. Not when writing plugged,
# because the plugged code will be handled somehow by our code. We
# only encode when leaving the pythonic world.
if type(text) == unicode:
text = text.encode("utf-8")
self.lowlevel_write(text)
def lowlevel_write(self, text):
raise NotImplementedError()
# Put in a plug which stops the text stream and redirects it to a sink.
def plug(self):
self.plugged = True
self.plugged_text = ''
def is_plugged(self):
return self.plugged
# Pull the plug for a moment to allow the sink content to pass through.
def flush(self):
if self.plugged:
text = self.plugged_text
# encode when really writing out the data. Not when writing plugged,
# because the plugged code will be handled somehow by our code. We
# only encode when leaving the pythonic world.
if type(text) == unicode:
text = text.encode("utf-8")
self.lowlevel_write(text)
self.plugged_text = ''
# Get the sink content in order to do something with it.
def drain(self):
if self.plugged:
text = self.plugged_text
self.plugged_text = ''
return text
else:
return ''
def unplug(self):
self.flush()
self.plugged = False
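    # Typical plugged usage (illustrative sketch):
    #   funnel.plug()
    #   funnel.write("chunk")   # captured in the sink, not written out
    #   text = funnel.drain()   # -> "chunk"; the sink is emptied
    #   funnel.unplug()         # flush any remainder, resume direct writes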
#.
# .--HTML Generator------------------------------------------------------.
# | _ _ _____ __ __ _ |
# | | | | |_ _| \/ | | |
# | | |_| | | | | |\/| | | |
# | | _ | | | | | | | |___ |
# | |_| |_| |_| |_| |_|_____| |
# | |
# | ____ _ |
# | / ___| ___ _ __ ___ _ __ __ _| |_ ___ _ __ |
# | | | _ / _ \ '_ \ / _ \ '__/ _` | __/ _ \| '__| |
# | | |_| | __/ | | | __/ | | (_| | || (_) | | |
# | \____|\___|_| |_|\___|_| \__,_|\__\___/|_| |
# | |
# +----------------------------------------------------------------------+
# | Generator which provides top level HTML writing functionality. |
# '----------------------------------------------------------------------'
class HTMLGenerator(OutputFunnel):
""" Usage Notes:
- Tags can be opened using the open_[tag]() call where [tag] is one of the possible tag names.
All attributes can be passed as function arguments, such as open_div(class_="example").
          However, Python-specific keywords need to be escaped using a trailing underscore.
One can also provide a dictionary as attributes: open_div(**{"class": "example"}).
- All tags can be closed again using the close_[tag]() syntax.
        - For tags which shall only contain plain text (i.e. no tags other than highlighting tags)
          you can use a direct call with the tag name as the function name, e.g.
          self.div("Text content", **attrs). Tags featuring this functionality are listed in
          the "featured shortcuts" list.
- Some tags require mandatory arguments. Those are defined explicitly below.
          For example an a tag needs the href attribute every time.
- If you want to provide plain HTML to a tag, please use the tag_content function or
          the HTML class.
HOWTO HTML Attributes:
- Python specific attributes have to be escaped using a trailing underscore
- All attributes can be python objects. However, some attributes can also be lists of attrs:
'class' attributes will be concatenated using one whitespace
'style' attributes will be concatenated using the semicolon and one whitespace
          Behavioral attributes such as 'onclick', 'onmouseover' will be concatenated using
a semicolon and one whitespace.
        - All attributes will be escaped, i.e. the characters '&', '<', '>', '"' will be replaced
          by the corresponding HTML entities '&amp;', '&lt;', '&gt;' and '&quot;'. """
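    # Illustrative sketch of the generated calls (``h`` is a hypothetical
    # HTMLGenerator-derived instance):
    #   h.open_div(class_="example")   # writes <div class="example">
    #   h.div("Text", id_="d1")        # writes <div id="d1">Text</div>
    #   h.close_div()                  # writes </div>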
# these tags can be called by their tag names, e.g. 'self.title(content)'
_shortcut_tags = set(['title', 'h1', 'h2', 'h3', 'th', 'tr', 'td', 'center',\
'div', 'p', 'span', 'canvas', 'strong', 'sub', 'tt', 'u'])
# these tags can be called by open_name(), close_name() and render_name(), e.g. 'self.open_html()'
_tag_names = set(['html', 'head', 'body', 'header', 'footer', 'a', 'b',\
'script', 'form', 'button', 'p', 'select', 'pre',\
'table', 'row', 'ul', 'li', 'br', 'nobr', 'input',\
'tt'])
# Of course all shortcut tags can be used as well.
_tag_names.update(_shortcut_tags)
def __init__(self):
super(HTMLGenerator, self).__init__()
self.indent_level = 0
self.indent = 2
#
# Escaping functions
#
# Encode HTML attributes. Replace HTML syntax with HTML text.
    # For example: replace '"' with '&quot;', '<' with '&lt;'.
# This code is slow. Works on str and unicode without changing
# the type. Also works on things that can be converted with '%s'.
def _escape_attribute(self, value):
attr_type = type(value)
if value is None:
return ''
elif attr_type == int:
return str(value)
elif isinstance(value, HTML):
return value.value # This is HTML code which must not be escaped
elif attr_type not in [str, unicode]: # also possible: type Exception!
value = "%s" % value # Note: this allows Unicode. value might not have type str now
        return value.replace("&", "&amp;")\
                    .replace('"', "&quot;")\
                    .replace("<", "&lt;")\
                    .replace(">", "&gt;")
# render HTML text.
    # We only strip off some tags and allow some simple tags
# such as <h1>, <b> or <i> to be part of the string.
# This is useful for messages where we want to keep formatting
    # options. (Formerly known as 'permissive_attrencode')
# for the escaping functions
    _unescaper_text = re.compile(r'&lt;(/?)(h2|b|tt|i|br(?: /)?|pre|a|sup|p|li|ul|ol)&gt;')
    _unescaper_href = re.compile(r'&lt;a href=&quot;(.*?)&quot;&gt;')
def _escape_text(self, text):
if isinstance(text, HTML):
return text.value # This is HTML code which must not be escaped
text = self._escape_attribute(text)
text = self._unescaper_text.sub(r'<\1\2>', text)
# Also repair link definitions
text = self._unescaper_href.sub(r'<a href="\1">', text)
return text
#
# Rendering
#
def _render_attributes(self, **attrs):
# TODO: REMOVE AFTER REFACTORING IS DONE!!
for key in attrs:
assert key.rstrip('_') in ['class', 'id', 'src', 'type', 'name',\
'onclick', 'onsubmit', 'onmouseover', 'onmouseout', 'onfocus', 'value', \
'content', 'href', 'http-equiv', 'rel', 'for', 'title', 'target',\
'align', 'valign', 'style', 'width', 'height', 'colspan', 'data-role',\
'cellspacing', 'cellpadding', 'border'], key
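        # List-valued attributes are joined per the class docstring: 'class'
        # with spaces, 'style' and 'on*' handlers with '; ', all others with '_'.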
for k, v in attrs.iteritems():
if v is None: continue
if not isinstance(v, list):
yield ' %s=\"%s\"' % (k.rstrip('_'), self._escape_attribute(v))
elif k in ["class", "class_"]:
yield ' %s=\"%s\"' % (k.rstrip('_'), ' '.join(a for a in (self._escape_attribute(vi) for vi in v) if a))
elif k == "style" or k.startswith('on'):
yield ' %s=\"%s;\"' % (k.rstrip('_'), re.sub(';+', ';', '; '.join(a for a in (self._escape_attribute(vi) for vi in v) if a)))
else:
yield ' %s=\"%s\"' % (k.rstrip('_'), '_'.join(a for a in (self._escape_attribute(vi) for vi in v) if a))
# applies attribute encoding to prevent code injections.
def _render_opening_tag(self, tag_name, close_tag=False, **attrs):
""" You have to replace attributes which are also python elements such as
'class', 'id', 'for' or 'type' using a trailing underscore (e.g. 'class_' or 'id_'). """
#self.indent_level += self.indent
if not attrs:
return "%s<%s%s>" % (' ' * (self.indent_level - self.indent),\
tag_name,\
' /' if close_tag else '')
else:
return "%s<%s%s%s>" % (' ' * (self.indent_level - self.indent),\
tag_name, ''.join(self._render_attributes(**attrs)),\
' /' if close_tag else '')
def _render_closing_tag(self, tag_name):
#self.indent_level -= self.indent if self.indent_level < 0 else 0
return "%s</%s>" % (' ' * self.indent_level, tag_name)
def _render_content_tag(self, tag_name, tag_content, **attrs):
return "%s%s%s%s" % (self._render_opening_tag(tag_name, **attrs),\
' ' * self.indent_level,\
self._escape_text(tag_content),\
self._render_closing_tag(tag_name))
# does not escape the script content
def _render_javascript(self, code):
return "<script language=\"javascript\">\n%s\n</script>\n" % code
# Write functionlity
# def write(self, text):
# raise NotImplementedError()
# This is used to create all the render_tag() and close_tag() functions
def __getattr__(self, name):
""" All closing tags can be called like this:
self.close_html(), self.close_tr(), etc. """
parts = name.split('_')
# generating the shortcut tag calls
if len(parts) == 1 and name in self._shortcut_tags:
return lambda content, **attrs: self.write(self._render_content_tag(name, content, **attrs))
# generating the open, close and render calls
elif len(parts) == 2:
what, tag_name = parts[0], parts[1]
if what == "open" and tag_name in self._tag_names:
return lambda **attrs: self.write(self._render_opening_tag(tag_name, **attrs))
elif what == "close" and tag_name in self._tag_names:
return lambda : self.write(self._render_closing_tag(tag_name))
elif what == "idle" and tag_name in self._tag_names:
return lambda **attrs: self.write(self._render_content_tag(tag_name, '', **attrs))
elif what == "render" and tag_name in self._tag_names:
return lambda content, **attrs: HTML(self._render_content_tag(tag_name, content, **attrs))
else:
return object.__getattribute__(self, name)
#
# HTML element methods
# If an argument is mandatory, it is used as default and it will overwrite an
# implicit argument (e.g. id_ will overwrite attrs["id"]).
#
#
# basic elements
#
def write_text(self, text):
""" Write text. Highlighting tags such as h2|b|tt|i|br|pre|a|sup|p|li|ul|ol are not escaped. """
self.write(self._escape_text(text))
def write_html(self, content):
""" Write HTML code directly, without escaping. """
self.write(content + "\n")
def comment(self, comment_text):
self.write("<!--%s-->" % self.encode_attribute(comment_text))
def meta(self, httpequiv=None, **attrs):
if httpequiv:
attrs['http-equiv'] = httpequiv
self.write(self._render_opening_tag('meta', close_tag=True, **attrs))
def base(self, target):
self.write(self._render_opening_tag('base', close_tag=True, target=target))
def open_a(self, href, **attrs):
attrs['href'] = href
self.write(self._render_opening_tag('a', **attrs))
def a(self, content, href, **attrs):
attrs['href'] = href
self.write(self._render_content_tag('a', content, **attrs))
def stylesheet(self, href):
self.write(self._render_opening_tag('link', rel="stylesheet", type_="text/css", href=href, close_tag=True))
#
# Helper functions to be used by snapins
#
def url_prefix(self):
raise NotImplementedError()
def render_link(self, text, url, target="main", onclick = None):
# Convert relative links into absolute links. We have three kinds
# of possible links and we change only [3]
# [1] protocol://hostname/url/link.py
# [2] /absolute/link.py
# [3] relative.py
if not (":" in url[:10]) and not url.startswith("javascript") and url[0] != '/':
url = self.url_prefix() + "check_mk/" + url
return self.render_a(text, class_="link", target=target or '', href=url,\
onfocus = "if (this.blur) this.blur();",\
onclick = onclick or None)
def link(self, text, url, target="main", onclick = None):
self.write(self.render_link(text, url, target=target, onclick=onclick))
def simplelink(self, text, url, target="main"):
self.link(text, url, target)
self.br()
def bulletlink(self, text, url, target="main", onclick = None):
self.open_li(class_="sidebar")
self.link(text, url, target, onclick)
self.close_li()
def iconlink(self, text, url, icon):
self.open_a(class_=["iconlink", "link"], target="main", href=url)
self.icon(icon=icon, help=None, cssclass="inline")
self.write_text(text)
self.close_a()
self.br()
def nagioscgilink(self, text, target):
self.open_li(class_="sidebar")
self.a(text, class_="link", target="main", href="%snagios/cgi-bin/%s" % (self.url_prefix(), target))
self.close_li()
#
    # Scripting
#
def javascript(self, code):
self.write(self._render_javascript(code))
def javascript_file(self, name):
""" <script type="text/javascript" src="js/%(name)s.js"/>\n """
self.write(self._render_content_tag('script', '', type_="text/javascript", src='js/%s.js' % name))
def img(self, src, **attrs):
attrs['src'] = src
self.write(self._render_opening_tag('img', close_tag=True, **attrs))
def open_button(self, type_, **attrs):
attrs['type'] = type_
self.write(self._render_opening_tag('button', close_tag=True, **attrs))
def play_sound(self, url):
self.write(self._render_opening_tag('audio autoplay', src_=url))
#
# form elements
#
def label(self, content, for_, **attrs):
attrs['for'] = for_
self.write(self._render_content_tag('label', content, **attrs))
def input(self, name, type_, **attrs):
attrs['type_'] = type_
attrs['name'] = name
self.write(self._render_opening_tag('input', close_tag=True, **attrs))
#
# table elements
#
def td(self, content, **attrs):
""" Only for text content. You can't put HTML structure here. """
self.write(self._render_content_tag('td', content, **attrs))
#
# list elements
#
def li(self, content, **attrs):
""" Only for text content. You can't put HTML structure here. """
self.write(self._render_content_tag('li', content, **attrs))
#
# structural text elements
#
def heading(self, content):
""" <h2>%(content)</h2> """
self.write(self._render_content_tag('h2', content))
def br(self):
self.write('<br/>')
def hr(self, **attrs):
self.write(self._render_opening_tag('hr', close_tag=True, **attrs))
def rule(self):
self.hr()
#.
# .--HTML Check_MK-------------------------------------------------------.
# | _ _ _____ __ __ _ |
# | | | | |_ _| \/ | | |
# | | |_| | | | | |\/| | | |
# | | _ | | | | | | | |___ |
# | |_| |_| |_| |_| |_|_____| |
# | |
# | ____ _ _ __ __ _ __ |
# | / ___| |__ ___ ___| | __ | \/ | |/ / |
# | | | | '_ \ / _ \/ __| |/ / | |\/| | ' / |
# | | |___| | | | __/ (__| < | | | | . \ |
# | \____|_| |_|\___|\___|_|\_\___|_| |_|_|\_\ |
# | |_____| |
# +----------------------------------------------------------------------+
# | A HTML generating class which introduces some logic of the Check_MK |
# | web application. |
# | It also contains various settings for how the page should be built. |
# '----------------------------------------------------------------------'
class HTMLCheck_MK(HTMLGenerator):
def __init__(self):
super(HTMLCheck_MK, self).__init__()
# rendering state
self.html_is_open = False
self.header_sent = False
# style options
self.body_classes = ['main']
self._default_stylesheets = [ "check_mk", "graphs" ]
self._default_javascripts = [ "checkmk", "graphs" ]
# behaviour options
self.render_headfoot = True
self.enable_debug = False
self.screenshotmode = False
self.help_visible = False
# browser options
self.output_format = "html"
self.browser_reload = 0
self.browser_redirect = ''
self.link_target = None
def default_html_headers(self):
self.meta(httpequiv="Content-Type", content="text/html; charset=utf-8")
self.meta(httpequiv="X-UA-Compatible", content="IE=edge")
self.write(self._render_opening_tag('link', rel="shortcut icon", href="images/favicon.ico", type_="image/ico", close_tag=True))
def _head(self, title, javascripts=None, stylesheets=None):
javascripts = javascripts if javascripts else []
stylesheets = stylesheets if stylesheets else ["pages"]
self.open_head()
self.default_html_headers()
self.title(title)
# If the variable _link_target is set, then all links in this page
        # should be targeted at the HTML frame named by _link_target. This
        # is e.g. useful in the dashboard
if self.link_target:
self.base(target=self.link_target)
# Load all specified style sheets and all user style sheets in htdocs/css
for css in self._default_stylesheets + stylesheets:
fname = self.css_filename_for_browser(css)
if fname is not None:
self.stylesheet(fname)
# write css for internet explorer
fname = self.css_filename_for_browser("ie")
if fname is not None:
self.write("<!--[if IE]>\n")
self.stylesheet(fname)
self.write("<![endif]-->\n")
self.add_custom_style_sheet()
# Load all scripts
for js in self._default_javascripts + javascripts:
filename_for_browser = self.javascript_filename_for_browser(js)
if filename_for_browser:
self.javascript_file(filename_for_browser)
if self.browser_reload != 0:
if self.browser_redirect != '':
self.javascript('set_reload(%s, \'%s\')' % (self.browser_reload, self.browser_redirect))
else:
self.javascript('set_reload(%s)' % (self.browser_reload))
self.close_head()
def html_head(self, title, javascripts=None, stylesheets=None, force=False):
force_new_document = force # for backward stability and better readability
#TODO: html_is_open?
if force_new_document:
self.header_sent = False
if not self.header_sent:
self.write('<!DOCTYPE HTML>\n')
self.open_html()
self._head(title, javascripts, stylesheets)
self.header_sent = True
def header(self, title='', javascripts=None, stylesheets=None, force=False):
if self.output_format == "html":
if not self.header_sent:
self.body_start(title, javascripts=javascripts, stylesheets=stylesheets, force=force)
self.header_sent = True
if self.render_headfoot:
self.top_heading(title)
def body_start(self, title='', javascripts=None, stylesheets=None, force=False):
self.html_head(title, javascripts, stylesheets, force)
self.open_body(class_=self._get_body_css_classes())
def _get_body_css_classes(self):
if self.screenshotmode:
return self.body_classes + ["screenshotmode"]
else:
return self.body_classes
def add_custom_style_sheet(self):
raise NotImplementedError()
def css_filename_for_browser(self, css):
raise NotImplementedError()
def javascript_filename_for_browser(self, jsname):
raise NotImplementedError()
def html_foot(self):
self.close_html()
def top_heading(self, title):
raise NotImplementedError()
def top_heading_left(self, title):
self.open_table(class_="header")
self.open_tr()
self.open_td(width="*", class_="heading")
self.a(title, href="#", onfocus="if (this.blur) this.blur();",
onclick="this.innerHTML=\'%s\'; document.location.reload();" % _("Reloading..."))
self.close_td()
def top_heading_right(self):
cssclass = "active" if self.help_visible else "passive"
self.icon_button(None, _("Toggle context help texts"), "help", id="helpbutton",
onclick="toggle_help()", style="display:none", ty="icon", cssclass=cssclass)
self.open_a(href=_("http://mathias-kettner.de"), class_="head_logo")
self.img(src="images/logo_cmk_small.png")
self.close_a()
self.close_td()
self.close_tr()
self.close_table()
self.hr(class_="header")
if self.enable_debug:
self._dump_get_vars()
#
# HTML form rendering
#
def detect_icon_path(self, icon_name):
raise NotImplementedError()
def icon(self, help, icon, **kwargs):
#TODO: Refactor
title = help
self.write(self.render_icon(icon_name=icon, help=title, **kwargs))
def empty_icon(self):
self.write(self.render_icon("images/trans.png"))
def render_icon(self, icon_name, help=None, middle=True, id=None, cssclass=None):
# TODO: Refactor
title = help
id_ = id
attributes = {'title' : title,
'id' : id_,
'class' : ["icon", cssclass],
'align' : 'absmiddle' if middle else None,
'src' : icon_name if "/" in icon_name else self.detect_icon_path(icon_name)}
return self._render_opening_tag('img', close_tag=True, **attributes)
def render_icon_button(self, url, help, icon, id=None, onclick=None,
style=None, target=None, cssclass=None, ty="button"):
# TODO: Refactor
title = help
id_ = id
# TODO: Can we clean this up and move all button_*.png to internal_icons/*.png?
if ty == "button":
icon = "images/button_" + icon + ".png"
icon = HTML(self.render_icon(icon, cssclass="iconbutton"))
return self.render_a(icon, **{'title' : title,
'id' : id_,
'class' : cssclass,
'style' : style,
'target' : target if target else '',
'href' : url if not onclick else "javascript:void(0)",
'onfocus' : "if (this.blur) this.blur();",
'onclick' : onclick })
def icon_button(self, *args, **kwargs):
self.write(self.render_icon_button(*args, **kwargs))
#.
class DeprecationWrapper(HTMLCheck_MK):
# Only strip off some tags. We allow some simple tags like
# <b>, <tt>, <i> to be part of the string. This is useful
    # for messages where we still want to have formatting options.
def permissive_attrencode(self, obj):
return self._escape_text(obj)
# Encode HTML attributes: replace " with ", also replace
# < and >. This code is slow. Works on str and unicode without
# changing the type. Also works on things that can be converted
# with %s.
def attrencode(self, value):
return self._escape_attribute(value)
#.
# .--html----------------------------------------------------------------.
# | _ _ _ |
# | | |__ | |_ _ __ ___ | | |
# | | '_ \| __| '_ ` _ \| | |
# | | | | | |_| | | | | | | |
# | |_| |_|\__|_| |_| |_|_| |
# | |
# +----------------------------------------------------------------------+
# | Caution! The class needs to be derived from Outputfunnel first! |
# '----------------------------------------------------------------------'
class html(DeprecationWrapper):
def __init__(self):
super(html, self).__init__()
self.myfile = None
self.cookies = {}
self._user_id = None
self.user_errors = {}
self.focus_object = None
self.events = set([]) # currently used only for sounds
self.status_icons = {}
self.final_javascript_code = ""
self.auto_id = 0
self.caches = {}
self.treestates = None
self.new_transids = []
self.ignore_transids = False
self.current_transid = None
self.page_context = {}
self._request_timeout = 110 # seconds
# Settings
self.have_help = False
self.io_error = False
self.mobile = False
self.buffering = True
self.keybindings_enabled = True
self.keybindings = []
self.context_buttons_open = False
# Forms
self.form_name = None
self.form_vars = []
# Variable management
self.vars = {}
        self.listvars = {} # for variables with more than one occurrence
self.uploads = {}
self.var_stash = []
# Time measurement
self.times = {}
self.start_time = time.time()
self.last_measurement = self.start_time
RETURN = 13
SHIFT = 16
CTRL = 17
ALT = 18
BACKSPACE = 8
F1 = 112
def set_user_id(self, user_id):
self._user_id = user_id
def is_mobile(self):
return self.mobile
def is_api_call(self):
return self.output_format != "html"
def get_user_agent(self):
raise NotImplementedError()
def get_referer(self):
raise NotImplementedError()
    # The system web server's configured request timeout. This is the time
    # before the request is terminated from the view of the client.
def client_request_timeout(self):
raise NotImplementedError()
def is_ssl_request(self):
raise NotImplementedError()
def request_method(self):
raise NotImplementedError()
def set_page_context(self, c):
self.page_context = c
def set_buffering(self, b):
self.buffering = b
# TODO: Can this please be dropped?
def some_id(self):
self.auto_id += 1
return "id_%d" % self.auto_id
def set_output_format(self, f):
self.output_format = f
if f == "json":
content_type = "application/json; charset=UTF-8"
elif f == "jsonp":
content_type = "application/javascript; charset=UTF-8"
elif f in ("csv", "csv_export"): # Cleanup: drop one of these
content_type = "text/csv; charset=UTF-8"
elif f == "python":
content_type = "text/plain; charset=UTF-8"
elif f == "text":
content_type = "text/plain; charset=UTF-8"
elif f == "html":
content_type = "text/html; charset=UTF-8"
elif f == "xml":
content_type = "text/xml; charset=UTF-8"
elif f == "pdf":
content_type = "application/pdf"
else:
raise MKGeneralException(_("Unsupported context type '%s'") % f)
self.set_content_type(content_type)
def set_content_type(self, ty):
raise NotImplementedError()
def set_link_target(self, framename):
self.link_target = framename
def set_focus(self, varname):
self.focus_object = (self.form_name, varname)
def set_render_headfoot(self, render):
self.render_headfoot = render
def set_browser_reload(self, secs):
self.browser_reload = secs
def set_browser_redirect(self, secs, url):
self.browser_reload = secs
self.browser_redirect = url
def immediate_browser_redirect(self, secs, url):
self.javascript("set_reload(%s, '%s');" % (secs, url))
def add_body_css_class(self, cls):
self.body_classes.append(cls)
def add_status_icon(self, img, tooltip, url = None):
if url:
self.status_icons[img] = tooltip, url
else:
self.status_icons[img] = tooltip
def final_javascript(self, code):
self.final_javascript_code += code + "\n"
def reload_sidebar(self):
if not self.has_var("_ajaxid"):
self.javascript("reload_sidebar()")
def http_redirect(self, url):
raise MKGeneralException("http_redirect not implemented")
#
# Request processing
#
def get_unicode_input(self, varname, deflt = None):
try:
return self.var_utf8(varname, deflt)
except UnicodeDecodeError:
raise MKUserError(varname, _("The given text is wrong encoded. "
"You need to provide a UTF-8 encoded text."))
def var(self, varname, deflt = None):
return self.vars.get(varname, deflt)
def has_var(self, varname):
return varname in self.vars
# Checks if a variable with a given prefix is present
def has_var_prefix(self, prefix):
for varname in self.vars:
if varname.startswith(prefix):
return True
return False
def var_utf8(self, varname, deflt = None):
val = self.vars.get(varname, deflt)
if val != None and type(val) != unicode:
return val.decode("utf-8")
else:
return val
def all_vars(self):
return self.vars
def all_varnames_with_prefix(self, prefix):
for varname in self.vars.keys():
if varname.startswith(prefix):
yield varname
    # Return all values of a variable that may occur more than once in the
    # URL. Note: self.listvars contains a variable only if it really occurs
    # more than once.
def list_var(self, varname):
if varname in self.listvars:
return self.listvars[varname]
elif varname in self.vars:
return [self.vars[varname]]
else:
return []
    # Adds a variable to listvars and also sets it
def add_var(self, varname, value):
self.listvars.setdefault(varname, [])
self.listvars[varname].append(value)
self.vars[varname] = value
def set_var(self, varname, value):
if value == None:
self.del_var(varname)
else:
self.vars[varname] = value
def del_var(self, varname):
if varname in self.vars:
del self.vars[varname]
if varname in self.listvars:
del self.listvars[varname]
def del_all_vars(self, prefix = None):
if not prefix:
self.vars = {}
self.listvars = {}
else:
self.vars = dict([(k,v) for (k,v) in self.vars.iteritems()
if not k.startswith(prefix)])
self.listvars = dict([(k,v) for (k,v) in self.listvars.iteritems()
if not k.startswith(prefix)])
def stash_vars(self):
self.var_stash.append(self.vars.copy())
def unstash_vars(self):
self.vars = self.var_stash.pop()
def uploaded_file(self, varname, default = None):
return self.uploads.get(varname, default)
# Returns a dictionary containing all parameters the user handed over to this request.
# The concept is that the user can either provide the data in a single "request" variable,
# which contains the request data encoded as JSON, or provide multiple GET/POST vars which
# are then used as top level entries in the request object.
def get_request(self, exclude_vars=None):
if exclude_vars == None:
exclude_vars = []
request = json.loads(self.var("request", "{}"))
for key, val in self.all_vars().items():
if key not in [ "request", "output_format" ] + exclude_vars:
request[key] = val
return request
def parse_field_storage(self, fields, handle_uploads_as_file_obj = False):
self.vars = {}
self.listvars = {} # for variables with more than one occurrance
self.uploads = {}
        # Only allow a restricted set of characters in variable names; the
        # '-' is escaped so that '+-\' is not parsed as a character range
        # (the old unescaped pattern accidentally allowed every character
        # between '+' and '\').
        varname_regex = re.compile(r'^[\w.%+\-\\*]+$')
for field in fields.list:
varname = field.name
            # To prevent various injections, we only allow a defined set
            # of characters to be used in variables
if not varname_regex.match(varname):
continue
# put uploaded file infos into separate storage
if field.filename is not None:
if handle_uploads_as_file_obj:
value = field.file
else:
value = field.value
self.uploads[varname] = (field.filename, field.type, value)
else: # normal variable
                # Multiple occurrences of a variable? Store in extra list dict
if varname in self.vars:
if varname in self.listvars:
self.listvars[varname].append(field.value)
else:
self.listvars[varname] = [ self.vars[varname], field.value ]
                # In the single-value store the last occurrence of a variable
                # has precedence. That makes appending variables to the current
                # URL simpler.
self.vars[varname] = field.value
#
# Cookie handling
#
def has_cookie(self, varname):
return varname in self.cookies
def get_cookie_names(self):
return self.cookies.keys()
def cookie(self, varname, deflt):
try:
return self.cookies[varname].value
except:
return deflt
#
# URL building
#
# [('varname1', value1), ('varname2', value2) ]
def makeuri(self, addvars, remove_prefix=None, filename=None, delvars=None):
new_vars = [ nv[0] for nv in addvars ]
vars = [ (v, self.var(v))
for v in self.vars
if v[0] != "_" and v not in new_vars and (not delvars or v not in delvars) ]
if remove_prefix != None:
vars = [ i for i in vars if not i[0].startswith(remove_prefix) ]
vars = vars + addvars
if filename == None:
filename = self.urlencode(self.myfile) + ".py"
if vars:
return filename + "?" + self.urlencode_vars(vars)
else:
return filename
def makeuri_contextless(self, vars, filename=None):
if not filename:
filename = self.myfile + ".py"
if vars:
return filename + "?" + self.urlencode_vars(vars)
else:
return filename
def makeactionuri(self, addvars, filename=None):
return self.makeuri(addvars + [("_transid", self.get_transid())], filename=filename)
def makeactionuri_contextless(self, addvars, filename=None):
return self.makeuri_contextless(addvars + [("_transid", self.get_transid())], filename=filename)
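    # Illustrative example (sketch, not original code): if the current page
    # vars are {'view_name': 'hosts'} and self.myfile is 'view', then
    #   makeuri([('mode', 'availability')])
    # yields "view.py?mode=availability&view_name=hosts" (urlencode_vars
    # sorts the variables alphabetically).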
#
# Encoding and escaping
#
    # This function returns a str object, never unicode!
    # Beware: this code is crucial for the performance of Multisite!
    # Changing from the self-coded urlencode to urllib.quote
    # saves more than 90% of the total HTML generating time
    # on more complex pages!
def urlencode_vars(self, vars):
output = []
for varname, value in sorted(vars):
if type(value) == int:
value = str(value)
elif type(value) == unicode:
value = value.encode("utf-8")
try:
                # urllib is not able to encode non-ASCII characters
output.append(varname + '=' + urllib.quote(value))
except:
output.append(varname + '=' + self.urlencode(value)) # slow but working
return '&'.join(output)
def urlencode(self, value):
if type(value) == unicode:
value = value.encode("utf-8")
elif value == None:
return ""
ret = ""
for c in value:
if c == " ":
c = "+"
elif ord(c) <= 32 or ord(c) > 127 or c in [ '#', '+', '"', "'", "=", "&", ":", "%" ]:
c = "%%%02x" % ord(c)
ret += c
return ret
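    # Illustrative examples (sketch, not original code):
    #   urlencode("a b&c")  -> "a+b%26c"
    #   urlencode(u"\xe4")  -> "%c3%a4"  (UTF-8 encoded, then byte-escaped)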
    # Escape a variable name so that it only uses allowed characters for URL variables
def varencode(self, varname):
if varname == None:
return "None"
        if type(varname) == int:
            return str(varname)
ret = ""
for c in varname:
if not c.isdigit() and not c.isalnum() and c != "_":
ret += "%%%02x" % ord(c)
else:
ret += c
return ret
def u8(self, c):
if ord(c) > 127:
return "&#%d;" % ord(c)
else:
return c
def utf8_to_entities(self, text):
if type(text) != unicode:
return text
else:
return text.encode("utf-8")
# remove all HTML-tags
def strip_tags(self, ht):
if type(ht) not in [str, unicode]:
return ht
while True:
x = ht.find('<')
if x == -1:
break
y = ht.find('>', x)
if y == -1:
break
ht = ht[0:x] + ht[y+1:]
return ht.replace(" ", " ")
def strip_scripts(self, ht):
while True:
x = ht.find('<script')
if x == -1:
break
y = ht.find('</script>')
if y == -1:
break
ht = ht[0:x] + ht[y+9:]
return ht
#
# Debugging, diagnose and logging
#
def debug(self, *x):
import pprint
for element in x:
try:
formatted = pprint.pformat(element)
except UnicodeDecodeError:
formatted = repr(element)
self.lowlevel_write("<pre>%s</pre>\n" % self.attrencode(formatted))
def log(self, *args):
raise NotImplementedError()
#
# HTML form rendering
#
def begin_form(self, name, action = None, method = "GET",
onsubmit = None, add_transid = True):
self.form_vars = []
if action == None:
action = self.myfile + ".py"
self.current_form = name
if method.lower() == "post":
enctype = ' enctype="multipart/form-data"'
else:
enctype = ''
if onsubmit:
onsubmit = ' onsubmit="%s"' % self.attrencode(onsubmit)
else:
onsubmit = ''
enc_name = self.attrencode(name)
self.write('<form id="form_%s" name="%s" class="%s" action="%s" method="%s"%s%s>\n' %
(enc_name, enc_name, enc_name, self.attrencode(action), self.attrencode(method),
enctype, onsubmit))
self.hidden_field("filled_in", name, add_var=True)
if add_transid:
self.hidden_field("_transid", str(self.get_transid()))
self.form_name = name
def end_form(self):
self.write("</form>\n")
self.form_name = None
def in_form(self):
return self.form_name != None
def prevent_password_auto_completion(self):
# These fields are not really used by the form. They are used to prevent the browsers
# from filling the default password and previous input fields in the form
# with password which are eventually saved in the browsers password store.
self.write("<input type=\"text\" style=\"display:none;\">")
self.write("<input style=\"display:none\" type=\"password\">")
    # Needed if input elements are put into forms without our helper
    # functions. TODO: Should really be removed and cleaned up!
def add_form_var(self, varname):
self.form_vars.append(varname)
    # Beware: call this method just before end_form(). It will
    # add all current non-underscored HTML variables as hidden
    # fields to the form - *if* they are not used in any input
    # field. (This is the reason why you must not add any further
    # input fields after this method has been called.)
def hidden_fields(self, varlist = None, **args):
add_action_vars = args.get("add_action_vars", False)
if varlist != None:
for var in varlist:
value = self.vars.get(var, "")
self.hidden_field(var, value)
else: # add *all* get variables, that are not set by any input!
for var, value in self.vars.items():
if var not in self.form_vars and \
(var[0] != "_" or add_action_vars): # and var != "filled_in":
self.hidden_field(var, value)
def hidden_field(self, *args, **kwargs):
self.write(self.render_hidden_field(*args, **kwargs))
def render_hidden_field(self, var, value, id=None, add_var=False):
if value == None:
return ""
if add_var:
self.add_form_var(var)
id = id and ' id="%s"' % self.attrencode(id) or ''
return "<input type=\"hidden\" name=\"%s\" value=\"%s\"%s />" % \
(self.attrencode(var), self.attrencode(value), id)
def image_button(self, varname, title, cssclass = '', style=None):
if not self.mobile:
self.write('<label for="%s" class="image_button"%s>' %
(self.attrencode(varname), style and (" style=\"%s\"" % style) or ""))
self.raw_button(varname, title, cssclass)
if not self.mobile:
self.write('</label>')
def button(self, *args, **kwargs):
self.image_button(*args, **kwargs)
def raw_button(self, varname, title, cssclass=""):
self.write("<input onfocus=\"if (this.blur) this.blur();\" "
"type=\"submit\" name=\"%s\" id=\"%s\" value=\"%s\" "
"class=\"%s\" />\n" % \
(varname, varname, title, cssclass))
self.add_form_var(varname)
def buttonlink(self, href, text, add_transid=False, obj_id='', style='', title='', disabled=''):
if add_transid:
href += "&_transid=%s" % self.get_transid()
if not obj_id:
obj_id = self.some_id()
obj_id = ' id=%s' % obj_id
if style:
style = ' style="%s"' % style
if title:
title = ' title="%s"' % title
if disabled:
title = ' disabled="%s"' % disabled
if not self.mobile:
self.write('<label for="%s" class="image_button">' % obj_id)
self.write('<input%s%s%s%s value="%s" class="buttonlink" type="button" onclick="location.href=\'%s\'" />\n' % \
(obj_id, style, title, disabled, text, href))
if not self.mobile:
self.write('</label>')
def empty_icon_button(self):
self.write(self.render_icon("images/trans.png", cssclass="iconbutton trans"))
def disabled_icon_button(self, icon):
self.write(self.render_icon(icon, cssclass="iconbutton"))
def jsbutton(self, varname, text, onclick, style=''):
if style:
style = ' style="%s"' % style
self.write("<input type=button name=%s id=%s onclick=\"%s\" "
"class=button%s value=\"%s\" />" % (varname, varname, onclick, style, text))
def number_input(self, varname, deflt = "", size=8, style="", submit=None):
if deflt != None:
deflt = str(deflt)
self.text_input(varname, deflt, "number", size=size, style=style, submit=submit)
def text_input(self, varname, default_value = "", cssclass = "text", label = None, id = None,
submit = None, attrs = {}, **args):
if default_value == None:
default_value = ""
addprops = ""
add_style = ""
if "size" in args and args["size"]:
if args["size"] == "max":
add_style = "width: 100%; "
else:
addprops += " size=\"%d\"" % (args["size"] + 1)
if not args.get('omit_css_width', False) and "width:" not in args.get("style", "") and not self.mobile:
add_style = "width: %d.8ex; " % args["size"]
if "type" in args:
mytype = args["type"]
else:
mytype = "text"
if "autocomplete" in args:
addprops += " autocomplete=\"%s\"" % args["autocomplete"]
if args.get("style"):
addprops += " style=\"%s%s\"" % (add_style, args["style"])
elif add_style:
addprops += " style=\"%s\"" % add_style
if args.get("read_only"):
addprops += " readonly"
if submit != None:
if not id:
id = "ti_%s" % varname
self.final_javascript('document.getElementById("%s").onkeydown = '
'function(e) { if (!e) e = window.event; textinput_enter_submit(e, "%s"); };'
% (id, submit))
value = self.vars.get(varname, default_value)
error = self.user_errors.get(varname)
html = ""
if error:
html = "<x class=\"inputerror\">"
if label:
if not id:
id = "ti_%s" % varname
html += '<label for="%s">%s</label>' % (id, label)
if id:
addprops += ' id="%s"' % id
attributes = ' ' + ' '.join([ '%s="%s"' % (k, self.attrencode(v)) for k, v in attrs.iteritems() ])
html += "<input type=\"%s\" class=\"%s\" value=\"%s\" name=\"%s\"%s%s />\n" % \
(mytype, cssclass, self.attrencode(value), varname, addprops, attributes)
if error:
html += "</x>"
self.set_focus(varname)
self.write(html)
self.form_vars.append(varname)
def password_input(self, varname, default_value = "", size=12, **args):
self.text_input(varname, default_value, type="password", size = size, **args)
def text_area(self, varname, deflt="", rows=4, cols=30, attrs = {}):
value = self.var(varname, deflt)
error = self.user_errors.get(varname)
if error:
self.write("<x class=inputerror>")
attributes = ' ' + ' '.join([ '%s="%s"' % (k, v) for k, v in attrs.iteritems() ])
self.write("<textarea style=\"width: %d.8ex\" rows=%d cols=%d name=\"%s\"%s>%s</textarea>\n" % (
cols, rows, cols, varname, attributes, self.attrencode(value)))
if error:
self.write("</x>")
self.set_focus(varname)
self.form_vars.append(varname)
def sorted_select(self, varname, choices, deflt="", onchange=None, attrs = {}):
        # Sort according to display texts, not keys (without shadowing
        # the sorted() builtin)
        sorted_choices = sorted(choices, key=lambda a: a[1].lower())
        self.select(varname, sorted_choices, deflt, onchange, attrs)
    # Choices is a list of pairs (key, title). The keys of the choices
    # and the default value must be of type None, str or unicode.
def select(self, varname, choices, deflt="", onchange=None, attrs = {}):
current = self.get_unicode_input(varname, deflt)
onchange_code = onchange and " onchange=\"%s\"" % (onchange) or ""
attrs.setdefault('size', 1)
attributes = ' ' + ' '.join([ '%s="%s"' % (k, v) for k, v in attrs.iteritems() ])
error = self.user_errors.get(varname)
if error:
self.write("<x class=\"inputerror\">")
self.write("<select%s name=\"%s\" id=\"%s\"%s>\n" %
(onchange_code, varname, varname, attributes))
for value, text in choices:
if value == None:
value = ""
sel = value == current and " selected" or ""
self.write("<option value=\"%s\"%s>%s</option>\n" %
(self.attrencode(value), sel, self.attrencode(text)))
self.write("</select>\n")
if error:
self.write("<x class=\"inputerror\">")
if varname:
self.form_vars.append(varname)
def icon_select(self, varname, options, deflt=""):
current = self.var(varname, deflt)
self.write("<select class=icon name=\"%s\" id=\"%s\" size=\"1\">\n" %
(varname, varname))
for value, text, icon in options:
if value == None: value = ""
sel = value == current and " selected" or ""
self.write('<option style="background-image:url(images/icon_%s.png);" '
'value=\"%s\"%s>%s</option>\n' %
(icon, self.attrencode(value), sel, self.attrencode(text)))
self.write("</select>\n")
if varname:
self.form_vars.append(varname)
def begin_radio_group(self, horizontal=False):
if self.mobile:
if horizontal:
add = 'data-type="horizontal" '
else:
add = ''
self.write('<fieldset %s data-role="controlgroup">' % add)
def end_radio_group(self):
if self.mobile:
self.write('</fieldset>')
def radiobutton(self, varname, value, checked, label):
if self.has_var(varname):
checked = self.var(varname) == value
checked_text = checked and " checked" or ""
if label:
id = "rb_%s_%s" % (varname, self.attrencode(value))
idtxt = ' id="%s"' % id
else:
idtxt = ""
self.write("<input type=radio name=%s value=\"%s\"%s%s>\n" %
(varname, self.attrencode(value), checked_text, idtxt))
if label:
self.write('<label for="%s">%s</label>\n' % (id, label))
self.form_vars.append(varname)
def begin_checkbox_group(self, horizonal=False):
self.begin_radio_group(horizonal)
def end_checkbox_group(self):
self.end_radio_group()
def checkbox(self, *args, **kwargs):
self.write(self.render_checkbox(*args, **kwargs))
def render_checkbox(self, varname, deflt=False, cssclass = '', onclick = None, label=None,
id=None, add_attr = None):
        if add_attr == None:
            add_attr = [] # do not use [] as a default argument: mutable defaults are shared between calls!
code = ""
error = self.user_errors.get(varname)
if error:
code += "<x class=inputerror>"
code += "<span class=checkbox>"
        # Problem with checkboxes: The browser will add the variable
        # only to the URL if the box is checked. So in order to detect
        # whether we should add the default value, we need to detect
        # if the form is printed for the first time. This is the
        # case if "filled_in" is not set.
value = self.get_checkbox(varname)
if value == None: # form not yet filled in
value = deflt
checked = value and " CHECKED " or ""
if cssclass:
cssclass = ' class="%s"' % cssclass
onclick_code = onclick and " onclick=\"%s\"" % (onclick) or ""
if label and not id:
id = "cb_" + varname
if id:
add_attr.append('id="%s"' % id)
add_attr_code = ''
if add_attr:
add_attr_code = ' ' + ' '.join(add_attr)
code += "<input type=checkbox name=\"%s\"%s%s%s%s>\n" % \
(varname, checked, cssclass, onclick_code, add_attr_code)
self.form_vars.append(varname)
if label:
code += '<label for="%s">%s</label>\n' % (id, label)
code += "</span>"
if error:
code += "</x>"
return code
def upload_file(self, varname):
error = self.user_errors.get(varname)
if error:
self.write("<x class=inputerror>")
self.write('<input type="file" name="%s">' % varname)
if error:
self.write("</x>")
self.form_vars.append(varname)
def show_user_errors(self):
if self.has_user_errors():
self.write('<div class=error>\n')
self.write('<br>'.join(self.user_errors.values()))
self.write('</div>\n')
    # The confirm dialog is normally not a dialog which needs to be protected
    # by a transid itself. It is only an intermediate step to the real action.
    # But there are use cases where the confirm dialog is used during rendering
    # a normal page, for example when deleting a dashlet from a dashboard. In
    # such cases, the transid must be added by the confirm dialog.
    # add_header: A title can be given to make the confirm method render the HTML
    # header when showing the confirm message.
def confirm(self, msg, method="POST", action=None, add_transid=False, add_header=False):
if self.var("_do_actions") == _("No"):
# User has pressed "No", now invalidate the unused transid
self.check_transaction()
return # None --> "No"
if not self.has_var("_do_confirm"):
if add_header != False:
self.header(add_header)
if self.mobile:
self.write('<center>')
self.write("<div class=really>%s" % self.permissive_attrencode(msg))
# FIXME: When this confirms another form, use the form name from self.vars()
self.begin_form("confirm", method=method, action=action, add_transid=add_transid)
self.hidden_fields(add_action_vars = True)
self.button("_do_confirm", _("Yes!"), "really")
self.button("_do_actions", _("No"), "")
self.end_form()
self.write("</div>")
if self.mobile:
self.write('</center>')
return False # False --> "Dialog shown, no answer yet"
else:
# Now check the transaction
return self.check_transaction() and True or None # True: "Yes", None --> Browser reload of "yes" page
#
# Form submission and variable handling
#
    # Check if the current form is currently filled in (i.e. we display
    # the form a second time while showing the values typed in the first
    # time and complaining about invalid user input)
def form_filled_in(self, form_name = None):
if form_name == None:
form_name = self.form_name
return self.has_var("filled_in") and (
form_name == None or \
form_name in self.list_var("filled_in"))
def do_actions(self):
return self.var("_do_actions") not in [ "", None, _("No") ]
def form_submitted(self, form_name=None):
if form_name:
return self.var("filled_in") == form_name
else:
return self.has_var("filled_in")
def add_user_error(self, varname, msg_or_exc):
if isinstance(msg_or_exc, Exception):
message = "%s" % msg_or_exc
else:
message = msg_or_exc
if type(varname) == list:
for v in varname:
self.add_user_error(v, message)
else:
self.user_errors[varname] = message
def has_user_errors(self):
return len(self.user_errors) > 0
    # Get value of checkbox. Return True, False or None. None means
    # that no form has been submitted. The problem here is the distinction
    # between False and None. The browser does not set the variables for
    # checkboxes that are not checked :-(
def get_checkbox(self, varname, form_name = None):
if self.has_var(varname):
return not not self.var(varname)
elif not self.form_filled_in(form_name):
return None
else:
# Form filled in but variable missing -> Checkbox not checked
return False
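    # Illustrative outcomes (sketch, not original code): before any submit
    # get_checkbox() returns None; after a submit with the box ticked it
    # returns True; after a submit with the box left empty (variable absent,
    # but "filled_in" present) it returns False.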
# TODO: Remove this specific legacy function. Change code using this to valuespecs
def datetime_input(self, varname, default_value, submit=None):
try:
t = self.get_datetime_input(varname)
except:
t = default_value
if varname in self.user_errors:
self.add_user_error(varname + "_date", self.user_errors[varname])
self.add_user_error(varname + "_time", self.user_errors[varname])
self.set_focus(varname + "_date")
br = time.localtime(t)
self.date_input(varname + "_date", br.tm_year, br.tm_mon, br.tm_mday, submit=submit)
self.write(" ")
self.time_input(varname + "_time", br.tm_hour, br.tm_min, submit=submit)
self.form_vars.append(varname + "_date")
self.form_vars.append(varname + "_time")
# TODO: Remove this specific legacy function. Change code using this to valuespecs
def time_input(self, varname, hours, mins, submit=None):
self.text_input(varname, "%02d:%02d" % (hours, mins), cssclass="time", size=5,
submit=submit, omit_css_width = True)
# TODO: Remove this specific legacy function. Change code using this to valuespecs
def date_input(self, varname, year, month, day, submit=None):
self.text_input(varname, "%04d-%02d-%02d" % (year, month, day),
cssclass="date", size=10, submit=submit, omit_css_width = True)
# TODO: Remove this specific legacy function. Change code using this to valuespecs
def get_datetime_input(self, varname):
t = self.var(varname + "_time")
d = self.var(varname + "_date")
if not t or not d:
raise MKUserError([varname + "_date", varname + "_time"],
_("Please specify a date and time."))
try:
br = time.strptime(d + " " + t, "%Y-%m-%d %H:%M")
except:
raise MKUserError([varname + "_date", varname + "_time"],
_("Please enter the date/time in the format YYYY-MM-DD HH:MM."))
return int(time.mktime(br))
# TODO: Remove this specific legacy function. Change code using this to valuespecs
def get_time_input(self, varname, what):
t = self.var(varname)
if not t:
raise MKUserError(varname, _("Please specify %s.") % what)
try:
h, m = t.split(":")
m = int(m)
h = int(h)
if m < 0 or m > 59 or h < 0:
raise Exception()
except:
raise MKUserError(varname, _("Please enter the time in the format HH:MM."))
return m * 60 + h * 3600
#
# HTML - All the common and more complex HTML rendering methods
#
def show_info(self, msg):
self.message(msg, 'message')
def show_error(self, msg):
self.message(msg, 'error')
def show_warning(self, msg):
self.message(msg, 'warning')
# obj might be either a string (str or unicode) or an exception object
def message(self, obj, what='message'):
if what == 'message':
cls = 'success'
prefix = _('MESSAGE')
elif what == 'warning':
cls = 'warning'
prefix = _('WARNING')
else:
cls = 'error'
prefix = _('ERROR')
msg = self.permissive_attrencode(obj)
if self.output_format == "html":
if self.mobile:
self.write('<center>')
self.write("<div class=%s>%s</div>\n" % (cls, msg))
if self.mobile:
self.write('</center>')
else:
self.write('%s: %s\n' % (prefix, self.strip_tags(msg)))
#self.guitest_record_output("message", (what, msg))
def show_localization_hint(self):
url = "wato.py?mode=edit_configvar&varname=user_localizations"
self.message(HTML("<sup>*</sup>" +
_("These texts may be localized depending on the users' "
"language. You can configure the localizations "
"<a href=\"%s\">in the global settings</a>.") % url))
# Embed help box, whose visibility is controlled by a global
# button in the page.
def help(self, text):
if text and text.strip():
self.have_help = True
self.write('<div class=help style="display: %s">' % (
not self.help_visible and "none" or "block"))
self.write(text.strip())
self.write('</div>')
def _dump_get_vars(self):
self.begin_foldable_container("html", "debug_vars", True, _("GET/POST variables of this page"))
self.debug_vars(hide_with_mouse = False)
self.end_foldable_container()
def footer(self):
if self.output_format == "html":
self.bottom_footer()
self.body_end()
def bottom_footer(self):
if self.header_sent:
self.bottom_focuscode()
if self.render_headfoot:
self.write("<table class=footer><tr>")
self.write("<td class=left>")
self._write_status_icons()
self.write("</td>")
self.write("<td class=middle></td>"
"<td class=right>")
self.write("<div style=\"display:%s\" id=foot_refresh>%s</div>" % (
(self.browser_reload and "inline-block" or "none",
_("refresh: <div id=foot_refresh_time>%s</div> secs") % self.browser_reload)))
self.write("</td></tr></table>")
def bottom_focuscode(self):
if self.focus_object:
formname, varname = self.focus_object
obj = formname + "." + varname
self.write("<script language=\"javascript\" type=\"text/javascript\">\n"
"<!--\n"
"if (document.%s) {"
" document.%s.focus();\n"
" document.%s.select();\n"
"}\n"
"// -->\n"
"</script>\n" % (obj, obj, obj))
def body_end(self):
if self.have_help:
self.javascript("enable_help();")
if self.keybindings_enabled and self.keybindings:
self.javascript("var keybindings = %s;\n"
"document.body.onkeydown = keybindings_keydown;\n"
"document.body.onkeyup = keybindings_keyup;\n"
"document.body.onfocus = keybindings_focus;\n" %
json.dumps(self.keybindings))
if self.final_javascript_code:
self.javascript(self.final_javascript_code)
self.write("</body></html>\n")
        # Hopefully this is the correct place to perform some "finalization" tasks.
self.store_new_transids()
def popup_trigger(self, *args, **kwargs):
self.write(self.render_popup_trigger(*args, **kwargs))
def render_popup_trigger(self, content, ident, what=None, data=None, url_vars=None,
style=None, menu_content=None, cssclass=None, onclose=None):
style = style and (' style="%s"' % style) or ""
src = '<div class="popup_trigger%s" id="popup_trigger_%s"%s>\n' % (cssclass and (" " + cssclass) or "", ident, style)
onclick = 'toggle_popup(event, this, \'%s\', %s, %s, %s, %s, %s)' % \
(ident, what and "'"+what+"'" or 'null',
data and self.attrencode(json.dumps(data)) or 'null',
url_vars and "'"+self.urlencode_vars(url_vars)+"'" or 'null',
menu_content and "'"+self.attrencode(menu_content)+"'" or 'null',
onclose and "'%s'" % onclose.replace("'", "\\'") or 'null')
src += '<a class="popup_trigger" href="javascript:void(0)" onclick="%s">\n' % onclick
src += content
src += '</a>'
src += '</div>\n'
return src
def _write_status_icons(self):
self.icon_button(self.makeuri([]), _("URL to this frame"),
"frameurl", target="_top", cssclass="inline")
self.icon_button("index.py?" + self.urlencode_vars([("start_url", self.makeuri([]))]),
_("URL to this page including sidebar"),
"pageurl", target="_top", cssclass="inline")
# TODO: Move this away from here. Make a context button. The view should handle this
if self.myfile == "view" and self.var('mode') != 'availability':
self.icon_button(self.makeuri([("output_format", "csv_export")]),
_("Export as CSV"),
"download_csv", target="_top", cssclass="inline")
if self.myfile == "view":
mode_name = self.var('mode') == "availability" and "availability" or "view"
encoded_vars = {}
for k, v in self.page_context.items():
if v == None:
v = ''
elif type(v) == unicode:
v = v.encode('utf-8')
encoded_vars[k] = v
self.popup_trigger(
self.render_icon("menu", _("Add this view to..."), cssclass="iconbutton inline"),
'add_visual', 'add_visual', data=[mode_name, encoded_vars, {'name': self.var('view_name')}],
url_vars=[("add_type", "view")])
for img, tooltip in self.status_icons.items():
if type(tooltip) == tuple:
tooltip, url = tooltip
self.icon_button(url, tooltip, img, cssclass="inline")
else:
self.icon(tooltip, img, cssclass="inline")
if self.times:
self.measure_time('body')
self.write('<div class=execution_times>')
entries = self.times.items()
entries.sort()
for name, duration in entries:
self.write("<div>%s: %.1fms</div>" % (name, duration * 1000))
self.write('</div>')
def debug_vars(self, prefix=None, hide_with_mouse=True, vars=None):
if not vars:
vars = self.vars
if hide_with_mouse:
hover = ' onmouseover="this.style.display=\'none\';"'
else:
hover = ""
self.write('<table %s class=debug_vars>' % hover)
self.write("<tr><th colspan=2>"+_("POST / GET Variables")+"</th></tr>")
for name, value in sorted(vars.items()):
if name in [ "_password", "password" ]:
value = "***"
if not prefix or name.startswith(prefix):
self.write("<tr><td class=left>%s</td><td class=right>%s</td></tr>\n" %
(self.attrencode(name), self.attrencode(value)))
self.write("</table>")
def begin_context_buttons(self):
if not self.context_buttons_open:
self.context_button_hidden = False
self.write("<table class=contextlinks><tr><td>\n")
self.context_buttons_open = True
def end_context_buttons(self):
if self.context_buttons_open:
if self.context_button_hidden:
self.write('<div title="%s" id=toggle class="contextlink short" '
% _("Show all buttons"))
self._context_button_hover_code("_short")
self.write("><a onclick='unhide_context_buttons(this);' href='#'>...</a></div>")
self.write("</td></tr></table>\n")
self.context_buttons_open = False
def context_button(self, title, url, icon=None, hot=False, id=None, bestof=None, hover_title='', fkey=None):
#self.guitest_record_output("context_button", (title, url, icon))
title = self.attrencode(title)
display = "block"
if bestof:
counts = self.get_button_counts()
weights = counts.items()
weights.sort(cmp = lambda a,b: cmp(a[1], b[1]))
best = dict(weights[-bestof:])
if id not in best:
display="none"
self.context_button_hidden = True
if not self.context_buttons_open:
self.begin_context_buttons()
title = "<span>%s</span>" % self.attrencode(title)
if icon:
title = '%s%s' % (self.render_icon(icon, cssclass="inline", middle=False), title)
if id:
idtext = " id='%s'" % self.attrencode(id)
else:
idtext = ""
self.write('<div%s style="display:%s" class="contextlink%s%s" ' %
(idtext, display, hot and " hot" or "", (fkey and self.keybindings_enabled) and " button" or ""))
self._context_button_hover_code(hot and "_hot" or "")
self.write('>')
self.write('<a href="%s"' % self.attrencode(url))
if hover_title:
self.write(' title="%s"' % self.attrencode(hover_title))
if bestof:
self.write(' onclick="count_context_button(this); " ')
if fkey and self.keybindings_enabled:
title += '<div class=keysym>F%d</div>' % fkey
self.add_keybinding([html.F1 + (fkey - 1)], "document.location='%s';" % self.attrencode(url))
self.write('>%s</a></div>\n' % title)
def get_button_counts(self):
raise NotImplementedError()
def _context_button_hover_code(self, what):
self.write(r'''onmouseover='this.style.backgroundImage="url(\"images/contextlink%s_hi.png\")";' ''' % what)
self.write(r'''onmouseout='this.style.backgroundImage="url(\"images/contextlink%s.png\")";' ''' % what)
def begin_foldable_container(self, treename, id, isopen, title, indent=True,
first=False, icon=None, fetch_url=None, title_url=None,
tree_img="tree"):
self.folding_indent = indent
if self._user_id:
isopen = self.foldable_container_is_open(treename, id, isopen)
onclick = ' onclick="toggle_foldable_container(\'%s\', \'%s\', \'%s\')"' % (
treename, id, fetch_url and fetch_url or '');
if indent == "nform":
self.write('<tr class=heading><td id="nform.%s.%s" %s colspan=2>' % (treename, id, onclick))
if icon:
self.write('<img class="treeangle title" src="images/icon_%s.png">' % self.attrencode(icon))
else:
self.write('<img align=absbottom class="treeangle nform %s" src="images/%s_closed.png">' %
("open" if isopen else "closed", tree_img))
self.write('%s</td></tr>' % self.attrencode(title))
else:
if not icon:
self.write('<img align=absbottom class="treeangle %s" id="treeimg.%s.%s" '
'src="images/%s_closed.png" %s>' %
("open" if isopen else "closed", treename, id, tree_img, onclick))
if isinstance(title, HTML): # custom HTML code
self.write(self.attrencode(title))
if indent != "form":
self.write("<br>")
else:
self.write('<b class="treeangle title" %s>' % (not title_url and onclick or ""))
if icon:
self.write('<img class="treeangle title" src="images/icon_%s.png">' % self.attrencode(icon))
if title_url:
self.write('<a href="%s">%s</a>' % (self.attrencode(title_url), self.attrencode(title)))
else:
self.write(self.attrencode(title))
self.write('</b><br>')
indent_style = "padding-left: %dpx; " % (indent == True and 15 or 0)
if indent == "form":
self.write("</td></tr></table>")
indent_style += "margin: 0; "
self.write('<ul class="treeangle %s" style="%s" id="tree.%s.%s">' %
(isopen and "open" or "closed", indent_style, treename, id))
# give caller information about current toggling state (needed for nform)
return isopen
def end_foldable_container(self):
if self.folding_indent != "nform":
self.write("</ul>")
def foldable_container_is_open(self, treename, id, isopen):
# try to get persisted state of tree
tree_state = self.get_tree_states(treename)
if id in tree_state:
isopen = tree_state[id] == "on"
return isopen
#
# Tree states
#
def get_tree_states(self, tree):
self.load_tree_states()
return self.treestates.get(tree, {})
def set_tree_state(self, tree, key, val):
self.load_tree_states()
if tree not in self.treestates:
self.treestates[tree] = {}
self.treestates[tree][key] = val
def set_tree_states(self, tree, val):
self.load_tree_states()
self.treestates[tree] = val
def load_tree_states(self):
raise NotImplementedError()
def save_tree_states(self):
raise NotImplementedError()
#
# Transaction IDs
#
def set_ignore_transids(self):
self.ignore_transids = True
    # Compute a (hopefully) unique transaction id. This is generated during rendering
    # of a form or an action link, stored in a user specific file for later validation,
    # sent to the user's browser via HTML code, then submitted by the user together
    # with the action (link / form) and then validated if it is a known transid. When
    # it is a known transid, it will be used and invalidated. If the id is not known,
    # the action will not be processed.
def fresh_transid(self):
transid = "%d/%d" % (int(time.time()), random.getrandbits(32))
self.new_transids.append(transid)
return transid
def get_transid(self):
if not self.current_transid:
self.current_transid = self.fresh_transid()
return self.current_transid
# All generated transids are saved per user. They are stored in the transids.mk.
# Per user only up to 20 transids of the already existing ones are kept. The transids
# generated on the current page are all kept. IDs older than one day are deleted.
def store_new_transids(self):
if self.new_transids:
valid_ids = self.load_transids(lock = True)
cleared_ids = []
now = time.time()
for valid_id in valid_ids:
timestamp = valid_id.split("/")[0]
if now - int(timestamp) < 86400: # one day
cleared_ids.append(valid_id)
self.save_transids((cleared_ids[-20:] + self.new_transids))
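    # Illustrative lifecycle (sketch, not original code): rendering a form
    # calls get_transid() -> fresh_transid(); body_end() then persists the new
    # id via store_new_transids(). The submitted form carries _transid back,
    # and check_transaction() -> transaction_valid() + invalidate_transid()
    # consumes it, so a plain browser reload of the POST is not re-executed.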
# Remove the used transid from the list of valid ones
def invalidate_transid(self, used_id):
valid_ids = self.load_transids(lock = True)
try:
valid_ids.remove(used_id)
except ValueError:
return
self.save_transids(valid_ids)
    # Checks if the current transaction is valid, i.e. in case of a browser
    # reload the form submit should not be handled a second time. The HTML
    # variable _transid must be present.
    #
    # In case of automation users (authed by _secret in URL): If it is empty
    # or -1, then it's always valid (this is used for webservice calls).
    # This was also possible for normal users, but has been removed to prevent
    # security related issues.
def transaction_valid(self):
if not self.has_var("_transid"):
return False
id = self.var("_transid")
if self.ignore_transids and (not id or id == '-1'):
return True # automation
if '/' not in id:
return False
# Normal user/password auth user handling
timestamp = id.split("/", 1)[0]
# If age is too old (one week), it is always
# invalid:
now = time.time()
if now - int(timestamp) >= 604800: # 7 * 24 hours
return False
# Now check, if this id is a valid one
if id in self.load_transids():
#self.guitest_set_transid_valid()
return True
else:
return False
    # Checks if the current page is a transaction, i.e. something
    # that is secured by a transid (such as a submitted form)
def is_transaction(self):
return self.has_var("_transid")
    # Called by page functions in order to check if this was
    # a reload or the original form submission. Invalidates the
    # used transid if the latter was the case.
# There are three return codes:
# True: -> positive confirmation by the user
# False: -> not yet confirmed, question is being shown
# None: -> a browser reload or a negative confirmation
def check_transaction(self):
if self.transaction_valid():
id = self.var("_transid")
if id and id != "-1":
self.invalidate_transid(id)
return True
else:
return False
def load_transids(self, lock=False):
raise NotImplementedError()
def save_transids(self, used_ids):
raise NotImplementedError()
#
# Keyboard control
# TODO: Can we move this specific feature to AQ?
#
def add_keybinding(self, keylist, jscode):
self.keybindings.append([keylist, jscode])
def add_keybindings(self, bindings):
self.keybindings += bindings
def disable_keybindings(self):
self.keybindings_enabled = False
#
# Per request caching
#
def set_cache(self, name, value):
self.caches[name] = value
return value
def set_cache_default(self, name, value):
if self.is_cached(name):
return self.get_cached(name)
else:
return self.set_cache(name, value)
def is_cached(self, name):
return name in self.caches
def get_cached(self, name):
return self.caches.get(name)
def del_cache(self, name):
if name in self.caches:
del self.caches[name]
def measure_time(self, name):
self.times.setdefault(name, 0.0)
now = time.time()
elapsed = now - self.last_measurement
self.times[name] += elapsed
self.last_measurement = now
#
# Request timeout handling
#
# The system apache process will end the communication with the client after
# the timeout configured for the proxy connection from system apache to site
# apache. This is done in /omd/sites/[site]/etc/apache/proxy-port.conf file
# in the "timeout=x" parameter of the ProxyPass statement.
#
# The regular request timeout configured here should always be lower to make
# it possible to abort the page processing and send a helpful answer to the
# client.
#
    # It is possible to disable the application's request timeout (temporarily)
    # or totally for specific calls, but the timeout to the client will always
    # be applied by the system webserver. So the client will always get an error
    # page while the site apache continues processing the request (until the
    # first try to write anything to the client) which will result in an
    # exception.
#
# The timeout of the Check_MK GUI request processing. When the timeout handling
# has been enabled with enable_request_timeout(), after this time an alarm signal
# will be raised to give the application the option to end the processing in a
# gentle way.
def request_timeout(self):
return self._request_timeout
def enable_request_timeout(self):
signal.signal(signal.SIGALRM, self.handle_request_timeout)
signal.alarm(self.request_timeout())
def disable_request_timeout(self):
signal.alarm(0)
def handle_request_timeout(self, signum, frame):
raise RequestTimeout(_("Your request timed out after %d seconds. This issue may be "
"related to a local configuration problem or a request which works "
"with a too large number of objects. But if you think this "
"issue is a bug, please send a crash report.") %
self.request_timeout())
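# ----------------------------------------------------------------------------
# Illustrative sketch (not part of the original class): the SIGALRM pattern
# that the request timeout handling above relies on, shown standalone. All
# names prefixed with _example are hypothetical.
import signal as _example_signal
class _ExampleRequestTimeout(Exception):
    pass
def _example_on_alarm(signum, frame):
    # Raised inside whatever code is currently running on the main thread
    raise _ExampleRequestTimeout("request processing took too long")
def _example_run_with_timeout(handler, timeout_secs=110):
    _example_signal.signal(_example_signal.SIGALRM, _example_on_alarm)
    _example_signal.alarm(timeout_secs)  # arm: SIGALRM fires after the timeout
    try:
        return handler()
    finally:
        _example_signal.alarm(0)  # always disarm again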
| ypid-bot/check_mk | web/htdocs/htmllib.py | Python | gpl-2.0 | 90,601 |
"""
Django settings for uicloud project.
Generated by 'django-admin startproject' using Django 1.11.2.
For more information on this file, see
https://docs.djangoproject.com/en/1.11/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.11/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.11/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '=rjn@^6pk$6!)9nr!=c!1b^_t%-w-(mh&bf9q9ue)6wyb6ckjn'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = ["*"]
LOGIN_URL = '/uiaccounts/login/'
LOGIN_REDIRECT_URL = '/polls/'
# REST Framework
REST_FRAMEWORK = {
# Use Django's standard `django.contrib.auth` permissions,
# or allow read-only access for unauthenticated users.
'DEFAULT_PERMISSION_CLASSES': [],
'TEST_REQUEST_DEFAULT_FORMAT': 'json'
}
# Application definition
INSTALLED_APPS = [
'uiaccounts',
'puppies',
'rest_framework',
'explorer',
'polls.apps.PollsConfig',
'cloudrestapi',
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'uicloud.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [os.path.join(BASE_DIR, 'uicloud', 'templates')],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'uicloud.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.11/ref/settings/#databases
DATABASES = {
'sqlite3': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
},
'default': {
'ENGINE': 'django.db.backends.mysql',
'NAME': 'djangodb',
'USER': 'root',
'PASSWORD': 'password',
'HOST': 'mysql1',
'PORT': '3306'
},
}
# 'postgresql': {
# 'ENGINE': 'django.db.backends.postgresql',
# 'NAME': 'djangodb',
# 'USER': 'user',
# 'PASSWORD': 'password',
# 'HOST': 'postgre1',
# 'PORT': '5432'
# },
# Password validation
# https://docs.djangoproject.com/en/1.11/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.11/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.11/howto/static-files/
STATIC_URL = '/static/'
STATICFILES_DIRS = [
os.path.join(BASE_DIR, 'uicloud', 'static'),
]
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'formatters': {
'verbose': {
'format': '%(asctime)s %(pathname)s %(module)s %(lineno)d %(name)s %(levelname)s %(process)d %(message)s'
},
'normal': {
'format': '%(asctime)s %(pathname)s %(module)s %(levelname)s %(message)s'
},
'simple': {
'format': '%(asctime)s %(module)s %(levelname)s %(message)s'
},
},
'handlers': {
'basefile': {
'level': 'DEBUG',
'class': 'logging.FileHandler',
'filename': '/home/django/logs/django.log',
'formatter': 'normal'
},
'rotatefile': {
'level': 'DEBUG',
'class': 'logging.handlers.TimedRotatingFileHandler',
'filename': '/home/django/logs/uicloud.log',
'when': 'D', # this specifies the interval
'interval': 1, # defaults to 1, only necessary for other values
'backupCount': 7, # how many backup file to keep, 10 days
'formatter': 'normal'
},
},
'loggers': {
'django': {
'handlers': ['basefile'],
'level': 'WARN',
'propagate': True,
},
'uicloud': {
'handlers': ['rotatefile'],
'level': 'DEBUG',
'propagate': True,
}
},
}
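# Illustrative usage sketch (not part of the original settings): application
# code would obtain the 'uicloud' logger configured above like this; its
# messages then end up in the rotated /home/django/logs/uicloud.log file.
#   import logging
#   logger = logging.getLogger('uicloud')
#   logger.debug("this lands in uicloud.log")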
| hongchhe/mypython | django/projects/uicloud/uicloud/settings.py | Python | apache-2.0 | 5,473 |
#!/usr/bin/python
import functools
import math
# N.B. pygame coordinate system has origin in the upper-left, so these
# directions are y-inverted from standard Cartesian.
EAST = 0
NORTHEAST = math.pi * 1.75
NORTH = math.pi * 1.5
NORTHWEST = math.pi * 1.25
WEST = math.pi
SOUTHWEST = math.pi * 0.75
SOUTH = math.pi * 0.5
SOUTHEAST = math.pi * 0.25
@functools.total_ordering
class Coord:
def __init__(self, x, y):
self.x = x
self.y = y
def __eq__(self, rhs):
if isinstance(rhs, Coord):
return (self.x == rhs.x and
self.y == rhs.y)
return NotImplemented
def __lt__(self, rhs):
if isinstance(rhs, Coord):
if self.x == rhs.x:
return self.y < rhs.y
else:
return self.x < rhs.x
return NotImplemented
def __hash__(self):
# XXX - assuming x and y fit into 16b
return (int(self.x) << 16) | int(self.y)
def __repr__(self):
return '({}, {})'.format(self.x, self.y)
def __str__(self):
return '({}, {})'.format(self.x, self.y)
def distance(self, other):
"""Distance from self to other."""
xd = self.x - other.x
yd = self.y - other.y
return math.sqrt(xd * xd + yd * yd)
def bearing(self, other):
"""Returns angle in radians from self to other."""
if (self.x < other.x):
# Quadrant I or IV
if (self.y > other.y):
# Quadrant I
opp = other.y - self.y
adj = other.x - self.x
quad = 0.0
elif self.y < other.y:
# Quadrant IV
opp = self.x - other.x
adj = other.y - self.y
quad = math.pi * 0.5
else:
# Due east
return EAST
elif (self.x > other.x):
# Quadrant II or III
if (self.y > other.y):
# Quadrant II
opp = self.y - other.y
adj = self.x - other.x
quad = math.pi
elif (self.y < other.y):
# Quadrant III
opp = self.x - other.x
adj = other.y - self.y
quad = math.pi * 0.5
else:
# Due west
return WEST
else:
# Xs are equal
if (self.y < other.y):
# Due south
return SOUTH
elif (self.y > other.y):
# Due north
return NORTH
else:
return EAST # XXX arbitrary
def norm(angle):
"""Normalize angle to 0-2pi."""
twopi = 2.0 * math.pi
if angle < 0:
return norm(angle + twopi)
elif angle > twopi:
return norm(angle - twopi)
else:
return angle
if (adj == 0):
# XXX - this shouldn't be reachable, should be caught by
# the last 'else' above.
assert(False), "not reached"
else:
theta = math.atan(float(opp) / float(adj)) + quad
return norm(theta)
@staticmethod
def from_rect(r, centered=False):
if centered:
x = r.x + (r.width / 2.0)
y = r.y + (r.height / 2.0)
return Coord(x, y)
else:
return Coord(r.x, r.y)
def east(self):
return Coord(self.x + 1, self.y)
def south(self):
return Coord(self.x, self.y + 1)
def west(self):
return Coord(self.x - 1, self.y)
def north(self):
return Coord(self.x, self.y - 1)
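# Illustrative usage sketch (not part of the original module): a few worked
# bearings in the pygame coordinate system described above (y grows downward,
# so "north" means decreasing y).
if __name__ == '__main__':
    origin = Coord(0, 0)
    assert origin.bearing(Coord(1, 0)) == EAST    # +x is due east
    assert origin.bearing(Coord(0, -1)) == NORTH  # -y is due north
    assert abs(origin.bearing(Coord(1, 1)) - SOUTHEAST) < 1e-9  # down-right
    print(origin.distance(Coord(3, 4)))  # -> 5.0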
| bstpierre/yendor | yendor/coord.py | Python | mit | 3,711 |
import os
import re
import yaml
try:
from packaging.version import parse as parse_version
except ImportError:
from pkg_resources import parse_version
from toolbox.config.common import BUTTON_CONFIG_KEYS, CRP_TYPES, CURRENT_MAX_VERSION, CURRENT_MIN_VERSION, PROTOCOLS
from .utils import counted_error, fatal_error
def compare_version(config: dict, min_version: str, max_version: str):
version = parse_version(config['version'])
if version < parse_version(min_version):
return -1
if version > parse_version(max_version):
return 1
return 0
def validate_version(config: dict):
cmp = compare_version(config, CURRENT_MIN_VERSION, CURRENT_MAX_VERSION)
if cmp < 0:
fatal_error('Please, upgrade to version %s with upgrade.py!', CURRENT_MIN_VERSION)
if cmp > 0:
fatal_error('Please, use a newer toolbox for version %s!', config['version'])
def get_crp_type(config: dict) -> str:
crp_type = config.get('crp_type') or 'static'
if crp_type not in CRP_TYPES:
fatal_error("Unknown crp_type: '%s' / %s", crp_type, CRP_TYPES)
return crp_type
def read_config(path: str, *, pre_validate: bool = True) -> dict:
"""
Read the config.yml file
:param path: path to the file or the base directory
:param pre_validate: check version and crp_type fields
:return: dict
"""
if os.path.isdir(path):
path = os.path.join(path, 'config.yml')
try:
with open(path, 'r') as f:
config = yaml.safe_load(f)
if pre_validate:
validate_version(config)
get_crp_type(config)
return config
except Exception as e:
fatal_error('%s(%s)', type(e).__name__, e)
def parse_bool(value) -> bool:
return str(value).lower() in ('true', '1')
def validate_bool(key, value):
if str(value).lower() not in ('true', 'false', '1', '0'):
counted_error('Invalid %s value. It must be boolean.', key)
def validate_flag(config: dict, flag_required: bool = False):
validate_bool('enable_flag_input', config.get('enable_flag_input'))
if config.get('flag'):
try:
if config['flag'][0:6] == 'regex:':
re.compile(config['flag'][6:])
except TypeError:
counted_error('Invalid flag value. It must be string.')
except Exception:
counted_error('Failed to compile regex flag.')
if not parse_bool(config.get('enable_flag_input')):
counted_error('enable_flag_input must be true for static flags.')
elif flag_required:
counted_error('A static (or regex) flag must be set.')
def validate_ports(ports: list, buttons: dict = None): # pylint: disable=too-many-branches
unique_ports = set()
ssh_ports_count = 0
for port in ports:
try:
port, protocol = port.split('/', 1)
unique_ports.add(port)
try:
if not 0 < int(port) < 65536:
raise ValueError
except Exception:
counted_error('Invalid port number: %s. Ports must be numbers between 1 and 65535.', port)
if protocol not in PROTOCOLS:
counted_error('Invalid protocol in config.yml: %s. Valid protocols: %s', protocol, PROTOCOLS)
if protocol == 'ssh':
ssh_ports_count += 1
except Exception:
counted_error('Invalid port format. [port/protocol]')
if len(unique_ports) != len(ports):
counted_error('Duplicate port numbers found.')
if ssh_ports_count > 1:
counted_error('More than one SSH ports. Please, use a single SSH connection.')
if buttons is not None:
if not isinstance(buttons, dict):
counted_error('The buttons field must be a dict.')
else:
for button_key, button in buttons.items():
if button_key not in ports:
counted_error('Button key %s is not found in ports.', button_key)
for key in button.keys():
if key not in BUTTON_CONFIG_KEYS:
counted_error('Key %s is invalid for button %s.', key, button_key)
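
# Shape of the expected arguments, inferred from the parsing above (the
# values are illustrative):
#
#     validate_ports(['8080/tcp', '2222/ssh'],
#                    buttons={'8080/tcp': {...}})  # button keys must appear in
#                                                  # ports; see BUTTON_CONFIG_KEYS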
| avatao-content/challenge-toolbox | toolbox/utils/config.py | Python | apache-2.0 | 4,202 |
"""
Update the IP addresses of your Cloudflare DNS records.
For more details about this component, please refer to the documentation at
https://home-assistant.io/components/cloudflare/
"""
from datetime import timedelta
import logging
import voluptuous as vol
from homeassistant.const import CONF_API_KEY, CONF_EMAIL, CONF_ZONE
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.event import track_time_interval
REQUIREMENTS = ['pycfdns==0.0.1']
_LOGGER = logging.getLogger(__name__)
CONF_RECORDS = 'records'
DOMAIN = 'cloudflare'
INTERVAL = timedelta(minutes=60)
CONFIG_SCHEMA = vol.Schema({
DOMAIN: vol.Schema({
vol.Required(CONF_EMAIL): cv.string,
vol.Required(CONF_API_KEY): cv.string,
vol.Required(CONF_ZONE): cv.string,
vol.Required(CONF_RECORDS): vol.All(cv.ensure_list, [cv.string]),
})
}, extra=vol.ALLOW_EXTRA)
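# Example configuration.yaml entry matching the schema above (the zone and
# record names are placeholders):
#
#   cloudflare:
#     email: user@example.com
#     api_key: !secret cloudflare_api_key
#     zone: example.com
#     records:
#       - home
#       - www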
def setup(hass, config):
"""Set up the Cloudflare component."""
from pycfdns import CloudflareUpdater
cfupdate = CloudflareUpdater()
email = config[DOMAIN][CONF_EMAIL]
key = config[DOMAIN][CONF_API_KEY]
zone = config[DOMAIN][CONF_ZONE]
records = config[DOMAIN][CONF_RECORDS]
    def update_records_interval(now):
        """Update the DNS records on the recurring interval."""
        _update_cloudflare(cfupdate, email, key, zone, records)

    def update_records_service(now):
        """Update the DNS records when the service is called manually."""
        _update_cloudflare(cfupdate, email, key, zone, records)
track_time_interval(hass, update_records_interval, INTERVAL)
hass.services.register(
DOMAIN, 'update_records', update_records_service)
return True
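
# The service registered above can also be invoked manually, e.g. from a
# script or automation (sketch; it takes no service data):
#
#     hass.services.call('cloudflare', 'update_records', {})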
def _update_cloudflare(cfupdate, email, key, zone, records):
"""Update DNS records for a given zone."""
_LOGGER.debug("Starting update for zone %s", zone)
headers = cfupdate.set_header(email, key)
_LOGGER.debug("Header data defined as: %s", headers)
zoneid = cfupdate.get_zoneID(headers, zone)
_LOGGER.debug("Zone ID is set to: %s", zoneid)
update_records = cfupdate.get_recordInfo(headers, zoneid, zone, records)
_LOGGER.debug("Records: %s", update_records)
    result = cfupdate.update_records(headers, zoneid, update_records)
    if result is not True:
        _LOGGER.warning(result)
    _LOGGER.debug("Update for zone %s is complete", zone)
| PetePriority/home-assistant | homeassistant/components/cloudflare/__init__.py | Python | apache-2.0 | 2,351 |
#!/usr/bin/env python
import pytest
import datetime
from BeautifulSoup import BeautifulSoup
import wpl
import kpl
from data import Hold
from data import LoginError
from fakes import MyCard
from fakes import MyOpener
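
# A note on the fakes used throughout (defined in fakes.py, not shown here):
# MyCard appears to supply dummy card credentials, and MyOpener plays back the
# canned HTML responses passed to its constructor, one per simulated request.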
def test__parse_holds__numeric_position__reads_position():
response = BeautifulSoup(
'''<table>
<tr class="patFuncHeaders"><th>STATUS</th></tr>
<tr><td> 9 of 83 holds </td></tr>
</table>''')
w = wpl.LibraryAccount(MyCard(), MyOpener())
assert (9, 83) == w.parse_holds(response)[0].status
def test__parse_holds__title_with_slash__reads_title():
response = BeautifulSoup(
'''<table>
<tr class="patFuncHeaders"><th> TITLE </th></tr>
<tr><td align="left"><a href="/BLAH"> Either/Or / Boo! </a></td></tr>
</table>''')
w = wpl.LibraryAccount(MyCard(), MyOpener())
hold = w.parse_holds(response)[0]
assert ('Either/Or') == hold.title
def test__parse_holds__author_with_slash__reads_author():
response = BeautifulSoup(
'''<table>
<tr class="patFuncHeaders"><th> TITLE </th></tr>
<tr><td align="left"><a href="/BLAH"> JustOne / Bo/o! </a></td></tr>
</table>''')
w = wpl.LibraryAccount(MyCard(), MyOpener())
hold = w.parse_holds(response)[0]
assert ('Bo/o!') == hold.author
@pytest.mark.parametrize("hold_text,expected_status", [
('Ready.', Hold.READY),
('IN TRANSIT', Hold.IN_TRANSIT),
('CHECK SHELVES', Hold.CHECK_SHELVES),
('TRACE', Hold.DELAYED),
])
def test__parse_holds__named_position__parses_position(hold_text, expected_status):
response = BeautifulSoup(
'''<table>
<tr class="patFuncHeaders"><th>STATUS</th></tr>
<tr><td> %s </td></tr>
</table>''' % hold_text)
w = wpl.LibraryAccount(MyCard(), MyOpener())
assert expected_status == w.parse_holds(response)[0].status
hold_with_pickup_dropdown = '''<table lang="en" class="patFunc"><tr class="patFuncTitle">
<th colspan="6" class="patFuncTitle">
6 HOLDS
</th>
</tr>
<tr class="patFuncHeaders">
<th class="patFuncHeaders"> CANCEL </th>
<th class="patFuncHeaders"> TITLE </th>
<th class="patFuncHeaders"> STATUS </th>
<th class="patFuncHeaders">PICKUP LOCATION</th>
<th class="patFuncHeaders"> CANCEL IF NOT FILLED BY </th>
<th class="patFuncHeaders"> FREEZE </th>
</tr>
<tr class="patFuncEntry">
<td class="patFuncMark" align="center">
<input type="checkbox" name="cancelb2193902x00" /></td>
<td class="patFuncTitle">
<a href="/patroninfo~S3/1307788/item&2193902"> Stories </a>
<br />
</td>
<td class="patFuncStatus"> 1 of 1 holds </td>
<td class="patFuncPickup"><select name=locb2193902x00>
<option value="ch+++" >Country Hills Library-KPL</option>
<option value="fh+++" >Forest Heights Library-KPL</option>
<option value="g++++" >Grand River Stanley Pk Lib-KPL</option>
<option value="m++++" >Main Library-KPL</option>
<option value="pp+++" >Pioneer Park Library-KPL</option>
<option value="w++++" >WPL Main Library</option>
<option value="wm+++" selected="selected">WPL McCormick Branch</option>
</select>
</td>
<td class="patFuncCancel">04-03-11</td>
<td class="patFuncFreeze" align="center"><input type="checkbox" name="freezeb2193902" /></td>
</tr>
</table>
'''
def test__parse_holds___pickup_dropdown__pickup_is_read():
response = BeautifulSoup(hold_with_pickup_dropdown)
w = wpl.LibraryAccount(MyCard(), MyOpener())
assert 'WPL McCormick Branch' == w.parse_holds(response)[0].pickup
def test__parse_holds___pickup_dropdown__pickup_is_string():
'''makes for better pickling'''
response = BeautifulSoup(hold_with_pickup_dropdown)
w = wpl.LibraryAccount(MyCard(), MyOpener())
assert str == type(w.parse_holds(response)[0].pickup) # noqa: E721 - need to check exact type
def test__parse_holds___with_expiration_date__reads_expiration():
response = BeautifulSoup(
'''<table>
<tr class="patFuncHeaders"><th>CANCEL IF NOT FILLED BY</th></tr>
<tr><td>04-03-11</td></tr>
</table>''')
w = wpl.LibraryAccount(MyCard(), MyOpener())
hold = w.parse_holds(response)[0]
assert datetime.date(2011, 4, 3) == hold.expires
def test__parse_holds___frozen__added_to_status_notes():
response = BeautifulSoup(
'''<table>
<tr class="patFuncHeaders">
<th> FREEZE
</th>
</tr>
<tr>
<td class="patFuncFreeze" align="center">
<input type="checkbox" name="freezeb2186875" checked />
</td>
</tr>
</table>''')
w = wpl.LibraryAccount(MyCard(), MyOpener())
assert w.parse_holds(response)[0].is_frozen()
def test__parse_holds___empty_freeze_field__is_not_frozen():
response = BeautifulSoup(
'''<table>
<tr class="patFuncHeaders"><th> FREEZE </th></tr>
<tr><td class="patFuncFreeze" align="center"> </td></tr>
</table>''')
w = wpl.LibraryAccount(MyCard(), MyOpener())
assert not w.parse_holds(response)[0].is_frozen()
def test__parse_holds___hold_for_waterloo__finds_correct_url():
response = BeautifulSoup('''<table>
<tr class="patFuncHeaders">
<th> TITLE
</th>
</tr>
<tr class="patFuncEntry">
<td class="patFuncTitle">
<label for="canceli3337880x00">
<a href="/record=b2247789~S3"> The profession : a thriller / Steven Pressfield
</a>
</label>
<br />
</td>
</tr>
</table>''')
w = wpl.LibraryAccount(MyCard(), MyOpener())
assert 'https://books.kpl.org/record=b2247789~S3' == w.parse_holds(response)[0].url
def test__parse_holds___hold_for_kitchener__finds_correct_url():
response = BeautifulSoup('''<table>
<tr class="patFuncHeaders"><th> TITLE </th></tr>
<tr class="patFuncEntry">
<td class="patFuncTitle">
<label for="cancelb2232976x09"><a href="/record=b2232976~S1"> Live wire / Harlan Coben. -- </a></label>
<br />
</td>
</tr>
</table>''')
k = kpl.LibraryAccount(MyCard(), MyOpener())
assert 'https://books.kpl.org/record=b2232976~S1' == k.parse_holds(response)[0].url
def test__parse_items__title_has_slash__parses_title():
response = BeautifulSoup(
'''
<table lang="en">
<tr class="patFuncHeaders">
<th> RENEW
</th>
<th> TITLE
</th>
<th > BARCODE
</th>
<th> STATUS
</th>
<th > CALL NUMBER
</th>
</tr>
<tr>
<td align="left">
<input type="checkbox" name="renew0" value="i3103561" />
</td>
<td align="left">
<a href="/patroninfo~S3/1307788/item&2160792"> The city/the city / China Mi\u00E9ville
</a>
</td>
<td align="left"> 33420011304806
</td>
<td align="left"> DUE 07-20-09
<span >Renewed 1 time
</span>
</td>
<td align="left"> MIEVI
</td>
</tr>
</table>
''')
w = wpl.LibraryAccount(MyCard(), MyOpener())
item = w.parse_items(response)[0]
assert 'The city/the city' == item.title
def test__parse_items__author_has_accent__parses_author():
response = BeautifulSoup(
'''
<table lang="en">
<tr class="patFuncHeaders">
<th> RENEW
</th>
<th> TITLE
</th>
<th > BARCODE
</th>
<th> STATUS
</th>
<th > CALL NUMBER
</th>
</tr>
<tr>
<td align="left">
<input type="checkbox" name="renew0" value="i3103561" />
</td>
<td align="left">
<a href="/patroninfo~S3/1307788/item&2160792"> The city/the city / China Mi\u00E9ville
</a>
</td>
<td align="left"> 33420011304806
</td>
<td align="left"> DUE 07-20-09
<span >Renewed 1 time
</span>
</td>
<td align="left"> MIEVI
</td>
</tr>
</table>
''')
w = wpl.LibraryAccount(MyCard(), MyOpener())
item = w.parse_items(response)[0]
assert 'China Mi\u00E9ville' == item.author
def test__parse_items__with_status_notes__finds_status_notes():
response = BeautifulSoup(
'''
<table lang="en">
<tr class="patFuncHeaders">
<th> RENEW
</th>
<th> TITLE
</th>
<th > BARCODE
</th>
<th> STATUS
</th>
<th > CALL NUMBER
</th>
</tr>
<tr>
<td align="left">
<input type="checkbox" name="renew0" value="i3103561" />
</td>
<td align="left">
<a href="/patroninfo~S3/1307788/item&2160792"> The city/the city / China Mi\u00E9ville
</a>
</td>
<td align="left"> 33420011304806
</td>
<td align="left"> DUE 07-20-09
<span >Renewed 1 time
</span>
</td>
<td align="left"> MIEVI
</td>
</tr>
</table>
''')
w = wpl.LibraryAccount(MyCard(), MyOpener())
item = w.parse_items(response)[0]
assert ['Renewed 1 time'] == item.status_notes
def test__parse_items__span_in_title__all_text_in_title():
response = BeautifulSoup(
'''
<table lang="en">
<tr class="patFuncHeaders">
<th> RENEW
</th>
<th> TITLE
</th>
<th > BARCODE
</th>
<th> STATUS
</th>
<th > CALL NUMBER
</th>
</tr>
<tr class="patFuncEntry">
<td align="left" class="patFuncMark">
<input type="checkbox" name="renew3" id="renew3" value="i2626300" />
</td>
<td align="left" class="patFuncTitle">
<label for="renew3">
<a href="/record=b1945079~S3"> Hiking the redwood coast -- <span class="patFuncVol">2004</span>
</a>
</label>
<br />
</td>
<td align="left" class="patFuncBarcode"> 33420007964514
</td>
<td align="left" class="patFuncStatus"> DUE 05-29-10
</td>
<td align="left" class="patFuncCallNo"> 917.9404 Hik
</td>
</tr>
</table>
''')
w = wpl.LibraryAccount(MyCard(), MyOpener())
item = w.parse_items(response)[0]
assert '''Hiking the redwood coast -- 2004''' == item.title
def test__parse_items__no_author__author_blank():
response = BeautifulSoup(
'''
<table lang="en">
<tr class="patFuncHeaders">
<th> RENEW
</th>
<th> TITLE
</th>
<th > BARCODE
</th>
<th> STATUS
</th>
<th > CALL NUMBER
</th>
</tr>
<tr class="patFuncEntry">
<td align="left" class="patFuncMark">
<input type="checkbox" name="renew3" id="renew3" value="i2626300" />
</td>
<td align="left" class="patFuncTitle">
<label for="renew3">
<a href="/record=b1945079~S3"> Hiking the redwood coast
</a>
</label>
<br />
</td>
<td align="left" class="patFuncBarcode"> 33420007964514
</td>
<td align="left" class="patFuncStatus"> DUE 05-29-10
</td>
<td align="left" class="patFuncCallNo"> 917.9404 Hik
</td>
</tr>
</table>
''')
w = wpl.LibraryAccount(MyCard(), MyOpener())
item = w.parse_items(response)[0]
assert '' == item.author
def test__parse_status__status_notes_jammed_up_against_date__date_parsed():
response = BeautifulSoup(
'''
<table lang="en">
<tr class="patFuncHeaders">
<th> RENEW
</th>
<th> TITLE
</th>
<th > BARCODE
</th>
<th> STATUS
</th>
<th > CALL NUMBER
</th>
</tr>
<tr>
<td align="left">
<input type="checkbox" name="renew0" value="i3103561" />
</td>
<td align="left">
<a href="/patroninfo~S3/1307788/item&2160792"> The city/the city / China Mi\u00E9ville
</a>
</td>
<td align="left"> 33420011304806
</td>
<td align="left"> DUE 10-07-09IN LIBRARY USE
</span>
</td>
<td align="left"> MIEVI
</td>
</tr>
</table>
''')
w = wpl.LibraryAccount(MyCard(), MyOpener())
item = w.parse_items(response)[0]
assert 'The city/the city' == item.title
assert 'China Mi\u00E9ville' == item.author
assert datetime.date(2009, 10, 7) == item.status
def test__parse_status__status_notes_jammed_up_against_date__status_notes_found():
response = BeautifulSoup(
'''
<table lang="en">
<tr class="patFuncHeaders">
<th> RENEW
</th>
<th> TITLE
</th>
<th > BARCODE
</th>
<th> STATUS
</th>
<th > CALL NUMBER
</th>
</tr>
<tr>
<td align="left">
<input type="checkbox" name="renew0" value="i3103561" />
</td>
<td align="left">
<a href="/patroninfo~S3/1307788/item&2160792"> The city/the city / China Mi\u00E9ville
</a>
</td>
<td align="left"> 33420011304806
</td>
<td align="left"> DUE 10-07-09IN LIBRARY USE
</span>
</td>
<td align="left"> MIEVI
</td>
</tr>
</table>
''')
w = wpl.LibraryAccount(MyCard(), MyOpener())
item = w.parse_items(response)[0]
assert ['IN LIBRARY USE'] == item.status_notes
failing_login_response = '''
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
<html xmlns="http://www.w3.org/1999/xhtml" xml:lang="en" lang="en">
<head>
<link rel="stylesheet" type="text/css" href="/scripts/ProStyles.css" />
<link rel="stylesheet" type="text/css" href="/screens/w-stylesheet3.css" />
<title>Library Log in
</title>
<meta http-equiv="X-UA-Compatible" content="IE=8;FF=3;OtherUA=4" />
<meta http-equiv="Content-type" content="text/html;charset=UTF-8" />
<meta name="viewport" content="width=device-width,user-scalable=no" />
<script type="text/javascript" src="/scripts/common.js">
</script>
<!--<link rel="stylesheet" type="text/css" href="/apps/CAS/resources/login_mobile.css">
<link rel="stylesheet" type="text/css"
href="/apps/CAS/resources/login_s3_html.css" media="screen and (min-device-width: 481px)"> -->
<link rel="stylesheet" type="text/css" href="/apps/CAS/resources/login_mobile_s3.css" />
<style type="text/css" media="screen and (min-width: 481px)">
<!--
@import url("/apps/CAS/resources/login_s3_html.css");
-->
</style>
</head>
<body id="cas">
<!--[if IE]>
<div id="ie">
<![endif]-->
<div class="loginPage">
<div class="loginTop">
<!-- Prevent div collapse -->
<div class="loginTopLogo">
<a href="http://www.wpl.ca" tabindex="0">
<img src="/screens/w-logo-137x60.gif" alt="">
</a>
</div>
</div>
<!-- Use for library-only authentication: -->
<div class="loginArea loginArea1Col">
<!--<div style="text-align:center; background:#FF0077; color:black;" >
<strong>Please Note:</strong>
Holds placed online are currently not working.
Please call us at 519-886-1310 to have staff help you place holds.
</div>-->
<div class="clearfloats">
</div>
<!--end theForm1-->
<form id="fm1" class="fm-v clearfix" method="post"
action="/iii/cas/login?service=https%3A%2F%2Fencore.kpl.org%3A443%2Fiii%2Fencore...">
<!--display any errors-->
<div id="status" class="errors">Sorry, the information you submitted was invalid. Please try again.
</div>
<!-- Message from client webapp to be displayed on the CAS login screen -->
<div id="clientmessage">
<!--display any errors-->
</div>
<!-- end clientmessage -->
<!--start theForm2-->
<!-- Message from client webapp to be displayed on the CAS login screen -->
<div id="clientmessage">
<!--display any errors-->
</div>
<!-- end clientmessage -->
<!--display login form-->
<span style="padding-left:1.8em;">
<h3>Library Account Login
</h3>
</span>
<div id="login">
<fieldset>
<label for="name">First and Last Name:
</label>
<div class="loginField">
<input id="name" name="name" class="required" tabindex="3" accesskey="n"
type="text" value="" size="20" maxlength="40"/>
</div>
<fieldset class="barcodeAltChoice">
<!--<legend>Enter your barcode or login name</legend>-->
<label for="code">Library card number
<br />(no spaces):
</label>
<div class="loginField">
<input id="code" name="code" class="required" tabindex="4" accesskey="b"
type="text" size="20" maxlength="40" />
</div>
</fieldset>
<!--<div id="ipssopinentry">
<label for="pin">Personal Identification Number (PIN):</label>
<div class="loginFieldBg">
<input id="pin" name="pin" class="required" tabindex="6" accesskey="p"
type="password" value="" size="20" maxlength="40" />
</div>
</div>-->
<!--end theForm2-->
<!--start theForm3-->
<!-- This button is hidden unless using mobile devices. Even if hidden it enables Enter key to submit. -->
<input type="submit" name="Log In" class="loginSubmit" tabindex="35" />
</fieldset>
</div>
<!-- end login -->
<div class="clearfloats">
</div>
<div class="formButtons">
<a href="#" onclick="document.forms['fm1'].submit();" tabindex="40">
<div onmousedown="this.className='pressedState';" onmouseout="this.className='';"
onmouseup="this.className='';">
<div class="buttonSpriteDiv">
<span class="buttonSpriteSpan1">
<span class="buttonSpriteSpan2">Submit
</span>
</span>
</div>
</div>
</a>
</div>
<br />
<div style="display:none;">
<!--Enable form focus-->
<script type="text/javascript">
<!--//-->
<![CDATA[//>
<!--
//Hide the main PIN entry if the new pin section is active.
//try { if ( document.getElementById("ipssonewpin") ) {
// document.getElementById("ipssopinentry").style.display="none"; } }
//catch(err) {}
//Look for the first field in the external patron part of the form. This field will get cursor focus.
var ipssoFirstField;
try { ipssoFirstField = document.forms[0].extpatid; }
catch(err) {
}
//If we still don't have a field, look for the name field in the library account part.
if ( ipssoFirstField==undefined ) { ipssoFirstField = document.forms[0].name; }
//Set focus. Ignore errors.
try { ipssoFirstField.focus(); }
catch(err) {}
document.onkeydown = enterSubmit
function enterSubmit(e) {
var keycode;
if (window.event) keycode = window.event.keyCode;
else if (e) keycode = e.which;
if (keycode==13)
document.forms[0].submit();
}
//-->
<!]]>
</script>
<!--end theForm3-->
<!-- Spring Web Flow requirements must be in a certain place -->
<input type="hidden" name="lt"
value="_c761F6248-082B-2453-47FE-DEBB4500C8AD_kF7718391-1925-2239-9B69-01CE8B941744" />
<input type="hidden" name="_eventId" value="submit" />
</form>
<!--start theForm4-->
</div>
</div>
<!-- end loginArea -->
<div class="loginActions">
<!--
<span class="loginActionText">New to the library?</span>
<span class="loginActionScreenOnly"><a href="/selfreg">Create an account</a></span>
<span class="loginActionSeparator"></span>
-->
</div>
</div>
<!-- loginPage -->
<!--[if IE]>
</div>
<![endif]-->
<!-- IPSSO html form updated 2010 June 29 -->
</body>
</html>
<!--this is customized </iiidb/http/apps//CAS/resources/ipsso_s3.html>-->
<!--end theForm4-->
'''
def test__login__login_fails__throws():
w = wpl.LibraryAccount(MyCard(),
MyOpener('',
failing_login_response))
    pytest.raises(LoginError, w.login)
def test__login__new_kpl_format__passes():
w = wpl.LibraryAccount(MyCard(), MyOpener('', '''
<!-- Rel 2007 "Skyline" Example Set -->
<!-- This File Last Changed: 02 September 2008 -->
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"
"http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd"> <html xmlns="http://www.w3.org/1999/xhtml">
<head>
<title>Kitchener and Waterloo Public Libraries /KPL</title>
<base target="_self"/>
<link rel="stylesheet" type="text/css" href="/scripts/ProStyles.css" />
<link rel="stylesheet" type="text/css" href="/screens/k-stylesheet1.css" />
<link rel="shortcut icon" type="ximage/icon" href="/screens/favicon.ico" />
<script type="text/javascript" src="/scripts/common.js"></script>
<script type="text/javascript" src="/scripts/features.js"></script>
<script type="text/javascript" src="/scripts/elcontent.js"></script>
<link rel="icon" href="/screens/favicon.ico"><meta http-equiv="Content-Type" content="text/html; charset=ISO-8859-1">
</head>
<body >
<body>
<div id="wrapper">
<div id="header">
<a href="http://www.kpl.org/">
<img src="/screens/kpl_logo.png" alt="Kitchener Public Library logo"/></a>
<div id="nav">
<ul id="navmenu">
<li><a href="http://books.kpl.org/search~S1/" title="Library Catalogue" class="selected">Catalogue</a></li>
</ul>
</div>
<div id="nav2"> </div>
</div>
<font color="purple">You are logged in to Kitchener and Waterloo Public Libraries
/KPL as: </font><font color="purple" size="+2">Hippo, Librarian</font><br />
<br />
<script type="text/javascript">
function SetHTML1(type) {
document.getElementById("a1").style.display = "none"
document.getElementById("b1").style.display = "none"
// Using style.display="block" instead of style.display="" leaves a carriage return
document.getElementById(type).style.display = ""
}
</script>
<div align="center">
<span id="a1" style="">
<form method="get" action="http://encore.kpl.org/iii/encore_kpl/Home,$Search.form.sdirect" name="form" id="form">
<input name="formids" value="target" type="hidden">
<input name="lang" value="eng" type="hidden">
<input name="suite" value="def" type="hidden">
<input name="reservedids" value="lang,suite" type="hidden">
<input name="submitmode" value="" type="hidden">
<input name="submitname" value="" type="hidden">
<table>
<tr>
<td style="padding-right:10px;">
<span style="font-family:'Times New Roman', Times, serif; font-size:1.4em;">Search:</span>
</td>
<td><input name="target" value="" id="target" type="text"
style="border:1px solid #555; width:410px; height:30px; font-size:100%;">
</td>
<td style="padding-left:10px;">
<input type="image" src="http://www.kpl.org/_images/catalogue/go_button.png" value="submit"/>
</td>
</tr>
<tr><td colspan="3" style="font-size:12px;"> </td></tr>
</table>
</form>
</span>
<span id="b1" style="display:none;">
<div class="bibSearchtool" style="margin-top:5px;"><form target="_self" action="/search~S2/">
<label for="searchtype" style="display:none;">Search Type1</label><select name="searchtype" id="searchtype">
<option value="t"> TITLE</option>
<option value="a"> AUTHOR</option>
<option value="s"> SERIES</option>
<option value="d"> SUBJECT</option>
<option value="c"> CALL NO</option>
<option value="i"> ISBN/ISSN</option>
<option value="Y" selected="selected"> KEYWORD</option>
</select>
<label for="searcharg" style="display:none;">Search</label>
<input type="text" name="searcharg" id="searcharg" size="30" maxlength="75" value="" />
<label for="searchscope" style="display:none;">Search Scope</label><select name="searchscope" id="searchscope">
<option value="2" selected>Kitchener Public Library</option>
<option value="3">Waterloo Public Library</option>
<option value="5">King Digital Collection</option>
</select>
<input type="hidden" name="SORT" value="D" />
<input type="hidden" name="extended" value="0" /><input type="submit" name="SUBMIT" value="Search" />
<div style="margin-top:6px;">
<input type="checkbox" name="availlim" value="1" />
<span class="limit-to-available">Limit results to available items<br/><br/></span>
</div>
</form></div>
</span>
<div align="center" style=" font-family: Arial, Helvetica, sans-serif; font-size:14px;">
<input style="margin-top:5px;" id="multisearch" name="br"
type="radio" onClick="SetHTML1('a1')" checked>Search New KPL Catalogue
<input style="margin-top:5px;" id="multisearch" name="br"
type="radio" onClick="SetHTML1('b1')">Search Classic Catalogue
</div>
<br /><br />
<p style="font-size:0.85em;">
<span style="color:#990000; font-weight:bold;">Note:</span>
Please remember to <strong>LOG OUT</strong> of your library account when you are finished using the catalogue.<br />
The logout option can be found at the bottom of this page, or in the top right corner of the catalogue.</p>
<br />
</div>
<!--{patron}-->
<br/><br/>
<div align="center">
<table>
<tr>
<td>
<div class="patNameAddress">
<strong>Hippo, Librarian</strong><br />
100 Regina Street S<br />
Waterloo ON N2V 4A8<br />
519-885-1550 (E)<br />
EXP DATE:08-01-2013<br />
<br/>
<div>
</div>
<div>
<a href="/patroninfo~S1/XXXXXXXX/holds" target="_self">4 requests (holds).</a>
</div>
<br><br>
</div>
</td>
<td>
<div class="patActionsLinks">
<div>
<a href="#" onClick="return open_new_window( '/patroninfo~S1/XXXXXXXX/modpinfo' )">Modify Personal Information</a>
</div>
<div><p>
<a href="/patroninfo~S1/XXXXXXXX/readinghistory" target="_self">My Reading History</a>
</p></div>
<br>
Classic catalogue only:
<div><p>
<a href="/patroninfo~S1/XXXXXXXX/getpsearches" target="_self">Preferred Searches</a>
</p></div>
<div>
<a href="/patroninfo~S1/XXXXXXXX/mylists" target="_self">My Lists</a>
</div>
<br>
<p><a href="http://encore.kpl.org/iii/encore_kpl/home?...">
<img src="/screens/b-logout.gif" alt="Log Out" border="0" />
</a></p>
<!--
<p valign=top><a href="/logout?" target="_self"><img src="/screens/b-logout.gif" alt="Log Out" border="0" /></a></p>
-->
</div></td>
</tr>
</table>
</div>
<br/><br/>
<div class="patFuncArea" style="border:1px solid #555555;">
</div>
<br />
<div class="footer"></div>
</div>
</body>
</html>
<!--this is customized <screens/patronview_web_s1.html>-->
'''))
w.login()
def test__get_status__with_card_expiry_date__reads_date():
w = wpl.LibraryAccount(MyCard(), MyOpener('',
'''<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"
"http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd"> <html xmlns="http://www.w3.org/1999/xhtml">
<head>
<title>Kitchener and Waterloo Public Libraries /WPL</title>
<base target="_self"/>
<link rel="stylesheet" type="text/css" href="/scripts/ProStyles.css" />
<link rel="stylesheet" type="text/css" href="/screens/w-stylesheet3.css" />
<link rel="shortcut icon" type="ximage/icon" href="/screens/favicon.ico" />
<script type="text/javascript" src="/scripts/common.js"></script>
<script type="text/javascript" src="/scripts/features.js"></script>
<script type="text/javascript" src="/scripts/elcontent.js"></script>
<link rel="icon" href="/screens/favicon.ico"><meta http-equiv="Content-Type" content="text/html; charset=ISO-8859-1">
</head>
<body >
<script language="JavaScript" type="text/javascript">
var min=8;
var max=22;
function increaseFontSize() {
var p = document.getElementsByTagName('*')
for(i=0;i<p.length;i++) {
if(p[i].style.fontSize) {
var s = parseInt(p[i].style.fontSize.replace("px",""));
} else {
var s = 14;
}
if(s!=max) {
s += 2;
}
p[i].style.fontSize = s+"px"
}
}
function decreaseFontSize() {
var p = document.getElementsByTagName('*');
for(i=0;i<p.length;i++) {
if(p[i].style.fontSize) {
var s = parseInt(p[i].style.fontSize.replace("px",""));
} else {
var s = 14;
}
if(s!=min) {
s -= 2;
}
p[i].style.fontSize = s+"px"
}
}
</script>
<script language="JavaScript" type="text/javascript">
<!-- Hide the JS
startTimeout(600000, "/search~S3/");
-->
</script>
<!-- begin toplogo.html file -->
<!-- HEADER -->
<a class="linkskip" href="#content">Skip over navigation</a>
<div id="container-header">
<div id="header-main1-background">
<div id="container-header-content">
<div id="header-logo">
<a href="http://www.wpl.ca">
<img src="/screens/wpl-logo-main1.jpg" alt="Waterloo Public Library"/>
</a></div>
<div id="header-nav" align=center>
<ul>
<li><a href="http://books.kpl.org/selfreg~S3/">Get a Card</a></li>
<li><a href="https://books.kpl.org/iii/cas/login?service=http://books.kpl.org/patroninfo~S3/..."
class="navline">My Account</a></li>
<li><a href="http://www.wpl.ca/location">Hours & Locations</a></li>
<li><a href="http://www.wpl.ca/contact">Contact Us</a></li>
</ul>
</div>
<div id="header-main1-utility">
<div id="header-title" class=title1><a href="/search~S3/">Catalogue</a></div>
<div id="font-size">
<a href="javascript:decreaseFontSize();">
<img src="/screens/wpl-font-smaller.gif" alt="Font Smaller" width="15" height="38"/>
</a>
<a href="javascript:increaseFontSize();" >
<img src="/screens/wpl-font-larger.gif" alt="Font Larger" width="19" height="38"/>
</a>
</div>
</div>
</div>
</div>
</div>
<!-- NAV -->
<div id="container-nav" align=center>
<div id="nav">
<ul>
<li><a href="http://www.wpl.ca" class="navline">Home</a></li>
<li><a href="http://books.kpl.org/search~S3">Catalogue</a></li>
<li><a href="http://www.wpl.ca/ebooks">eBooks</a></li>
<li><a href="http://www.wpl.ca/ebranch">eBranch</a></li>
<li><a href="http://www.wpl.ca/book-a-computer">Book a Computer</a></li>
<li><a href="http://www.wpl.ca/blogs-more">Blogs</a></li>
<li><a href="http://www.wpl.ca/ebranch/diy/">DIY</a></li>
</ul>
</div>
</div>
<div align=center>
<a href="http://wplreads.wpl.ca">WPL Reads</a> |
<a href="http://books.kpl.org/screens/newitems.html">New Items</a>
| <a href="http://www.wpl.ca/about/borrowing/interlibrary-loan-form/">Interlibrary Loan</a>
| <a href="http://www.wpl.ca/ebranch/databases-and-weblinks">Databases and WebLinks</a>
| <a href="http://www.wpl.ca/services/ask-us/">Ask Us</a>
</div>
<!--end toplogo.html-->
<br />
<p align=center><font size=4 color=#0066cc>Kitchener and Waterloo Public Libraries/WPL <br />
You are logged in as HIPPO, LIBRARIAN.</font><p><br />
<br />
<div class="srchhelpHeader" align="center">
<form method="get" action="http://encore.kpl.org/iii/encore_wpl/Home,$Search.form.sdirect" name="form" id="form">
<input name="formids" value="target" type="hidden">
<input name="lang" value="eng" type="hidden">
<input name="suite" value="def" type="hidden">
<input name="reservedids" value="lang,suite" type="hidden">
<input name="submitmode" value="" type="hidden">
<input name="submitname" value="" type="hidden">
<table>
<tr>
<td style="padding-right:10px;">
<span style="font-family:'Times New Roman', Times, serif; font-size:1.7em;">Search:</span>
</td>
<td><input name="target" value="" id="target" type="text"
style="border:1px solid #555; width:410px; height:30px; font-size:1.4em;"></td>
<td style="padding-left:10px;"><input type="image" src="/screens/go_button.png" value="submit"/></td>
</tr>
<tr>
<td></td>
<td align="right">
<p><a href="http://encore.kpl.org/iii/encore_wpl/home?lang=eng&suite=kpl&advancedSearch=true&searchString=">
Advanced Search</a></p></td>
<td></td></tr>
</table>
</form>
<br />
<a name="content" id="content"></a>
<!--<form name="searchtool" action="/search~S3/">
<select tabindex="1" name="searchtype" id="searchtype" onChange="initSort()">
<option value="X" selected>Keyword</option>
<option value="t">Title</option>
<option value="a">Author</option>
<option value="s">Series</option>
<option value="d">Subject</option>
<option value="c">Call Number</option>
<option value="i">ISBN/ISSN</option>
</select>
<input tabindex="2" type="text" name="searcharg" size="50" maxlength="75">
<input type="hidden" name="searchscope" value="3">
<input tabindex="3" type="submit" value="Search">
</div>
<div class="media">
<div align="center">Media (book, DVD, etc.):
<select tabindex="4" name="searchlimits">
<option value="" selected>Any</option>
<option value="m=d">DVD</option>
<option value="m=j">CD Audio</option>
<option value="m=m">CD-ROM</option>
<option value="m=z">E-audio Book</option>
<option value="m=e">E-book</option>
<option value="m=a">Book</option>
<option value="m=l">Large Print Book</option>
<option value="m=v">Government Document</option>
<option value="m=c">Magazine/Newspaper</option>
<option value="m=o">Kit</option>
</select>
</div>
</div>
<label class="limit-to-available">
<div align="center">
<input tabindex="5" type="checkbox" name="availlim" value="1">
Limit results to available items
</div>
</label>
</form>
<br />-->
<!--{patron}-->
<table>
<tr>
<td valign=top>
<div class="patNameAddress">
<strong>HIPPO, LIBRARIAN.</strong><br />
100 Regina Steet S<br />
WATERLOO, ON N2V 4A8<br />
519-885-1550<br />
EXP DATE:12-04-2009<br />
<br/>
<div>
</div>
<div>
<a href="/patroninfo~S3/1307788/holds" target="_self">14 requests (holds).</a>
</div>
<div>
<a href="/patroninfo~S3/1307788/items" target="_self">8 Items currently checked out</a>
</div>
</div>
</td>
<td>
<div style="text-align:left;">
<div>
<a href="#" onClick="return open_new_window( '/patroninfo~S3/1307788/modpinfo' )">Modify Personal Information</a>
</div>
<div><p>
<a href="/patroninfo~S3/1307788/readinghistory" target="_self">My Reading History</a>
</p></div>
<div><p>
<p> </p>
Classic Catalogue Features:
</p></div>
<div><p>
<a href="/patroninfo~S3/1307788/getpsearches" target="_self">Preferred Searches</a>
</p></div>
<div style="display:none;">
<a href="/patroninfo~S3/1307788/patreview" target="_self">My Reviews</a>
</div>
<div>
<a href="/patroninfo~S3/1307788/mylists" target="_self">My Lists</a>
</div>
</div>
</td>
</tr>
</table>
<table>
<tr>
<td>
<div class="patActionsLinks">
<!--
<p valign=top><a href="http://encore.kpl.org/iii/encore_wpl/..."
target="_self"><img src="/screens/b-logout.gif" alt="Log Out" border="0" /></a></p>-->
<p valign=top>
<a href="http://encore.kpl.org/iii/encore_wpl/home?component=..."
target="_self">
<img src="/screens/b-logout.gif" alt="Log Out" border="0" /></a></p>
</div></td>
</tr>
</table>
<br/><br/>
<div class="patFuncArea" style="border:0px #555555;">
</div>
<br />
</div>
<div class="botlogo">
<br />
Your library account may not be available during scheduled system maintenance
10:00pm - 12:00am Mon to Thu, & 6pm - 8pm Fri to Sun.
<br />
</div>
</body>
</html>
<!--this is customized <screens/patronview_web_s3.html>-->
''', '', '', ''))
card_info = w.get_status()
assert datetime.date(2009, 12, 4) == card_info.expires
def test__get_status__wpl_login__finds_correct_holds_url():
w = wpl.LibraryAccount(MyCard(), MyOpener('#login',
'''<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"
"http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
<body >
<table>
<tr>
<td valign=top>
<div class="patNameAddress">
<div>
<a href="/patroninfo~S3/XXXXXXX/holds" target="_self">4 requests (holds).</a>
</div>
<div>
<a href="/patroninfo~S3/XXXXXXX/items" target="_self">5 Items currently checked out</a>
</div>
</div>
</td>
<td>
<div class="patActionsLinks">
<div>
<a href="#" onClick="return open_new_window( '/patroninfo~S3/XXXXXXX/newpin' )">Modify your PIN</a>
</div>
<div><p>
<a href="/patroninfo~S3/XXXXXXX/readinghistory" target="_self">My Reading History</a>
</p></div>
<div><p>
<a href="/patroninfo~S3/XXXXXXX/getpsearches" target="_self">Preferred Searches</a>
</p></div>
</div>
</td>
</tr>
</table>
</body>
</html>''', '''<table>
<tr class="patFuncHeaders"><th> TITLE </th></tr>
<tr><td align="left"><a href="/BLAH"> Either/Or / Bo/o! </a></td></tr>
</table>''', '#items', '#logout'))
status = w.get_status()
assert 'https://books.kpl.org/patroninfo~S3/XXXXXXX/holds' == status.holds[0].holds_url
def test__get_status__wpl_login_no_holds__finds_no_holds():
w = wpl.LibraryAccount(MyCard(), MyOpener('#login',
'''<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"
"http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
<body >
<table>
<tr>
<td valign=top>
<div class="patNameAddress">
<div>
<a href="/patroninfo~S3/XXXXXXX/items" target="_self">5 Items currently checked out</a>
</div>
</div>
</td>
<td>
<div class="patActionsLinks">
<div>
<a href="#" onClick="return open_new_window( '/patroninfo~S3/XXXXXXX/newpin' )">Modify your PIN</a>
</div>
<div><p>
<a href="/patroninfo~S3/XXXXXXX/readinghistory" target="_self">My Reading History</a>
</p></div>
<div><p>
<a href="/patroninfo~S3/XXXXXXX/getpsearches" target="_self">Preferred Searches</a>
</p></div>
</div>
</td>
</tr>
</table>
</body>
</html>''', '#holds', '#items', '#logout'))
status = w.get_status()
assert status.holds == []
def test__get_status__wpl_login_no_items__finds_no_items():
w = wpl.LibraryAccount(MyCard(), MyOpener('#login',
'''<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"
"http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
<body >
<table>
<tr>
<td valign=top>
<div class="patNameAddress">
<div>
<a href="/patroninfo~S3/XXXXXXX/holds" target="_self">4 requests (holds).</a>
</div>
</div>
</td>
<td>
<div class="patActionsLinks">
<div>
<a href="#" onClick="return open_new_window( '/patroninfo~S3/XXXXXXX/newpin' )">Modify your PIN</a>
</div>
<div><p>
<a href="/patroninfo~S3/XXXXXXX/readinghistory" target="_self">My Reading History</a>
</p></div>
<div><p>
<a href="/patroninfo~S3/XXXXXXX/getpsearches" target="_self">Preferred Searches</a>
</p></div>
</div>
</td>
</tr>
</table>
</body>
</html>''', '#holds', '#items', '#logout'))
status = w.get_status()
assert status.items == []
def test__login_wpl_format_2013_06_07__can_parse_the_login_screen():
w = wpl.LibraryAccount(MyCard(), MyOpener('''
<form id="fm1" class="fm-v clearfix" method="post"
action="/iii/cas/login?service=https://books.kpl.org/patroninfo~S3/j_acegi_cas_security_check&lang=eng&scope=3">
<!--display any errors-->
<!-- Message from client webapp to be displayed on the CAS login screen -->
<div id="clientmessage">
<!--display any errors-->
</div>
<!-- end clientmessage -->
<!--start theForm2-->
<!-- Message from client webapp to be displayed on the CAS login screen -->
<div id="clientmessage">
<!--display any errors-->
</div>
<!-- end clientmessage -->
<!--display login form-->
<span style="padding-left:1.8em;">
<h3>Library Account Login
</h3>
</span>
<div id="login">
<fieldset>
<label for="name">First and Last Name:
</label>
<div class="loginField">
<input id="name" name="name" class="required" tabindex="3"
accesskey="n" type="text" value="" size="20" maxlength="40"/>
</div>
<fieldset class="barcodeAltChoice">
<!--<legend>Enter your barcode or login name</legend>-->
<label for="code">Library card number
<br />(no spaces):
</label>
<div class="loginField">
<input id="code" name="code" class="required" tabindex="4"
accesskey="b" type="text" size="20" maxlength="40" />
</div>
</fieldset>
<!--<div id="ipssopinentry">
<label for="pin">Personal Identification Number (PIN):</label>
<div class="loginFieldBg">
<input id="pin" name="pin" class="required" tabindex="6"
accesskey="p" type="password" value="" size="20" maxlength="40" />
</div>
</div>-->
<!--end theForm2-->
<!--start theForm3-->
<!-- This button is hidden unless using mobile devices. Even if hidden it enables Enter key to submit. -->
<input type="submit" name="Log In" class="loginSubmit" tabindex="35" />
</fieldset>
</div>
<!-- end login -->
<div class="clearfloats">
</div>
<div class="formButtons">
<a href="#" onclick="document.forms['fm1'].submit();" tabindex="40">
<div onmousedown="this.className='pressedState';"
onmouseout="this.className='';" onmouseup="this.className='';">
<div class="buttonSpriteDiv">
<span class="buttonSpriteSpan1">
<span class="buttonSpriteSpan2">Submit
</span>
</span>
</div>
</div>
</a>
</div>
<!--end theForm3-->
<!-- Spring Web Flow requirements must be in a certain place -->
<input type="hidden" name="lt"
value="_cF3646058-103E-2F3B-C9DB-0C9931EDB267_k24CDA5F8-E174-085D-7570-0D56ADBFE0E7" />
<input type="hidden" name="_eventId" value="submit" />
</form>''',
# "patNameAddress" is enough to make the login think it worked
'''"patNameAddress"'''))
w.login()
| blairconrad/LibraryHippo | Tests/test_wpl.py | Python | mit | 43,177 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
'''
Copyright (c) 2013 Ulrich Mierendorff
Permission is hereby granted, free of charge, to any person obtaining a
copy of this software and associated documentation files (the "Software"),
to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense,
and/or sell copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
'''
import socket
import struct
MB_SET_BULK = 0xb8
MB_GET_BULK = 0xba
MB_REMOVE_BULK = 0xb9
MB_ERROR = 0xbf
MB_PLAY_SCRIPT = 0xb4
DEFAULT_HOST = 'localhost'
DEFAULT_PORT = 1978
DEFAULT_EXPIRE = 0xffffffffff
FLAG_NOREPLY = 0x01
class KyotoTycoonError(Exception):
""" Class for Exceptions in this module """
class KyotoTycoon:
def __init__(self, host=DEFAULT_HOST, port=DEFAULT_PORT, lazy=True,
timeout=None):
self.host = host
self.port = port
self.timeout = timeout
self.socket = None
if not lazy:
self._connect()
def set(self, key, val, db, expire=DEFAULT_EXPIRE, flags=0):
return self.set_bulk(((key,val,db,expire),), flags)
def set_bulk_kv(self, kv, db, expire=DEFAULT_EXPIRE, flags=0):
recs = ((key,val,db,expire) for key,val in kv.items())
return self.set_bulk(recs, flags)
def set_bulk(self, recs, flags=0):
if self.socket is None:
self._connect()
request = [struct.pack('!BI', MB_SET_BULK, flags), None]
cnt = 0
for key,val,db,xt in recs:
request.append(struct.pack('!HIIq', db, len(key), len(val), xt))
request.append(key)
request.append(val)
cnt += 1
request[1] = struct.pack('!I', cnt)
self._write(b''.join(request))
if flags & FLAG_NOREPLY:
return None
magic, = struct.unpack('!B', self._read(1))
if magic == MB_SET_BULK:
recs_cnt, = struct.unpack('!I', self._read(4))
return recs_cnt
elif magic == MB_ERROR:
raise KyotoTycoonError('Internal server error 0x%02x' % MB_ERROR)
else:
raise KyotoTycoonError('Unknown server error')
def get(self, key, db, flags=0):
recs = self.get_bulk(((key, db),), flags)
if not recs:
return None
return recs[0][1]
def get_bulk_keys(self, keys, db, flags=0):
recs = ((key,db) for key in keys)
recs = self.get_bulk(recs, flags)
return dict(((key,val) for key,val,db,xt in recs))
def get_bulk(self, recs, flags=0):
if self.socket is None:
self._connect()
request = [struct.pack('!BI', MB_GET_BULK, flags), None]
cnt = 0
for key,db in recs:
request.append(struct.pack('!HI', db, len(key)))
request.append(key)
cnt += 1
request[1] = struct.pack('!I', cnt)
self._write(b''.join(request))
magic, = struct.unpack('!B', self._read(1))
if magic == MB_GET_BULK:
recs_cnt, = struct.unpack('!I', self._read(4))
recs = []
for i in range(recs_cnt):
db,key_len,val_len,xt = struct.unpack('!HIIq', self._read(18))
key = self._read(key_len)
val = self._read(val_len)
recs.append((key,val,db,xt))
return recs
elif magic == MB_ERROR:
raise KyotoTycoonError('Internal server error 0x%02x' % MB_ERROR)
else:
raise KyotoTycoonError('Unknown server error')
def remove(self, key, db, flags=0):
return self.remove_bulk(((key,db),), flags)
def remove_bulk_keys(self, keys, db, flags=0):
recs = ((key,db) for key in keys)
return self.remove_bulk(recs, flags)
def remove_bulk(self, recs, flags=0):
if self.socket is None:
self._connect()
request = [struct.pack('!BI', MB_REMOVE_BULK, flags), None]
cnt = 0
for key,db in recs:
request.append(struct.pack('!HI', db, len(key)))
request.append(key)
cnt += 1
request[1] = struct.pack('!I', cnt)
self._write(b''.join(request))
if flags & FLAG_NOREPLY:
return None
magic, = struct.unpack('!B', self._read(1))
if magic == MB_REMOVE_BULK:
recs_cnt, = struct.unpack('!I', self._read(4))
return recs_cnt
elif magic == MB_ERROR:
raise KyotoTycoonError('Internal server error 0x%02x' % MB_ERROR)
else:
raise KyotoTycoonError('Unknown server error')
def play_script(self, name, recs, flags=0):
if self.socket is None:
self._connect()
request = [struct.pack('!BII', MB_PLAY_SCRIPT, flags, len(name)), None,
name]
cnt = 0
for key,val in recs:
request.append(struct.pack('!II', len(key), len(val)))
request.append(key)
request.append(val)
cnt += 1
request[1] = struct.pack('!I', cnt)
self._write(b''.join(request))
if flags & FLAG_NOREPLY:
return None
magic, = struct.unpack('!B', self._read(1))
if magic == MB_PLAY_SCRIPT:
recs_cnt, = struct.unpack('!I', self._read(4))
recs = []
for i in range(recs_cnt):
key_len,val_len = struct.unpack('!II', self._read(8))
key = self._read(key_len)
val = self._read(val_len)
recs.append((key,val))
return recs
elif magic == MB_ERROR:
raise KyotoTycoonError('Internal server error 0x%02x' % MB_ERROR)
else:
raise KyotoTycoonError('Unknown server error')
def close(self):
if self.socket is not None:
self.socket.close()
self.socket = None
def _connect(self):
self.socket = socket.create_connection((self.host, self.port),
self.timeout)
def _write(self, data):
self.socket.sendall(data)
    def _read(self, bytecnt):
        buf = []
        read = 0
        while read < bytecnt:
            recv = self.socket.recv(bytecnt-read)
            if not recv:
                # The server closed the connection before sending the full
                # response; bail out instead of looping forever.
                raise KyotoTycoonError('Connection closed while reading response')
            buf.append(recv)
            read += len(recv)
        return b''.join(buf)
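
# Usage sketch (assumes a reachable Kyoto Tycoon server with the binary
# protocol enabled; keys and values must be bytes):
#
#     kt = KyotoTycoon(host='localhost', port=1978)
#     kt.set(b'key', b'value', db=0)
#     kt.get(b'key', db=0)       # -> b'value'
#     kt.remove(b'key', db=0)
#     kt.close()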
| ganwell/ktasync | files/kyototycoon_orig.py | Python | agpl-3.0 | 7,463 |
#!/usr/bin/env python
import codecs
import os
import sys
from setuptools import setup, find_packages
if 'publish' in sys.argv:
os.system('python setup.py sdist upload')
sys.exit()
def read(filepath):
    with codecs.open(filepath, 'r', 'utf-8') as f:
        return f.read()
# Dynamically calculate the version based on galeria.VERSION.
version = __import__('galeria').get_version()
setup(
name='django-galeria',
version=version,
description='Pluggable gallery/portfolio application for Django projects',
long_description=read(os.path.join(os.path.dirname(__file__), 'README.rst')),
author='Guilherme Gondim',
author_email='semente+django-galeria@taurinus.org',
maintainer='Guilherme Gondim',
maintainer_email='semente+django-galeria@taurinus.org',
license='BSD License',
url='https://bitbucket.org/semente/django-galeria/',
packages=find_packages(),
zip_safe=False,
include_package_data=True,
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
],
install_requires=['django-mptt']
)
| zokeber/django-galeria | setup.py | Python | bsd-3-clause | 1,378 |
from .base import BaseEncoder
import gzip
try:
from io import BytesIO as StringIO
except ImportError:
try:
from cStringIO import StringIO
except ImportError:
from StringIO import StringIO
class Gzip(BaseEncoder):
content_encoding = 'gzip'
def __init__(self, *args, **kwargs):
self.compress_level = kwargs.pop('compress_level', 6)
self.buffer_class = kwargs.pop('buffer_class', StringIO)
super(Gzip, self).__init__(*args, **kwargs)
def compress(self, data):
gzip_buffer = self.buffer_class()
gzip_file = gzip.GzipFile(mode='wb', compresslevel=self.compress_level, fileobj=gzip_buffer)
gzip_file.write(data)
gzip_file.close()
        return gzip_buffer.getvalue()
| allanlei/flask-content-encoding | flask_contentencoding/gzip.py | Python | bsd-3-clause | 763
#!/usr/bin/env python3
# Copyright (C) 2014 University Radio York
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
"""
Lists all permissions assigned to the role with the given alias.
Each is listed, one per line, in ascending alphabetical order, in the
format 'SHORT_NAME'.
Usage: roleperms.py ALIAS
"""
import sys
import database
if __name__ == '__main__':
permissions = database.permissions_for_roles(sys.argv[1:])
print('\n'.join(permissions))
| CaptainHayashi/permtools | roleperms.py | Python | mit | 1,470 |
from django.shortcuts import render
from manabi.apps.flashcards.models import Deck
def homepage(request):
if request.user.is_anonymous:
return render(request, 'landing_page.html')
decks = Deck.objects.of_user(request.user)
return render(request, 'homepage.html', {
'decks': decks,
})
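
# Typical URL wiring for this view (sketch; the URL name and pattern are
# assumptions, not taken from the project's urls.py):
#
#     from django.urls import path
#     urlpatterns = [path('', homepage, name='homepage')]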
| aehlke/manabi | manabi/views.py | Python | mit | 321 |