Dataset schema (per-record fields, with value ranges observed across the dataset):

  field            type     range / classes
  repo_name        string   length 6-90
  path             string   length 4-230
  copies           string   length 1-4
  size             string   length 4-7
  content          string   length 734-985k
  license          string   15 classes
  hash             int64    -9,223,303,126,770,100,000 to 9,223,233,360B
  line_mean        float64  3.79-99.6
  line_max         int64    19-999
  alpha_frac       float64  0.25-0.96
  autogenerated    bool     1 class
  ratio            float64  1.5-8.06
  config_test      bool     2 classes
  has_no_keywords  bool     2 classes
  few_assignments  bool     1 class
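The dump does not document how the numeric columns are computed. Under the assumption (not stated anywhere in this dump) that line_mean and line_max are the mean and maximum line length of content in characters and alpha_frac is the fraction of alphabetic characters, a record's statistics could be recomputed roughly as in the sketch below; compute_stats and its return keys are illustrative only, not part of the dataset.

def compute_stats(content: str) -> dict:
    """Recompute per-file statistics under the assumed column definitions above."""
    lines = content.splitlines() or ['']
    line_lengths = [len(line) for line in lines]
    alphabetic = sum(ch.isalpha() for ch in content)
    return {
        'size': len(content),                             # equals bytes for ASCII sources
        'line_mean': sum(line_lengths) / len(line_lengths),
        'line_max': max(line_lengths),
        'alpha_frac': alphabetic / max(len(content), 1),
    }

# For example, the colors.py record below reports size 4207, line_mean 33.77,
# line_max 120 and alpha_frac 0.61; compute_stats(open(path).read()) should land
# close to those values if the assumed definitions hold.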
repo_name: berkmancenter/mediacloud | path: apps/common/src/python/mediawords/util/colors.py | copies: 1 | size: 4207
content:

import random
from colorsys import hsv_to_rgb, rgb_to_hsv
from typing import List

from mediawords.db import DatabaseHandler
from mediawords.util.perl import decode_object_from_bytes_if_needed

__MC_COLORS = [
    '1f77b4', 'aec7e8', 'ff7f0e', 'ffbb78', '2ca02c',
    '98df8a', 'd62728', 'ff9896', '9467bd', 'c5b0d5',
    '8c564b', 'c49c94', 'e377c2', 'f7b6d2', '7f7f7f',
    'c7c7c7', 'bcbd22', 'dbdb8d', '17becf', '9edae5',
    '84c4ce', 'ffa779', 'cc5ace', '6f11c9', '6f3e5d',
]


def hex_to_rgb(hex_color: str) -> tuple:
    """Get R, G, B channels from hex color, e.g. "FF0000"."""
    if hex_color.startswith('#'):
        hex_color = hex_color[1:]
    return tuple(int(hex_color[i:i + 2], 16) for i in (0, 2, 4))


def rgb_to_hex(r: int, g: int, b: int) -> str:
    """Get hex color (e.g. "FF0000") from R, G, B channels."""
    return '%02x%02x%02x' % (int(r), int(g), int(b),)


def analogous_color(color: str, return_slices: int = 4, split_slices: int = 12) -> List[str]:
    """Generate analogous color scheme starting with the provided color.

    Analogous color scheme is the one in which the colors lie next to each other on the color wheel.

    By default this method splits up the color wheel into 12 pieces and returns the original parameter
    color plus the next 3 pieces of the wheel (4 colors in total).

    For example, by padding '0000ff' (blue) you would get back '0000ff' (blue -- the original color)
    '80000ff' (purple), 'ff00ff' (pink) and 'ff0080' (hot pink) colors.

    Ported from https://metacpan.org/pod/Color::Mix#analogous().

    :param color: Starting color
    :param return_slices: Number of color slices to return
    :param split_slices: Number of slices to split the color wheel into
    :return: Generated colors starting with the parameter color
    """
    color = decode_object_from_bytes_if_needed(color)

    def shift_hue(hue: int, angle_: float) -> int:
        return int((hue + angle_) % 360)

    def rotate_color(color_: str, angle_: float) -> str:
        r, g, b = hex_to_rgb(color_)
        h, s, v = rgb_to_hsv(r, g, b)

        h *= 360
        h = shift_hue(hue=h, angle_=angle_)
        h /= 360

        r, g, b = hsv_to_rgb(h, s, v)
        r = int(r)
        g = int(g)
        b = int(b)

        return rgb_to_hex(r, g, b)

    angle = 360 / split_slices

    colors = [color]
    for x in range(1, return_slices):
        new_color = rotate_color(color_=color, angle_=angle * x)
        colors.append(new_color)

    return colors


def get_consistent_color(db: DatabaseHandler, item_set: str, item_id: str) -> str:
    """Return the same hex color (e.g. "ff0000") for the same set / ID combination every time this
    function is called."""
    item_set = decode_object_from_bytes_if_needed(item_set)
    item_id = decode_object_from_bytes_if_needed(item_id)

    # Always return grey for null or not typed values
    if item_id.lower() in {'null', 'not typed'}:
        return '999999'

    color = db.query("""SELECT color FROM color_sets WHERE color_set = %(item_set)s AND id = %(item_id)s""", {
        'item_set': item_set,
        'item_id': item_id,
    }).flat()
    if color is not None and len(color):
        if isinstance(color, list):
            color = color[0]
        return color

    set_colors = db.query("""SELECT color FROM color_sets WHERE color_set = %(item_set)s""", {
        'item_set': item_set,
    }).flat()

    if set_colors is not None:
        if not isinstance(set_colors, list):
            set_colors = [set_colors]

    existing_colors = set()
    if set_colors is not None:
        for color in set_colors:
            existing_colors.add(color)

    # Use the hard coded palette of 25 colors if possible
    new_color = None
    for color in __MC_COLORS:
        if color not in existing_colors:
            new_color = color
            break

    # Otherwise, just generate a random color
    if new_color is None:
        colors = analogous_color(color='0000ff', return_slices=256, split_slices=255)
        new_color = random.choice(colors)

    db.create(table='color_sets', insert_hash={
        'color_set': item_set,
        'id': item_id,
        'color': new_color,
    })

    return new_color
license: agpl-3.0 | hash: 3,920,569,508,398,515,000 | line_mean: 33.768595 | line_max: 120 | alpha_frac: 0.614452 | autogenerated: false | ratio: 3.170309 | config_test: false | has_no_keywords: false | few_assignments: false

repo_name: bundgus/python-playground | path: matplotlib-playground/examples/user_interfaces/embedding_in_qt4_wtoolbar.py | copies: 5 | size: 2225
content:

from __future__ import print_function

import sys

import numpy as np
from matplotlib.figure import Figure
from matplotlib.backend_bases import key_press_handler
from matplotlib.backends.backend_qt4agg import (
    FigureCanvasQTAgg as FigureCanvas,
    NavigationToolbar2QT as NavigationToolbar)
from matplotlib.backends import qt4_compat

use_pyside = qt4_compat.QT_API == qt4_compat.QT_API_PYSIDE

if use_pyside:
    from PySide.QtCore import *
    from PySide.QtGui import *
else:
    from PyQt4.QtCore import *
    from PyQt4.QtGui import *


class AppForm(QMainWindow):
    def __init__(self, parent=None):
        QMainWindow.__init__(self, parent)
        #self.x, self.y = self.get_data()
        self.data = self.get_data2()
        self.create_main_frame()
        self.on_draw()

    def create_main_frame(self):
        self.main_frame = QWidget()

        self.fig = Figure((5.0, 4.0), dpi=100)
        self.canvas = FigureCanvas(self.fig)
        self.canvas.setParent(self.main_frame)
        self.canvas.setFocusPolicy(Qt.StrongFocus)
        self.canvas.setFocus()

        self.mpl_toolbar = NavigationToolbar(self.canvas, self.main_frame)

        self.canvas.mpl_connect('key_press_event', self.on_key_press)

        vbox = QVBoxLayout()
        vbox.addWidget(self.canvas)  # the matplotlib canvas
        vbox.addWidget(self.mpl_toolbar)
        self.main_frame.setLayout(vbox)
        self.setCentralWidget(self.main_frame)

    def get_data2(self):
        return np.arange(20).reshape([4, 5]).copy()

    def on_draw(self):
        self.fig.clear()
        self.axes = self.fig.add_subplot(111)
        #self.axes.plot(self.x, self.y, 'ro')
        self.axes.imshow(self.data, interpolation='nearest')
        #self.axes.plot([1,2,3])
        self.canvas.draw()

    def on_key_press(self, event):
        print('you pressed', event.key)
        # implement the default mpl key press events described at
        # http://matplotlib.org/users/navigation_toolbar.html#navigation-keyboard-shortcuts
        key_press_handler(event, self.canvas, self.mpl_toolbar)


def main():
    app = QApplication(sys.argv)
    form = AppForm()
    form.show()
    app.exec_()

if __name__ == "__main__":
    main()
license: mit | hash: 708,373,481,128,667,900 | line_mean: 29.067568 | line_max: 91 | alpha_frac: 0.653034 | autogenerated: false | ratio: 3.471139 | config_test: false | has_no_keywords: false | few_assignments: false

repo_name: anthill-services/anthill-common | path: anthill/common/pubsub.py | copies: 1 | size: 4971
from tornado.ioloop import IOLoop from . import rabbitconn from pika.exceptions import ChannelClosed import logging import ujson class Publisher(object): def __init__(self): pass async def publish(self, channel, payload, routing_key=''): raise NotImplementedError() async def release(self): pass async def start(self): raise NotImplementedError() class Subscriber(object): def __init__(self): self.handlers = {} async def on_receive(self, channel, payload): handlers = self.handlers.get(channel, None) if handlers is not None: for handler in handlers: await handler(payload) async def release(self): pass async def start(self): pass async def on_channel_handled(self, channel_name, routing_key=None): logging.info("Listening for channel '{0}'.".format(channel_name)) async def handle(self, channel, handler, routing_key=None): existing_handlers = self.handlers.get(channel, None) if existing_handlers is not None: existing_handlers.append(handler) return self.handlers[channel] = [handler] await self.on_channel_handled(channel, routing_key=routing_key) EXCHANGE_PREFIX = "pub_" QUEUE_PREFIX = "sub_" class RabbitMQSubscriber(Subscriber): def __init__(self, broker, name=None, round_robin=True, **settings): super(RabbitMQSubscriber, self).__init__() self.broker = broker self.settings = settings self.connection = None self.queue = None self.consumer = None self.name = name or "*" self.round_robin = round_robin self.channel = None def __on_message__(self, channel, method, properties, body): exchange_name = method.exchange if exchange_name.startswith(EXCHANGE_PREFIX): # cut first letters channel_name = exchange_name[len(EXCHANGE_PREFIX):] logging.debug("Received '{0}' : {1}.".format(channel_name, body)) try: content = ujson.loads(body) except (KeyError, ValueError): logging.exception("Failed to decode incoming message") else: IOLoop.current().spawn_callback(self.on_receive, channel_name, content) else: logging.error("Bad exchange name") channel.basic_ack(delivery_tag=method.delivery_tag) async def release(self): if self.queue: await self.queue.delete() self.connection.close() async def on_channel_handled(self, channel_name, routing_key=None): await self.channel.exchange( exchange=EXCHANGE_PREFIX + channel_name, exchange_type='direct' if routing_key else 'fanout') await self.queue.bind(exchange=EXCHANGE_PREFIX + channel_name, routing_key=routing_key) await super(RabbitMQSubscriber, self).on_channel_handled(channel_name, routing_key=routing_key) async def start(self): self.connection = rabbitconn.RabbitMQConnection( self.broker, connection_name="sub." 
+ self.name, **self.settings) await self.connection.wait_connect() self.channel = await self.connection.channel(prefetch_count=self.settings.get("channel_prefetch_count", 1024)) if self.round_robin: self.queue = await self.channel.queue(queue=QUEUE_PREFIX + self.name, auto_delete=True) else: self.queue = await self.channel.queue(exclusive=True) self.consumer = await self.queue.consume( consumer_callback=self.__on_message__, no_ack=False) await super(RabbitMQSubscriber, self).start() class RabbitMQPublisher(Publisher): def __init__(self, broker, name, **settings): super(RabbitMQPublisher, self).__init__() self.broker = broker self.settings = settings self.connection = None self.channel = None self.exchanges = set() self.name = name async def publish(self, channel, payload, routing_key=''): body = ujson.dumps(payload) logging.info("Publishing '{0}' : {1}.".format(channel, body)) try: self.channel.basic_publish( exchange=EXCHANGE_PREFIX + channel, routing_key=routing_key, body=body) except ChannelClosed: logging.info("Channel '{0}' closed.".format(channel)) await self.release() await self.start() async def release(self): self.connection.close() async def start(self): # connect self.connection = rabbitconn.RabbitMQConnection( self.broker, connection_name="pub." + str(self.name), **self.settings) await self.connection.wait_connect() self.channel = await self.connection.channel()
license: mit | hash: -2,485,012,216,796,939,300 | line_mean: 28.766467 | line_max: 118 | alpha_frac: 0.612955 | autogenerated: false | ratio: 4.212712 | config_test: false | has_no_keywords: false | few_assignments: false

repo_name: NuGrid/NuGridPy | path: nugridpy/regression_tests/ImageCompare/abu_chart.py | copies: 1 | size: 1292
content:

from builtins import str
from builtins import range

from ... import ppn as p
import os
import os.path


def load_chart_files(path='.'):
    n = 39
    nsparsity = 10
    for cycle in range(0, n, nsparsity):
        cycle_str = str(cycle).zfill(2)
        os.system("wget -q --content-disposition --directory '" + path + "' "
                  + "'http://www.canfar.phys.uvic.ca/vospace/synctrans?TARGET="
                  + "vos%3A%2F%2Fcadc.nrc.ca%21vospace%2Fnugrid%2Fdata%2Fprojects%2Fppn%2Fexamples%2F"
                  + "ppn_Hburn_simple%2Fiso_massf000" + cycle_str + ".DAT&DIRECTION=pullFromVoSpace&PROTOCOL"
                  "=ivo%3A%2F%2Fivoa.net%2Fvospace%2Fcore%23httpget'")
        os.system("wget -q --content-disposition --directory '" + path + "' "
                  + "'http://www.canfar.phys.uvic.ca/vospace/synctrans?TARGET="
                  + "vos%3A%2F%2Fcadc.nrc.ca%21vospace%2Fnugrid%2Fdata%2Fprojects%2Fppn%2Fexamples%2F"
                  + "ppn_Hburn_simple%2FMasterAbuChart" + cycle_str + ".png&DIRECTION=pullFromVoSpace&PROTOCOL"
                  "=ivo%3A%2F%2Fivoa.net%2Fvospace%2Fcore%23httpget'")

    a = p.abu_vector(path)
    a.abu_chart(list(range(0, n, nsparsity)), plotaxis=[-1, 16, -1, 15],
                savefig=True, path=path)

if __name__ == "__main__":
    load_chart_files()
license: bsd-3-clause | hash: 2,772,963,692,733,972,000 | line_mean: 48.692308 | line_max: 107 | alpha_frac: 0.619969 | autogenerated: false | ratio: 2.708595 | config_test: false | has_no_keywords: false | few_assignments: false

repo_name: vidyar/memsql-python | path: memsql/common/sql_step_queue.py | copies: 1 | size: 9254
from memsql.common.connection_pool import MySQLError from memsql.common import json, errorcodes, sql_utility import time from datetime import datetime from contextlib import contextmanager PRIMARY_TABLE = """\ CREATE TABLE IF NOT EXISTS %(prefix)s_tasks ( id INT AUTO_INCREMENT PRIMARY KEY, created TIMESTAMP DEFAULT NOW(), data JSON, INDEX (created) )""" EXECUTION_TABLE = """\ CREATE TABLE IF NOT EXISTS %(prefix)s_executions ( id INT AUTO_INCREMENT PRIMARY KEY, task_id INT, steps JSON, started TIMESTAMP DEFAULT NOW(), last_contact TIMESTAMP DEFAULT 0, finished TIMESTAMP DEFAULT 0, UNIQUE INDEX (task_id), INDEX (started), INDEX (last_contact) )""" class StepAlreadyStarted(Exception): pass class StepNotStarted(Exception): pass class StepAlreadyFinished(Exception): pass class AlreadyFinished(Exception): pass class StepRunning(Exception): pass class SQLStepQueue(sql_utility.SQLUtility): def __init__(self, table_prefix="", execution_ttl=60 * 5): """ Initialize the SQLStepQueue with the specified table prefix and execution TTL (in seconds). """ super(SQLStepQueue, self).__init__() self.execution_ttl = execution_ttl self.table_prefix = table_prefix.rstrip('_') + '_stepqueue' self.tasks_table = self.table_prefix + '_tasks' self.executions_table = self.table_prefix + '_executions' self._define_table(self.tasks_table, PRIMARY_TABLE % { 'prefix': self.table_prefix }) self._define_table(self.executions_table, EXECUTION_TABLE % { 'prefix': self.table_prefix }) ############################### # Public Interface def qsize(self): """ Return an approximate number of queued tasks in the queue. """ count = self._query_queued('COUNT(*) AS count') return count[0].count def enqueue(self, data): """ Enqueue task with specified data. """ jsonified_data = json.dumps(data) with self._db_conn() as conn: conn.execute( 'INSERT INTO %s_tasks (data) VALUES (%%s)' % self.table_prefix, jsonified_data ) def start(self, block=False, timeout=None, retry_interval=0.5): """ Retrieve a task handler from the queue. If block is True, this function will block until it is able to retrieve a task. If block is True and timeout is a number it will block for at most <timeout> seconds. retry_interval is the time in seconds between successive retries. 
""" start = time.time() while 1: task_handler = self._dequeue_task() if task_handler is None and block: if timeout is not None and (time.time() - start) > timeout: break time.sleep(retry_interval) else: break return task_handler ############################### # Private Interface def _query_queued(self, projection): with self._db_conn() as conn: result = conn.query(''' SELECT %(projection)s FROM %(prefix)s_tasks AS tsk LEFT JOIN %(prefix)s_executions AS exc ON tsk.id = exc.task_id WHERE ( exc.task_id IS NULL OR ( exc.finished = 0 AND exc.last_contact <= NOW() - INTERVAL %(ttl)s SECOND ) ) ORDER BY tsk.created ASC -- oldest first ''' % { 'projection': projection, 'prefix': self.table_prefix, 'ttl': self.execution_ttl }) return result def _dequeue_task(self): try: with self._db_conn() as conn: conn.execute(''' DELETE FROM %(prefix)s_executions WHERE finished = 0 AND last_contact <= NOW() - INTERVAL %(ttl)s SECOND ''' % { 'prefix': self.table_prefix, 'ttl': self.execution_ttl }) execution_id = conn.execute(''' INSERT INTO %(prefix)s_executions (task_id, last_contact, steps) SELECT tsk.id, NOW(), '[]' FROM %(prefix)s_tasks AS tsk LEFT JOIN %(prefix)s_executions AS exc ON tsk.id = exc.task_id WHERE exc.task_id IS NULL ORDER BY tsk.created ASC -- oldest first LIMIT 1 ''' % { 'prefix': self.table_prefix, 'ttl': self.execution_ttl }) if execution_id == 0: # select returned no rows return None return TaskHandler(execution_id=execution_id, queue=self) except MySQLError as (errno, msg): if errno == errorcodes.ER_DUP_ENTRY: return None else: raise class TaskHandler(object): def __init__(self, execution_id, queue): self._execution_id = execution_id self._queue = queue self.finished = 0 self.task_id = None self.data = None self.steps = None self._refresh() ############################### # Public Interface def valid(self): if self.finished != 0: return False with self._db_conn() as conn: row = conn.get(''' SELECT (last_contact > NOW() - INTERVAL %(ttl)s SECOND) AS valid FROM %(prefix)s_executions WHERE id = %%s ''' % { 'prefix': self._queue.table_prefix, 'ttl': self._queue.execution_ttl }, self._execution_id) return bool(row is not None and row.valid) def ping(self): """ Notify the queue that this task is still active. """ if self.finished != 0: raise AlreadyFinished() with self._db_conn() as conn: conn.execute(''' UPDATE %(prefix)s_executions SET last_contact=NOW() WHERE id = %%s ''' % { 'prefix': self._queue.table_prefix }, self._execution_id) def finish(self): if self._running_steps() != 0: raise StepRunning() if self.finished != 0: raise AlreadyFinished() self.finished = datetime.utcnow() self._save() def start_step(self, step_name): """ Start a step. """ if self.finished != 0: raise AlreadyFinished() step_data = self._get_step(step_name) if step_data is not None: if 'stop' in step_data: raise StepAlreadyFinished() else: raise StepAlreadyStarted() self.steps.append({ "start": datetime.utcnow(), "name": step_name }) self._save() def stop_step(self, step_name): """ Stop a step. 
""" if self.finished != 0: raise AlreadyFinished() step_data = self._get_step(step_name) if step_data is None: raise StepNotStarted() elif 'stop' in step_data: raise StepAlreadyFinished() step_data['stop'] = datetime.utcnow() step_data['duration'] = (step_data['stop'] - step_data['start']).total_seconds() self._save() @contextmanager def step(self, step_name): self.start_step(step_name) yield self.stop_step(step_name) ############################### # Private Interface def _get_step(self, step_name): for step in self.steps: if step['name'] == step_name: return step return None def _running_steps(self): return len([s for s in self.steps if 'stop' not in s]) def _db_conn(self): return self._queue._db_conn() def _refresh(self): with self._db_conn() as conn: row = conn.get(''' SELECT exc.task_id, tsk.data, exc.steps FROM %(prefix)s_tasks AS tsk INNER JOIN %(prefix)s_executions AS exc ON tsk.id = exc.task_id WHERE exc.id = %%s ''' % { 'prefix': self._queue.table_prefix }, self._execution_id) self.task_id = row.task_id self.data = json.loads(row.data) self.steps = json.loads(row.steps) def _save(self): with self._db_conn() as conn: conn.execute(''' UPDATE %(prefix)s_executions SET last_contact=NOW(), steps=%%s, finished=%%s WHERE id = %%s ''' % { 'prefix': self._queue.table_prefix }, json.dumps(self.steps), self.finished, self._execution_id)
license: mit | hash: -6,992,098,494,900,591,000 | line_mean: 30.158249 | line_max: 100 | alpha_frac: 0.505727 | autogenerated: false | ratio: 4.440499 | config_test: false | has_no_keywords: false | few_assignments: false

repo_name: NUKnightLab/cityhallmonitor | path: cityhallmonitor/management/commands/pull_pdfs.py | copies: 1 | size: 6662
import logging import pprint import re import urllib from django.conf import settings from django.core.management.base import BaseCommand, CommandError from django.db.models import F, Q from django.utils import timezone from cityhallmonitor.models import Matter, MatterAttachment from documentcloud import DocumentCloud logger = logging.getLogger(__name__) DOCUMENT_CLOUD_ACCOUNT = settings.DOCUMENT_CLOUD_ACCOUNT DOCUMENT_CLOUD_PROJECT = settings.DOCUMENT_CLOUD_PROJECT ATTACHMENT_PUBLISH_URL = 'https://cityhallmonitor.knightlab.com/documents/%d' DOCCLOUD_RESERVED_KEYS = set([ 'person', 'organization', 'place', 'term', 'email', 'phone', 'city', 'state', 'country', 'title', 'description', 'source', 'account', 'group', 'project', 'projectid', 'document', 'access', 'filter']) """ Document types from the document names seem to be a bit unreliable. Thus, we are currently not using these, but the tentative mapping is here for reference. """ DOCUMENT_TYPES = ( ('SA', 'Substitute Appointment'), ('SO', 'Substitute Ordinance'), ('SR', 'Substitute Resolution'), ('SOr', 'Substitute Order'), ('Or', 'Order'), ('F', 'Filing'), # ?? seems to be used for 'Communication' matter type ('O', 'Ordinance'), ('R', 'Resolution'), ('CL', 'Claim'), ('F', 'Filed Matter'), ('A', 'Appointment'), ) DOCUMENT_TYPE_MATCHER = re.compile('^(%s)\d+-\d+(\.(pdf|rtf))?$' % '|'.join( [code for code,name in DOCUMENT_TYPES])) class DocumentSyncException(Exception): pass class Command(BaseCommand): help = 'Upload updated attachment files from the Chicago Legistar API to DocumentCloud' _client = None def client(self): if self._client is None: self._client = DocumentCloud( settings.DOCUMENT_CLOUD_USERNAME, settings.DOCUMENT_CLOUD_PASSWORD) return self._client def add_arguments(self, parser): parser.add_argument('matter_id', nargs='?', help='Matter ID') parser.add_argument('--all', action='store_true', help='Process all attachments.') parser.add_argument('--deleteall', action='store_true', help='WARNING: Deletes all documents in DocumentCloud. '\ 'Other flags are ignored and no new documents processed.') def get_project(self, name): return self.client().projects.get_by_title(name) def upload_to_doccloud(self, url, title, data=None, project_id=None, published_url=None): """Upload a document to DocumentCloud""" if data is not None: assert not(set(data).intersection(DOCCLOUD_RESERVED_KEYS)) new_document = self.client().documents.upload( url, title, access='public', source=url, data=data, project=project_id, published_url=published_url ) logger.debug(new_document.get_pdf_url()) return new_document def search(self, query): """Seach DocumentCloud""" r = self.client().documents.search(query) assert type(r) is list, \ 'DocumentCloud search response is %s: %s' % (type(r), repr(r)) return r def fetch(self, attachment, project_id): """Upload attachment file to DocumentCloud""" r = self.search('account:%s project:"%s" source: "%s"' % ( DOCUMENT_CLOUD_ACCOUNT, DOCUMENT_CLOUD_PROJECT, attachment.hyperlink)) logger.debug('Result from DocumentCloud is %s' % str(r)) if r: logger.debug( 'Document exists in DocumentCloud. 
Not transferring: %s', attachment.hyperlink) if len(r) > 1: raise DocumentSyncException( 'Multiple instances exist in DocumentCloud for '\ 'document: %s' % attachment.hyperlink) else: logger.info('Transferring to DocumentCloud: %s', attachment.hyperlink) data = { 'MatterAttachmentId': str(attachment.id), 'MatterId': str(attachment.matter.id), 'ops:DescriptionProcessed': '0' } published_url = ATTACHMENT_PUBLISH_URL % attachment.id doc = self.upload_to_doccloud( attachment.hyperlink, attachment.name, data=data, project_id=project_id, published_url=published_url) attachment.link_obtained_at = timezone.now() attachment.dc_id = doc.id attachment.save() logger.debug( 'Updated link_obtained_at timestamp for '\ 'MatterAttachment: %s', attachment.id) def delete_all(self): """Deletes all documents for this account!!!""" self.stdout.write( 'Deleting all DocumentCloud documents for account:%s project:"%s"', DOCUMENT_CLOUD_ACCOUNT, DOCUMENT_CLOUD_PROJECT) r = self.search('account:%s project:"%s"' % ( DOCUMENT_CLOUD_ACCOUNT, DOCUMENT_CLOUD_PROJECT)) for doc in r: self.stdout.write('Deleting document: %s', doc.source) doc.delete() def handle(self, *args, **options): logger.info( 'matterid=%(matter_id)s, all=%(all)s, deleteall=%(deleteall)s', options) try: if options['deleteall']: answer = input( 'Are you sure you want to delete all documents for ' \ 'account:%s project:"%s"? [Y/n] ' % ( DOCUMENT_CLOUD_ACCOUNT, DOCUMENT_CLOUD_PROJECT)) if answer == '' or answer.lower().startswith('y'): self.delete_all() self.stdout.write('Done\n') else: self.stdout.write('Aborting\n') return project = self.get_project(DOCUMENT_CLOUD_PROJECT) q = MatterAttachment.objects.all() if options['all']: logger.info('Fetching all files') elif options['matter_id']: logger.info('Fetching files for matter ID %s', options['matter_id']) q = q.filter(matter_id=options['matter_id']) else: logger.info('Fetching new files') q = q.filter(link_obtained_at=None) for attachment in [a for a in q]: self.fetch(attachment, project.id) except Exception as e: logger.exception(str(e)) logger.info('Done\n')
license: mit | hash: -6,875,443,334,515,436,000 | line_mean: 32.817259 | line_max: 91 | alpha_frac: 0.574602 | autogenerated: false | ratio: 4.112346 | config_test: false | has_no_keywords: false | few_assignments: false

repo_name: dimara/synnefo | path: snf-cyclades-app/synnefo/logic/server_attachments.py | copies: 1 | size: 5690
# Copyright (C) 2010-2014 GRNET S.A. # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. import logging from snf_django.lib.api import faults from django.conf import settings from synnefo.logic import backend, commands from synnefo.volume import util log = logging.getLogger(__name__) def attach_volume(vm, volume): """Attach a volume to a server. The volume must be in 'AVAILABLE' status in order to be attached. Also, number of the volumes that are attached to the server must remain less than 'GANETI_MAX_DISKS_PER_INSTANCE' setting. This function will send the corresponding job to Ganeti backend and update the status of the volume to 'ATTACHING'. """ # Check volume state if volume.status not in ["AVAILABLE", "CREATING"]: raise faults.BadRequest("Cannot attach volume while volume is in" " '%s' status." % volume.status) elif volume.status == "AVAILABLE": util.assert_detachable_volume_type(volume.volume_type) # Check that disk templates are the same if volume.volume_type_id != vm.flavor.volume_type_id: msg = ("Volume and server must have the same volume type. Volume has" " volume type '%s' while server has '%s'" % (volume.volume_type_id, vm.flavor.volume_type_id)) raise faults.BadRequest(msg) # Check maximum disk per instance hard limit vm_volumes_num = vm.volumes.filter(deleted=False).count() if vm_volumes_num == settings.GANETI_MAX_DISKS_PER_INSTANCE: raise faults.BadRequest("Maximum volumes per server limit reached") if volume.status == "CREATING": action_fields = {"disks": [("add", volume, {})]} else: action_fields = None comm = commands.server_command("ATTACH_VOLUME", action_fields=action_fields) return comm(_attach_volume)(vm, volume) def _attach_volume(vm, volume): """Attach a Volume to a VM and update the Volume's status.""" util.assign_volume_to_server(vm, volume) jobid = backend.attach_volume(vm, volume) log.info("Attached volume '%s' to server '%s'. JobID: '%s'", volume.id, volume.machine_id, jobid) volume.backendjobid = jobid volume.machine = vm if volume.status == "AVAILABLE": volume.status = "ATTACHING" else: volume.status = "CREATING" volume.save() return jobid def detach_volume(vm, volume): """Detach a Volume from a VM The volume must be in 'IN_USE' status in order to be detached. Also, the root volume of the instance (index=0) can not be detached. This function will send the corresponding job to Ganeti backend and update the status of the volume to 'DETACHING'. """ util.assert_detachable_volume_type(volume.volume_type) _check_attachment(vm, volume) if volume.status not in ["IN_USE", "ERROR"]: raise faults.BadRequest("Cannot detach volume while volume is in" " '%s' status." % volume.status) if volume.index == 0: raise faults.BadRequest("Cannot detach the root volume of server %s." 
% vm) comm = commands.server_command("DETACH_VOLUME") return comm(_detach_volume)(vm, volume) def _detach_volume(vm, volume): """Detach a Volume from a VM and update the Volume's status""" jobid = backend.detach_volume(vm, volume) log.info("Detached volume '%s' from server '%s'. JobID: '%s'", volume.id, volume.machine_id, jobid) volume.backendjobid = jobid volume.status = "DETACHING" volume.save() return jobid def delete_volume(vm, volume): """Delete attached volume and update its status The volume must be in 'IN_USE' status in order to be deleted. This function will send the corresponding job to Ganeti backend and update the status of the volume to 'DELETING'. """ _check_attachment(vm, volume) if volume.status not in ["IN_USE", "ERROR"]: raise faults.BadRequest("Cannot delete volume while volume is in" " '%s' status." % volume.status) if volume.index == 0: raise faults.BadRequest("Cannot delete the root volume of server %s." % vm) action_fields = {"disks": [("remove", volume, {})]} comm = commands.server_command("DELETE_VOLUME", action_fields=action_fields, for_user=volume.userid) return comm(_delete_volume)(vm, volume) def _delete_volume(vm, volume): jobid = backend.delete_volume(vm, volume) log.info("Deleted volume '%s' from server '%s'. JobID: '%s'", volume.id, volume.machine_id, jobid) volume.backendjobid = jobid util.mark_volume_as_deleted(volume) return jobid def _check_attachment(vm, volume): """Check that the Volume is attached to the VM""" if volume.machine_id != vm.id: raise faults.BadRequest("Volume '%s' is not attached to server '%s'" % (volume.id, vm.id))
license: gpl-3.0 | hash: 733,053,273,350,417,200 | line_mean: 37.445946 | line_max: 79 | alpha_frac: 0.644991 | autogenerated: false | ratio: 3.934993 | config_test: false | has_no_keywords: false | few_assignments: false

repo_name: Anstow/TeamAwesome | path: game/hud.py | copies: 1 | size: 1406
content:

import gm
import conf  # assumed: 'conf' is used below but was never imported in the original file


class PlayerHud:

    def __init__(self, player, gfx):
        # 'gfx' was read from an undefined name in the original __init__;
        # it is taken as an explicit argument here.
        self.player = player
        self.player_ident = player.player_ident
        self.player_text = "Player " + str(self.player_ident) + ": "
        self.player_score = str(player.score)
        self.pos = [20, 20]
        self.gfx = gfx
        # The original used '=' (assignment) instead of '==' in both conditions below,
        # and relied on operator precedence that made the second test always false.
        if self.player_ident % 2 == 0:
            self.pos[0] = conf.RES[0] - 128
        if (self.player_ident + 1) % 2 == 0:
            self.pos[1] = conf.RES[1] - 100

    def refresh(self):
        # The original read the undefined name 'player' here instead of self.player.
        self.player_score = str(self.player.score)
        if self.player_text is not None:
            self.gfx.rm(self.status_text)
        if self.player_score is not None:
            self.gfx.rm(self.ready_text)

    def update(self, active, playing):
        # NOTE: self.clear(), self.num and self.readypos are not defined anywhere
        # in this file in the original source.
        self.clear()
        if active == True:
            self.status_text = gm.Graphic(
                conf.GAME.render_text("menu", "Player " + str(self.num + 1),
                                      conf.P_COLOURS[self.num])[0],
                self.pos)
            if playing == False:
                self.ready_text = gm.Graphic(
                    conf.GAME.render_text("menu", "Press Start", (0xFF, 0xFF, 0xFF))[0],
                    self.readypos)
            else:  # playing == True
                self.ready_text = gm.Graphic(
                    conf.GAME.render_text("menu", "Ready!", (0x00, 0xFF, 0x00))[0],
                    self.readypos)
            self.gfx.add(self.ready_text)
            self.gfx.add(self.status_text)
        else:  # active == False
            self.status_text = gm.Graphic(
                conf.GAME.render_text("menu", "Press A", (0x99, 0x99, 0x99))[0],
                self.pos)
            self.gfx.add(self.status_text)
license: gpl-3.0 | hash: -5,930,283,141,110,601,000 | line_mean: 28.291667 | line_max: 67 | alpha_frac: 0.615932 | autogenerated: false | ratio: 2.584559 | config_test: false | has_no_keywords: false | few_assignments: false

repo_name: janisstreib/hiwi-stunden | path: hiwi_portal/migrations/0001_squashed_0017_fillerworkdustactivity_fixedworkdustactivity.py | copies: 1 | size: 9482
# -*- coding: utf-8 -*- # Generated by Django 1.11.27 on 2020-01-12 15:51 from __future__ import unicode_literals import datetime from django.conf import settings import django.contrib.auth.models import django.core.validators from django.db import migrations, models import django.db.models.deletion from django.utils.timezone import utc class Migration(migrations.Migration): replaces = [('hiwi_portal', '0001_initial'), ('hiwi_portal', '0002_auto_20151030_1355'), ('hiwi_portal', '0003_auto_20151030_1829'), ('hiwi_portal', '0004_auto_20151109_1154'), ('hiwi_portal', '0005_auto_20151110_1354'), ('hiwi_portal', '0006_auto_20151110_1511'), ('hiwi_portal', '0007_auto_20151110_1553'), ('hiwi_portal', '0008_worktime_activity'), ('hiwi_portal', '0009_auto_20151110_1609'), ('hiwi_portal', '0010_auto_20151130_1257'), ('hiwi_portal', '0011_contract_vacation'), ('hiwi_portal', '0012_auto_20151201_1035'), ('hiwi_portal', '0013_auto_20151201_1546'), ('hiwi_portal', '0014_auto_20151204_1131'), ('hiwi_portal', '0015_worklog_overwork'), ('hiwi_portal', '0016_user_work_dusted'), ('hiwi_portal', '0017_fillerworkdustactivity_fixedworkdustactivity')] initial = True dependencies = [ ] operations = [ migrations.CreateModel( name='Contract', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('hours', models.IntegerField()), ('payment', models.DecimalField(decimal_places=2, max_digits=6)), ('personell', models.CharField(choices=[(b'GF', b'Gro\xc3\x9fforschungsbereich'), (b'UF', b'Universit\xc3\xa4tsbereich')], max_length=2)), ('personell_number', models.IntegerField()), ('contract_begin', models.DateField(verbose_name=b'Vertragsstart')), ('contract_end', models.DateField(verbose_name=b'Vertragsende')), ], ), migrations.CreateModel( name='Department', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('name', models.CharField(max_length=200)), ], ), migrations.CreateModel( name='User', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('firstname', models.CharField(max_length=200)), ('lastname', models.CharField(max_length=200)), ('kitaccount', models.CharField(max_length=32, unique=True)), ('email', models.CharField(max_length=200)), ('private_email', models.CharField(max_length=200, null=True)), ('is_active', models.BooleanField(default=True)), ('last_login', models.DateTimeField()), ('phone_number', models.CharField(blank=True, max_length=15, null=True, validators=[django.core.validators.RegexValidator(message=b"Phone number must be entered in the format: '+999999999'. 
Up to 15 digits allowed.", regex=b'^\\+?1?\\d{9,15}$')])), ], managers=[ ('objects', django.contrib.auth.models.UserManager()), ], ), migrations.CreateModel( name='WorkLog', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('printed', models.BooleanField()), ('carer_signed', models.BooleanField()), ('contract', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='hiwi_portal.Contract')), ], ), migrations.CreateModel( name='WorkTime', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('hours', models.IntegerField()), ('pause', models.PositiveIntegerField(default=0)), ('begin', models.TimeField(verbose_name=b'Start')), ('end', models.TimeField(verbose_name=b'Ende')), ('work_log', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='hiwi_portal.WorkLog')), ('activity', models.CharField(default='', max_length=200)), ('date', models.DateField(default=datetime.datetime(2015, 11, 10, 16, 9, 45, 573599, tzinfo=utc))), ], ), migrations.CreateModel( name='Hiwi', fields=[ ('user_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='hiwi_portal.User')), ], bases=('hiwi_portal.user',), ), migrations.CreateModel( name='Supervisor', fields=[ ('user_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='hiwi_portal.User')), ('department', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='hiwi_portal.Department')), ], bases=('hiwi_portal.user',), ), migrations.AddField( model_name='contract', name='department', field=models.CharField(max_length=200), ), migrations.AddField( model_name='contract', name='user', field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL), ), migrations.AlterField( model_name='user', name='last_login', field=models.DateTimeField(null=True), ), migrations.AddField( model_name='user', name='notify_to_private', field=models.BooleanField(default=False), ), migrations.AlterField( model_name='user', name='private_email', field=models.CharField(max_length=200, null=True, validators=[django.core.validators.EmailValidator()]), ), migrations.AlterField( model_name='user', name='private_email', field=models.CharField(blank=True, max_length=200, null=True, validators=[django.core.validators.EmailValidator()]), ), migrations.AlterField( model_name='contract', name='personell', field=models.CharField(choices=[(b'GF', b'Gro\xc3\x9fforschungsbereich'), (b'UB', b'Universit\xc3\xa4tsbereich')], max_length=2), ), migrations.AddField( model_name='worklog', name='month', field=models.IntegerField(default=-1), preserve_default=False, ), migrations.AddField( model_name='worklog', name='year', field=models.IntegerField(default=-1), preserve_default=False, ), migrations.AlterField( model_name='worklog', name='carer_signed', field=models.BooleanField(default=False), ), migrations.AlterField( model_name='worklog', name='printed', field=models.BooleanField(default=False), ), migrations.AddField( model_name='contract', name='vacation', field=models.PositiveIntegerField(default=0), preserve_default=False, ), migrations.AlterField( model_name='contract', name='personell_number', field=models.CharField(max_length=200), ), migrations.AlterField( model_name='contract', name='hours', 
field=models.PositiveIntegerField(validators=[django.core.validators.MaxValueValidator(85)]), ), migrations.AddField( model_name='worklog', name='overWork', field=models.PositiveIntegerField(default=0), ), migrations.AddField( model_name='user', name='work_dusted', field=models.BooleanField(default=False), ), migrations.CreateModel( name='FIllerWorkDustActivity', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('description', models.CharField(max_length=200)), ('avg_length', models.IntegerField()), ('contract', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='hiwi_portal.Contract')), ], ), migrations.CreateModel( name='FixedWorkDustActivity', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('description', models.CharField(max_length=200)), ('avg_length', models.IntegerField()), ('start', models.TimeField(verbose_name=b'Start')), ('week_day', models.PositiveIntegerField(validators=[django.core.validators.MaxValueValidator(6)])), ('contract', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='hiwi_portal.Contract')), ], ), ]
license: mit | hash: -7,186,392,725,970,846,000 | line_mean: 46.41 | line_max: 772 | alpha_frac: 0.576988 | autogenerated: false | ratio: 4.124402 | config_test: false | has_no_keywords: false | few_assignments: false

repo_name: rsalvador/appium | path: appium/robot/bitbeambot-d2/robot.py | copies: 2 | size: 5977
# Copyright 2012 Appium Committers # # Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. import serial import os import pickle from time import sleep import kinematics from numpy import matrix from numpy.linalg import inv class Bot(): class Arm: def __init__(self, bot, num): self.bot = bot self.number = num self.angle = 30 def set_angle(self, new_angle): self.bot.send_command(self.number, new_angle) self.angle = new_angle def move(self, desired_angle, delay=0.1, max_movement=1000): # determine the angles between the current angle and the desired angle intermediate_angles = [] if self.angle < desired_angle: intermediate_angles = range(self.angle, desired_angle + 1) elif desired_angle < self.angle: intermediate_angles = range(self.angle, desired_angle - 1, -1) if self.angle in intermediate_angles: intermediate_angles.remove(self.angle) # only move the arm by the maximum amount if len(intermediate_angles) > max_movement: intermediate_angles = intermediate_angles[:max_movement] # move the arm 1 degree at a time for angle in intermediate_angles: self.set_angle(angle) sleep(delay) def __init__(self, serial_port=None, calibrationFile=None): # auto-detect serial port if serial_port is None: for device in os.listdir('/dev/'): if 'tty' in device and 'usb' in device: serial_port = os.path.join('/dev/', device) # load calibration data if it's supplied self.cal = None if calibrationFile is not None: self.cal = pickle.load(open( calibrationFile, 'rb' )) print str(self.cal['matrix']) self.serial = serial.Serial(serial_port,9600,timeout=1) self.is_calibrated = self.cal is not None self.a = Bot.Arm(self, "1") self.b = Bot.Arm(self, "2") self.c = Bot.Arm(self, "3") def position(self): return (self.a.angle, self.b.angle, self.c.angle) def set_position(self, position): offset = len(position) - 3 t1 = int(round(position[offset])) t2 = int(round(position[offset+1])) t3 = int(round(position[offset+2])) self.move(t1,t2,t3,0) def send_command(self, command, position='0'): self.serial.write(command) self.serial.write(chr(position)) def move(self, a_angle, b_angle, c_angle, delay=0): if delay <= 0: print str(self.position()) self.a.set_angle(a_angle) self.b.set_angle(b_angle) self.c.set_angle(c_angle) else: while self.a.angle is not a_angle and self.b.angle is not b_angle and self.c.angle is not c_angle: self.a.move(a_angle,0,1) self.b.move(b_angle,0,1) self.c.move(c_angle,0,1) print str(self.position()) sleep(delay) def tap(self,x,y): # calculate positions delta_x = x - self.cal['screen_center'][0] delta_y = y - self.cal['screen_center'][1] robot_deltas = self.ipad_to_robot((delta_x, delta_y)) intermediate_arm_position = self.inverse_k(self.cal['origin_point'][0]+robot_deltas[0] , self.cal['origin_point'][1]+robot_deltas[1], self.cal['origin_point'][2]) final_arm_position = 
self.inverse_k(self.cal['contact_point'][0]+robot_deltas[0] , self.cal['contact_point'][1]+robot_deltas[1], self.cal['contact_point'][2]) # check for errors if intermediate_arm_position[0] == 1: raise Exception('Intermediate Arm Position Is Invalid: '+ str(intermediate_arm_position)) if final_arm_position[0] == 1: raise Exception('Final Arm Position Is Invalid: '+ str(final_arm_position)) # perform motion origin_position = self.inverse_k(self.cal['origin_point'][0], self.cal['origin_point'][1], self.cal['origin_point'][2]) self.set_position(origin_position) sleep(1.5) self.set_position(intermediate_arm_position) sleep(1.5) self.set_position(final_arm_position) sleep(1.5) self.set_position(intermediate_arm_position) sleep(1.5) self.set_position(origin_position) # Forward kinematics: (theta1, theta2, theta3) -> (x0, y0, z0) # Returned {error code, x0,y0,z0} def forward_k(self, theta1, theta2, theta3): return kinematics.forward(theta1, theta2, theta3) # Forward kinematics: (x, y, z) -> (theta1, theta2, theta3) # Returned {error code, theta1, theta2, theta3} def inverse_k(self, x, y, z): return kinematics.inverse(x,y,z) def robot_to_ipad(self, robot_distance): S = matrix(self.cal['matrix']) return (matrix([robot_distance[0], robot_distance[1]]) * S).tolist()[0] def ipad_to_robot(self, ipad_distance): I = inv(matrix(self.cal['matrix'])) return (matrix([ipad_distance[0], ipad_distance[1]]) * I).tolist()[0]
license: apache-2.0 | hash: 612,778,788,196,825,100 | line_mean: 38.582781 | line_max: 170 | alpha_frac: 0.61034 | autogenerated: false | ratio: 3.602773 | config_test: false | has_no_keywords: false | few_assignments: false

repo_name: HexHive/datashield | path: compiler/llvm/tools/clang/docs/tools/dump_ast_matchers.py | copies: 1 | size: 14172
#!/usr/bin/env python # A tool to parse ASTMatchers.h and update the documentation in # ../LibASTMatchersReference.html automatically. Run from the # directory in which this file is located to update the docs. import collections import re import urllib2 MATCHERS_FILE = '../../include/clang/ASTMatchers/ASTMatchers.h' # Each matcher is documented in one row of the form: # result | name | argA # The subsequent row contains the documentation and is hidden by default, # becoming visible via javascript when the user clicks the matcher name. TD_TEMPLATE=""" <tr><td>%(result)s</td><td class="name" onclick="toggle('%(id)s')"><a name="%(id)sAnchor">%(name)s</a></td><td>%(args)s</td></tr> <tr><td colspan="4" class="doc" id="%(id)s"><pre>%(comment)s</pre></td></tr> """ # We categorize the matchers into these three categories in the reference: node_matchers = {} narrowing_matchers = {} traversal_matchers = {} # We output multiple rows per matcher if the matcher can be used on multiple # node types. Thus, we need a new id per row to control the documentation # pop-up. ids[name] keeps track of those ids. ids = collections.defaultdict(int) # Cache for doxygen urls we have already verified. doxygen_probes = {} def esc(text): """Escape any html in the given text.""" text = re.sub(r'&', '&amp;', text) text = re.sub(r'<', '&lt;', text) text = re.sub(r'>', '&gt;', text) def link_if_exists(m): name = m.group(1) url = 'http://clang.llvm.org/doxygen/classclang_1_1%s.html' % name if url not in doxygen_probes: try: print 'Probing %s...' % url urllib2.urlopen(url) doxygen_probes[url] = True except: doxygen_probes[url] = False if doxygen_probes[url]: return r'Matcher&lt;<a href="%s">%s</a>&gt;' % (url, name) else: return m.group(0) text = re.sub( r'Matcher&lt;([^\*&]+)&gt;', link_if_exists, text) return text def extract_result_types(comment): """Extracts a list of result types from the given comment. We allow annotations in the comment of the matcher to specify what nodes a matcher can match on. Those comments have the form: Usable as: Any Matcher | (Matcher<T1>[, Matcher<t2>[, ...]]) Returns ['*'] in case of 'Any Matcher', or ['T1', 'T2', ...]. Returns the empty list if no 'Usable as' specification could be parsed. """ result_types = [] m = re.search(r'Usable as: Any Matcher[\s\n]*$', comment, re.S) if m: return ['*'] while True: m = re.match(r'^(.*)Matcher<([^>]+)>\s*,?[\s\n]*$', comment, re.S) if not m: if re.search(r'Usable as:\s*$', comment): return result_types else: return None result_types += [m.group(2)] comment = m.group(1) def strip_doxygen(comment): """Returns the given comment without \-escaped words.""" # If there is only a doxygen keyword in the line, delete the whole line. comment = re.sub(r'^\\[^\s]+\n', r'', comment, flags=re.M) # If there is a doxygen \see command, change the \see prefix into "See also:". # FIXME: it would be better to turn this into a link to the target instead. comment = re.sub(r'\\see', r'See also:', comment) # Delete the doxygen command and the following whitespace. 
comment = re.sub(r'\\[^\s]+\s+', r'', comment) return comment def unify_arguments(args): """Gets rid of anything the user doesn't care about in the argument list.""" args = re.sub(r'internal::', r'', args) args = re.sub(r'const\s+', r'', args) args = re.sub(r'&', r' ', args) args = re.sub(r'(^|\s)M\d?(\s)', r'\1Matcher<*>\2', args) return args def add_matcher(result_type, name, args, comment, is_dyncast=False): """Adds a matcher to one of our categories.""" if name == 'id': # FIXME: Figure out whether we want to support the 'id' matcher. return matcher_id = '%s%d' % (name, ids[name]) ids[name] += 1 args = unify_arguments(args) matcher_html = TD_TEMPLATE % { 'result': esc('Matcher<%s>' % result_type), 'name': name, 'args': esc(args), 'comment': esc(strip_doxygen(comment)), 'id': matcher_id, } if is_dyncast: node_matchers[result_type + name] = matcher_html # Use a heuristic to figure out whether a matcher is a narrowing or # traversal matcher. By default, matchers that take other matchers as # arguments (and are not node matchers) do traversal. We specifically # exclude known narrowing matchers that also take other matchers as # arguments. elif ('Matcher<' not in args or name in ['allOf', 'anyOf', 'anything', 'unless']): narrowing_matchers[result_type + name + esc(args)] = matcher_html else: traversal_matchers[result_type + name + esc(args)] = matcher_html def act_on_decl(declaration, comment, allowed_types): """Parse the matcher out of the given declaration and comment. If 'allowed_types' is set, it contains a list of node types the matcher can match on, as extracted from the static type asserts in the matcher definition. """ if declaration.strip(): # Node matchers are defined by writing: # VariadicDynCastAllOfMatcher<ResultType, ArgumentType> name; m = re.match(r""".*Variadic(?:DynCast)?AllOfMatcher\s*< \s*([^\s,]+)\s*(?:, \s*([^\s>]+)\s*)?> \s*([^\s;]+)\s*;\s*$""", declaration, flags=re.X) if m: result, inner, name = m.groups() if not inner: inner = result add_matcher(result, name, 'Matcher<%s>...' % inner, comment, is_dyncast=True) return # Parse the various matcher definition macros. m = re.match(""".*AST_TYPE_MATCHER\( \s*([^\s,]+\s*), \s*([^\s,]+\s*) \)\s*;\s*$""", declaration, flags=re.X) if m: inner, name = m.groups() add_matcher('Type', name, 'Matcher<%s>...' % inner, comment, is_dyncast=True) # FIXME: re-enable once we have implemented casting on the TypeLoc # hierarchy. # add_matcher('TypeLoc', '%sLoc' % name, 'Matcher<%sLoc>...' % inner, # comment, is_dyncast=True) return m = re.match(""".*AST_TYPE(LOC)?_TRAVERSE_MATCHER\( \s*([^\s,]+\s*), \s*(?:[^\s,]+\s*), \s*AST_POLYMORPHIC_SUPPORTED_TYPES\(([^)]*)\) \)\s*;\s*$""", declaration, flags=re.X) if m: loc, name, results = m.groups()[0:3] result_types = [r.strip() for r in results.split(',')] comment_result_types = extract_result_types(comment) if (comment_result_types and sorted(result_types) != sorted(comment_result_types)): raise Exception('Inconsistent documentation for: %s' % name) for result_type in result_types: add_matcher(result_type, name, 'Matcher<Type>', comment) if loc: add_matcher('%sLoc' % result_type, '%sLoc' % name, 'Matcher<TypeLoc>', comment) return m = re.match(r"""^\s*AST_POLYMORPHIC_MATCHER(_P)?(.?)(?:_OVERLOAD)?\( \s*([^\s,]+)\s*, \s*AST_POLYMORPHIC_SUPPORTED_TYPES\(([^)]*)\) (?:,\s*([^\s,]+)\s* ,\s*([^\s,]+)\s*)? (?:,\s*([^\s,]+)\s* ,\s*([^\s,]+)\s*)? (?:,\s*\d+\s*)? 
\)\s*{\s*$""", declaration, flags=re.X) if m: p, n, name, results = m.groups()[0:4] args = m.groups()[4:] result_types = [r.strip() for r in results.split(',')] if allowed_types and allowed_types != result_types: raise Exception('Inconsistent documentation for: %s' % name) if n not in ['', '2']: raise Exception('Cannot parse "%s"' % declaration) args = ', '.join('%s %s' % (args[i], args[i+1]) for i in range(0, len(args), 2) if args[i]) for result_type in result_types: add_matcher(result_type, name, args, comment) return m = re.match(r"""^\s*AST_MATCHER_FUNCTION(_P)?(.?)(?:_OVERLOAD)?\( (?:\s*([^\s,]+)\s*,)? \s*([^\s,]+)\s* (?:,\s*([^\s,]+)\s* ,\s*([^\s,]+)\s*)? (?:,\s*([^\s,]+)\s* ,\s*([^\s,]+)\s*)? (?:,\s*\d+\s*)? \)\s*{\s*$""", declaration, flags=re.X) if m: p, n, result, name = m.groups()[0:4] args = m.groups()[4:] if n not in ['', '2']: raise Exception('Cannot parse "%s"' % declaration) args = ', '.join('%s %s' % (args[i], args[i+1]) for i in range(0, len(args), 2) if args[i]) add_matcher(result, name, args, comment) return m = re.match(r"""^\s*AST_MATCHER(_P)?(.?)(?:_OVERLOAD)?\( (?:\s*([^\s,]+)\s*,)? \s*([^\s,]+)\s* (?:,\s*([^\s,]+)\s* ,\s*([^\s,]+)\s*)? (?:,\s*([^\s,]+)\s* ,\s*([^\s,]+)\s*)? (?:,\s*\d+\s*)? \)\s*{\s*$""", declaration, flags=re.X) if m: p, n, result, name = m.groups()[0:4] args = m.groups()[4:] if not result: if not allowed_types: raise Exception('Did not find allowed result types for: %s' % name) result_types = allowed_types else: result_types = [result] if n not in ['', '2']: raise Exception('Cannot parse "%s"' % declaration) args = ', '.join('%s %s' % (args[i], args[i+1]) for i in range(0, len(args), 2) if args[i]) for result_type in result_types: add_matcher(result_type, name, args, comment) return # Parse ArgumentAdapting matchers. m = re.match( r"""^.*ArgumentAdaptingMatcherFunc<.*>\s*(?:LLVM_ATTRIBUTE_UNUSED\s*) ([a-zA-Z]*)\s*=\s*{};$""", declaration, flags=re.X) if m: name = m.groups()[0] add_matcher('*', name, 'Matcher<*>', comment) return # Parse Variadic operator matchers. m = re.match( r"""^.*VariadicOperatorMatcherFunc\s*<\s*([^,]+),\s*([^\s>]+)\s*>\s* ([a-zA-Z]*)\s*=\s*{.*};$""", declaration, flags=re.X) if m: min_args, max_args, name = m.groups()[:3] if max_args == '1': add_matcher('*', name, 'Matcher<*>', comment) return elif max_args == 'UINT_MAX': add_matcher('*', name, 'Matcher<*>, ..., Matcher<*>', comment) return # Parse free standing matcher functions, like: # Matcher<ResultType> Name(Matcher<ArgumentType> InnerMatcher) { m = re.match(r"""^\s*(.*)\s+ ([^\s\(]+)\s*\( (.*) \)\s*{""", declaration, re.X) if m: result, name, args = m.groups() args = ', '.join(p.strip() for p in args.split(',')) m = re.match(r'.*\s+internal::(Bindable)?Matcher<([^>]+)>$', result) if m: result_types = [m.group(2)] else: result_types = extract_result_types(comment) if not result_types: if not comment: # Only overloads don't have their own doxygen comments; ignore those. print 'Ignoring "%s"' % name else: print 'Cannot determine result type for "%s"' % name else: for result_type in result_types: add_matcher(result_type, name, args, comment) else: print '*** Unparsable: "' + declaration + '" ***' def sort_table(matcher_type, matcher_map): """Returns the sorted html table for the given row map.""" table = '' for key in sorted(matcher_map.keys()): table += matcher_map[key] + '\n' return ('<!-- START_%(type)s_MATCHERS -->\n' + '%(table)s' + '<!--END_%(type)s_MATCHERS -->') % { 'type': matcher_type, 'table': table, } # Parse the ast matchers. 
# We alternate between two modes: # body = True: We parse the definition of a matcher. We need # to parse the full definition before adding a matcher, as the # definition might contain static asserts that specify the result # type. # body = False: We parse the comments and declaration of the matcher. comment = '' declaration = '' allowed_types = [] body = False for line in open(MATCHERS_FILE).read().splitlines(): if body: if line.strip() and line[0] == '}': if declaration: act_on_decl(declaration, comment, allowed_types) comment = '' declaration = '' allowed_types = [] body = False else: m = re.search(r'is_base_of<([^,]+), NodeType>', line) if m and m.group(1): allowed_types += [m.group(1)] continue if line.strip() and line.lstrip()[0] == '/': comment += re.sub(r'/+\s?', '', line) + '\n' else: declaration += ' ' + line if ((not line.strip()) or line.rstrip()[-1] == ';' or (line.rstrip()[-1] == '{' and line.rstrip()[-3:] != '= {')): if line.strip() and line.rstrip()[-1] == '{': body = True else: act_on_decl(declaration, comment, allowed_types) comment = '' declaration = '' allowed_types = [] node_matcher_table = sort_table('DECL', node_matchers) narrowing_matcher_table = sort_table('NARROWING', narrowing_matchers) traversal_matcher_table = sort_table('TRAVERSAL', traversal_matchers) reference = open('../LibASTMatchersReference.html').read() reference = re.sub(r'<!-- START_DECL_MATCHERS.*END_DECL_MATCHERS -->', '%s', reference, flags=re.S) % node_matcher_table reference = re.sub(r'<!-- START_NARROWING_MATCHERS.*END_NARROWING_MATCHERS -->', '%s', reference, flags=re.S) % narrowing_matcher_table reference = re.sub(r'<!-- START_TRAVERSAL_MATCHERS.*END_TRAVERSAL_MATCHERS -->', '%s', reference, flags=re.S) % traversal_matcher_table with open('../LibASTMatchersReference.html', 'wb') as output: output.write(reference)
license: gpl-3.0 | hash: 5,687,331,996,876,751,000 | line_mean: 36.893048 | line_max: 129 | alpha_frac: 0.542549 | autogenerated: false | ratio: 3.440641 | config_test: false | has_no_keywords: false | few_assignments: false

repo_name: mrkm4ntr/incubator-airflow | path: docs/exts/docs_build/lint_checks.py | copies: 1 | size: 8575
# Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. import ast import os import re from glob import glob from itertools import chain from typing import Iterable, List, Optional, Set from docs.exts.docs_build.errors import DocBuildError # pylint: disable=no-name-in-module ROOT_PROJECT_DIR = os.path.abspath( os.path.join(os.path.dirname(os.path.realpath(__file__)), os.pardir, os.pardir, os.pardir) ) ROOT_PACKAGE_DIR = os.path.join(ROOT_PROJECT_DIR, "airflow") DOCS_DIR = os.path.join(ROOT_PROJECT_DIR, "docs") def find_existing_guide_operator_names(src_dir: str) -> Set[str]: """ Find names of existing operators. :return names of existing operators. """ operator_names = set() paths = glob(f"{src_dir}/**/*.rst", recursive=True) for path in paths: with open(path) as f: operator_names |= set(re.findall(".. _howto/operator:(.+?):", f.read())) return operator_names def extract_ast_class_def_by_name(ast_tree, class_name): """ Extracts class definition by name :param ast_tree: AST tree :param class_name: name of the class. :return: class node found """ class ClassVisitor(ast.NodeVisitor): """Visitor.""" def __init__(self): self.found_class_node = None def visit_ClassDef(self, node): # pylint: disable=invalid-name """ Visit class definition. :param node: node. :return: """ if node.name == class_name: self.found_class_node = node visitor = ClassVisitor() visitor.visit(ast_tree) return visitor.found_class_node def check_guide_links_in_operator_descriptions() -> List[DocBuildError]: """Check if there are links to guides in operator's descriptions.""" # TODO: We should also check the guides in the provider documentations. # For now, we are only checking the core documentation. # This is easiest to do after the content has been fully migrated. build_errors = [] def generate_build_error(path, line_no, operator_name): return DocBuildError( package_name=None, file_path=path, line_no=line_no, message=( f"Link to the guide is missing in operator's description: {operator_name}.\n" f"Please add link to the guide to the description in the following form:\n" f"\n" f".. 
seealso::\n" f" For more information on how to use this operator, take a look at the guide:\n" f" :ref:`apache-airflow:howto/operator:{operator_name}`\n" ), ) # Extract operators for which there are existing .rst guides operator_names = find_existing_guide_operator_names(f"{DOCS_DIR}/howto/operator") # Extract all potential python modules that can contain operators python_module_paths = chain( glob(f"{ROOT_PACKAGE_DIR}/operators/*.py"), glob(f"{ROOT_PACKAGE_DIR}/sensors/*.py"), glob(f"{ROOT_PACKAGE_DIR}/providers/**/operators/*.py", recursive=True), glob(f"{ROOT_PACKAGE_DIR}/providers/**/sensors/*.py", recursive=True), glob(f"{ROOT_PACKAGE_DIR}/providers/**/transfers/*.py", recursive=True), ) for py_module_path in python_module_paths: with open(py_module_path) as f: py_content = f.read() if "This module is deprecated" in py_content: continue for existing_operator in operator_names: if f"class {existing_operator}" not in py_content: continue # This is a potential file with necessary class definition. # To make sure it's a real Python class definition, we build AST tree ast_tree = ast.parse(py_content) class_def = extract_ast_class_def_by_name(ast_tree, existing_operator) if class_def is None: continue docstring = ast.get_docstring(class_def) if "This class is deprecated." in docstring: continue if f":ref:`apache-airflow:howto/operator:{existing_operator}`" in ast.get_docstring( class_def ) or f":ref:`howto/operator:{existing_operator}`" in ast.get_docstring(class_def): continue build_errors.append(generate_build_error(py_module_path, class_def.lineno, existing_operator)) return build_errors def assert_file_not_contains(file_path: str, pattern: str, message: str) -> Optional[DocBuildError]: """ Asserts that file does not contain the pattern. Return message error if it does. :param file_path: file :param pattern: pattern :param message: message to return """ with open(file_path, "rb", 0) as doc_file: pattern_compiled = re.compile(pattern) for num, line in enumerate(doc_file, 1): line_decode = line.decode() if re.search(pattern_compiled, line_decode): return DocBuildError(file_path=file_path, line_no=num, message=message) return None def filter_file_list_by_pattern(file_paths: Iterable[str], pattern: str) -> List[str]: """ Filters file list to those tha content matches the pattern :param file_paths: file paths to check :param pattern: pattern to match :return: list of files matching the pattern """ output_paths = [] pattern_compiled = re.compile(pattern) for file_path in file_paths: with open(file_path, "rb", 0) as text_file: text_file_content = text_file.read().decode() if re.findall(pattern_compiled, text_file_content): output_paths.append(file_path) return output_paths def find_modules(deprecated_only: bool = False) -> Set[str]: """ Finds all modules. :param deprecated_only: whether only deprecated modules should be found. 
:return: set of all modules found """ file_paths = glob(f"{ROOT_PACKAGE_DIR}/**/*.py", recursive=True) # Exclude __init__.py file_paths = [f for f in file_paths if not f.endswith("__init__.py")] if deprecated_only: file_paths = filter_file_list_by_pattern(file_paths, r"This module is deprecated.") # Make path relative file_paths = [os.path.relpath(f, ROOT_PROJECT_DIR) for f in file_paths] # Convert filename to module modules_names = {file_path.rpartition(".")[0].replace("/", ".") for file_path in file_paths} return modules_names def check_exampleinclude_for_example_dags() -> List[DocBuildError]: """Checks all exampleincludes for example dags.""" all_docs_files = glob(f"${DOCS_DIR}/**/*rst", recursive=True) build_errors = [] for doc_file in all_docs_files: build_error = assert_file_not_contains( file_path=doc_file, pattern=r"literalinclude::.+example_dags", message=( "literalinclude directive is prohibited for example DAGs. \n" "You should use the exampleinclude directive to include example DAGs." ), ) if build_error: build_errors.append(build_error) return build_errors def check_enforce_code_block() -> List[DocBuildError]: """Checks all code:: blocks.""" all_docs_files = glob(f"{DOCS_DIR}/**/*rst", recursive=True) build_errors = [] for doc_file in all_docs_files: build_error = assert_file_not_contains( file_path=doc_file, pattern=r"^.. code::", message=( "We recommend using the code-block directive instead of the code directive. " "The code-block directive is more feature-full." ), ) if build_error: build_errors.append(build_error) return build_errors
apache-2.0
-1,313,190,920,077,928,000
36.609649
106
0.63207
false
3.937098
false
false
false
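The lint_checks.py record above walks operator modules with the ast module and flags classes whose docstrings lack a guide :ref: link. A small, runnable sketch of that docstring check follows; the module source and operator name are invented for the example.

import ast

# Invented module source standing in for one of the operator files scanned by
# check_guide_links_in_operator_descriptions(); the guide label is made up too.
source = '''
class MyExampleOperator:
    """Does one thing.

    .. seealso::
        For more information on how to use this operator, take a look at the guide:
        :ref:`apache-airflow:howto/operator:MyExampleOperator`
    """
'''

tree = ast.parse(source)

# Locate the class definition by name, then look for the guide link in its
# docstring, the same test the record's code applies to real operator classes.
class_def = next(
    node for node in ast.walk(tree)
    if isinstance(node, ast.ClassDef) and node.name == "MyExampleOperator"
)
docstring = ast.get_docstring(class_def) or ""
print(":ref:`apache-airflow:howto/operator:MyExampleOperator`" in docstring)  # True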
baloo/shinken
shinken/external_command.py
1
78581
#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (C) 2009-2011 : # Gabes Jean, naparuba@gmail.com # Gerhard Lausser, Gerhard.Lausser@consol.de # Gregory Starck, g.starck@gmail.com # Hartmut Goebel, h.goebel@goebel-consult.de # # This file is part of Shinken. # # Shinken is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Shinken is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with Shinken. If not, see <http://www.gnu.org/licenses/>. import os import time from shinken.util import to_int, to_bool, safe_print from shinken.downtime import Downtime from shinken.contactdowntime import ContactDowntime from shinken.comment import Comment from shinken.commandcall import CommandCall from shinken.log import logger from shinken.pollerlink import PollerLink MODATTR_NONE = 0 MODATTR_NOTIFICATIONS_ENABLED = 1 MODATTR_ACTIVE_CHECKS_ENABLED = 2 MODATTR_PASSIVE_CHECKS_ENABLED = 4 MODATTR_EVENT_HANDLER_ENABLED = 8 MODATTR_FLAP_DETECTION_ENABLED = 16 MODATTR_FAILURE_PREDICTION_ENABLED = 32 MODATTR_PERFORMANCE_DATA_ENABLED = 64 MODATTR_OBSESSIVE_HANDLER_ENABLED = 128 MODATTR_EVENT_HANDLER_COMMAND = 256 MODATTR_CHECK_COMMAND = 512 MODATTR_NORMAL_CHECK_INTERVAL = 1024 MODATTR_RETRY_CHECK_INTERVAL = 2048 MODATTR_MAX_CHECK_ATTEMPTS = 4096 MODATTR_FRESHNESS_CHECKS_ENABLED = 8192 MODATTR_CHECK_TIMEPERIOD = 16384 MODATTR_CUSTOM_VARIABLE = 32768 MODATTR_NOTIFICATION_TIMEPERIOD = 65536 """ TODO : Add some comment about this class for the doc""" class ExternalCommand: my_type = 'externalcommand' def __init__(self, cmd_line): self.cmd_line = cmd_line """ TODO : Add some comment about this class for the doc""" class ExternalCommandManager: commands = { 'CHANGE_CONTACT_MODSATTR' : {'global' : True, 'args' : ['contact', None]}, 'CHANGE_CONTACT_MODHATTR' : {'global' : True, 'args' : ['contact', None]}, 'CHANGE_CONTACT_MODATTR' : {'global' : True, 'args' : ['contact', None]}, 'CHANGE_CONTACT_HOST_NOTIFICATION_TIMEPERIOD' : {'global' : True, 'args' : ['contact', 'time_period']}, 'ADD_SVC_COMMENT' : {'global' : False, 'args' : ['service', 'to_bool', 'author', None]}, 'ADD_HOST_COMMENT' : {'global' : False, 'args' : ['host', 'to_bool', 'author', None]}, 'ACKNOWLEDGE_SVC_PROBLEM' : {'global' : False, 'args' : ['service' , 'to_int', 'to_bool', 'to_bool', 'author', None]}, 'ACKNOWLEDGE_HOST_PROBLEM' : {'global' : False, 'args' : ['host', 'to_int', 'to_bool', 'to_bool', 'author', None]}, 'ACKNOWLEDGE_SVC_PROBLEM_EXPIRE' : {'global' : False, 'args' : ['service' , 'to_int', 'to_bool', 'to_bool', 'to_int', 'author', None]}, 'ACKNOWLEDGE_HOST_PROBLEM_EXPIRE' : {'global' : False, 'args' : ['host', 'to_int', 'to_bool', 'to_bool', 'to_int', 'author', None]}, 'CHANGE_CONTACT_SVC_NOTIFICATION_TIMEPERIOD' : {'global' : True, 'args' : ['contact', 'time_period']}, 'CHANGE_CUSTOM_CONTACT_VAR' : {'global' : True, 'args' : ['contact', None,None]}, 'CHANGE_CUSTOM_HOST_VAR' : {'global' : False, 'args' : ['host', None,None]}, 'CHANGE_CUSTOM_SVC_VAR' : {'global' : False, 'args' : ['service', None,None]}, 'CHANGE_GLOBAL_HOST_EVENT_HANDLER' : {'global' : True, 'args' : ['command']}, 
'CHANGE_GLOBAL_SVC_EVENT_HANDLER' : {'global' : True, 'args' : ['command']}, 'CHANGE_HOST_CHECK_COMMAND' : {'global' : False, 'args' : ['host', 'command']}, 'CHANGE_HOST_CHECK_TIMEPERIOD' : {'global' : False, 'args' : ['host', 'time_period']}, 'CHANGE_HOST_EVENT_HANDLER' : {'global' : False, 'args' : ['host', 'command']}, 'CHANGE_HOST_MODATTR' : {'global' : False, 'args' : ['host', 'to_int']}, 'CHANGE_MAX_HOST_CHECK_ATTEMPTS': {'global' : False, 'args' : ['host', 'to_int']}, 'CHANGE_MAX_SVC_CHECK_ATTEMPTS' : {'global' : False, 'args' : ['service', 'to_int']}, 'CHANGE_NORMAL_HOST_CHECK_INTERVAL' : {'global' : False, 'args' : ['host', 'to_int']}, 'CHANGE_NORMAL_SVC_CHECK_INTERVAL' : {'global' : False, 'args' : ['service', 'to_int']}, 'CHANGE_RETRY_HOST_CHECK_INTERVAL' : {'global' : False, 'args' : ['service', 'to_int']}, 'CHANGE_RETRY_SVC_CHECK_INTERVAL' : {'global' : False, 'args' : ['service', 'to_int']}, 'CHANGE_SVC_CHECK_COMMAND' : {'global' : False, 'args' : ['service', 'command']}, 'CHANGE_SVC_CHECK_TIMEPERIOD' : {'global' : False, 'args' : ['service', 'time_period']}, 'CHANGE_SVC_EVENT_HANDLER' : {'global' : False, 'args' : ['service', 'command']}, 'CHANGE_SVC_MODATTR' : {'global' : False, 'args' : ['service', 'to_int']}, 'CHANGE_SVC_NOTIFICATION_TIMEPERIOD' : {'global' : False, 'args' : ['service', 'time_period']}, 'DELAY_HOST_NOTIFICATION' : {'global' : False, 'args' : ['host', 'to_int']}, 'DELAY_SVC_NOTIFICATION' : {'global' : False, 'args' : ['service', 'to_int']}, 'DEL_ALL_HOST_COMMENTS' : {'global' : False, 'args' : ['host']}, 'DEL_ALL_HOST_DOWNTIMES' : {'global' : False, 'args' : ['host']}, 'DEL_ALL_SVC_COMMENTS' : {'global' : False, 'args' : ['service']}, 'DEL_ALL_SVC_DOWNTIMES' : {'global' : False, 'args' : ['service']}, 'DEL_CONTACT_DOWNTIME' : {'global' : True, 'args' : ['to_int']}, 'DEL_HOST_COMMENT' : {'global' : True, 'args' : ['to_int']}, 'DEL_HOST_DOWNTIME' : {'global' : True, 'args' : ['to_int']}, 'DEL_SVC_COMMENT' : {'global' : True, 'args' : ['to_int']}, 'DEL_SVC_DOWNTIME' : {'global' : True, 'args' : ['to_int']}, 'DISABLE_ALL_NOTIFICATIONS_BEYOND_HOST' : {'global' : False, 'args' : ['host']}, 'DISABLE_CONTACTGROUP_HOST_NOTIFICATIONS' : {'global' : True, 'args' : ['contact_group']}, 'DISABLE_CONTACTGROUP_SVC_NOTIFICATIONS' : {'global' : True, 'args' : ['contact_group']}, 'DISABLE_CONTACT_HOST_NOTIFICATIONS' : {'global' : True, 'args' : ['contact']}, 'DISABLE_CONTACT_SVC_NOTIFICATIONS' : {'global' : True, 'args' : ['contact']}, 'DISABLE_EVENT_HANDLERS' : {'global' : True, 'args' : []}, 'DISABLE_FAILURE_PREDICTION' : {'global' : True, 'args' : []}, 'DISABLE_FLAP_DETECTION' : {'global' : True, 'args' : []}, 'DISABLE_HOSTGROUP_HOST_CHECKS' : {'global' : True, 'args' : ['host_group']}, 'DISABLE_HOSTGROUP_HOST_NOTIFICATIONS' : {'global' : True, 'args' : ['host_group']}, 'DISABLE_HOSTGROUP_PASSIVE_HOST_CHECKS' : {'global' : True, 'args' : ['host_group']}, 'DISABLE_HOSTGROUP_PASSIVE_SVC_CHECKS' : {'global' : True, 'args' : ['host_group']}, 'DISABLE_HOSTGROUP_SVC_CHECKS' : {'global' : True, 'args' : ['host_group']}, 'DISABLE_HOSTGROUP_SVC_NOTIFICATIONS' : {'global' : True, 'args' : ['host_group']}, 'DISABLE_HOST_AND_CHILD_NOTIFICATIONS' : {'global' : False, 'args' : ['host']}, 'DISABLE_HOST_CHECK' : {'global' : False, 'args' : ['host']}, 'DISABLE_HOST_EVENT_HANDLER' : {'global' : False, 'args' : ['host']}, 'DISABLE_HOST_FLAP_DETECTION' : {'global' : False, 'args' : ['host']}, 'DISABLE_HOST_FRESHNESS_CHECKS' : {'global' : True, 'args' : []}, 'DISABLE_HOST_NOTIFICATIONS' : 
{'global' : False, 'args' : ['host']}, 'DISABLE_HOST_SVC_CHECKS' : {'global' : False, 'args' : ['host']}, 'DISABLE_HOST_SVC_NOTIFICATIONS' : {'global' : False, 'args' : ['host']}, 'DISABLE_NOTIFICATIONS' : {'global' : True, 'args' : []}, 'DISABLE_PASSIVE_HOST_CHECKS' : {'global' : False, 'args' : ['host']}, 'DISABLE_PASSIVE_SVC_CHECKS' : {'global' : False, 'args' : ['service']}, 'DISABLE_PERFORMANCE_DATA' : {'global' : True, 'args' : []}, 'DISABLE_SERVICEGROUP_HOST_CHECKS' : {'global' : True, 'args' : ['service_group']}, 'DISABLE_SERVICEGROUP_HOST_NOTIFICATIONS' : {'global' : True, 'args' : ['service_group']}, 'DISABLE_SERVICEGROUP_PASSIVE_HOST_CHECKS' : {'global' : True, 'args' : ['service_group']}, 'DISABLE_SERVICEGROUP_PASSIVE_SVC_CHECKS' : {'global' : True, 'args' : ['service_group']}, 'DISABLE_SERVICEGROUP_SVC_CHECKS' : {'global' : True, 'args' : ['service_group']}, 'DISABLE_SERVICEGROUP_SVC_NOTIFICATIONS' : {'global' : True, 'args' : ['service_group']}, 'DISABLE_SERVICE_FLAP_DETECTION' : {'global' : False, 'args' : ['service']}, 'DISABLE_SERVICE_FRESHNESS_CHECKS' : {'global' : True, 'args' : []}, 'DISABLE_SVC_CHECK' : {'global' : False, 'args' : ['service']}, 'DISABLE_SVC_EVENT_HANDLER' : {'global' : False, 'args' : ['service']}, 'DISABLE_SVC_FLAP_DETECTION' : {'global' : False, 'args' : ['service']}, 'DISABLE_SVC_NOTIFICATIONS' : {'global' : False, 'args' : ['service']}, 'ENABLE_ALL_NOTIFICATIONS_BEYOND_HOST' : {'global' : False, 'args' : ['host']}, 'ENABLE_CONTACTGROUP_HOST_NOTIFICATIONS' : {'global' : True, 'args' : ['contact_group']}, 'ENABLE_CONTACTGROUP_SVC_NOTIFICATIONS' : {'global' : True, 'args' : ['contact_group']}, 'ENABLE_CONTACT_HOST_NOTIFICATIONS' : {'global' : True, 'args' : ['contact']}, 'ENABLE_CONTACT_SVC_NOTIFICATIONS' : {'global' : True, 'args' : ['contact']}, 'ENABLE_EVENT_HANDLERS' : {'global' : True, 'args' : []}, 'ENABLE_FAILURE_PREDICTION' : {'global' : True, 'args' : []}, 'ENABLE_FLAP_DETECTION' : {'global' : True, 'args' : []}, 'ENABLE_HOSTGROUP_HOST_CHECKS' : {'global' : True, 'args' : ['host_group']}, 'ENABLE_HOSTGROUP_HOST_NOTIFICATIONS' : {'global' : True, 'args' : ['host_group']}, 'ENABLE_HOSTGROUP_PASSIVE_HOST_CHECKS' : {'global' : True, 'args' : ['host_group']}, 'ENABLE_HOSTGROUP_PASSIVE_SVC_CHECKS' : {'global' : True, 'args' : ['host_group']}, 'ENABLE_HOSTGROUP_SVC_CHECKS' : {'global' : True, 'args' : ['host_group']}, 'ENABLE_HOSTGROUP_SVC_NOTIFICATIONS' : {'global' : True, 'args' : ['host_group']}, 'ENABLE_HOST_AND_CHILD_NOTIFICATIONS' : {'global' : False, 'args' : ['host']}, 'ENABLE_HOST_CHECK' : {'global' : False, 'args' : ['host']}, 'ENABLE_HOST_EVENT_HANDLER' : {'global' : False, 'args' : ['host']}, 'ENABLE_HOST_FLAP_DETECTION' : {'global' : False, 'args' : ['host']}, 'ENABLE_HOST_FRESHNESS_CHECKS' : {'global' : True, 'args' : []}, 'ENABLE_HOST_NOTIFICATIONS' : {'global' : False, 'args' : ['host']}, 'ENABLE_HOST_SVC_CHECKS' : {'global' : False, 'args' : ['host']}, 'ENABLE_HOST_SVC_NOTIFICATIONS' : {'global' : False, 'args' : ['host']}, 'ENABLE_NOTIFICATIONS' : {'global' : True, 'args' : []}, 'ENABLE_PASSIVE_HOST_CHECKS' : {'global' : False, 'args' : ['host']}, 'ENABLE_PASSIVE_SVC_CHECKS' : {'global' : False, 'args' : ['service']}, 'ENABLE_PERFORMANCE_DATA' : {'global' : True, 'args' : []}, 'ENABLE_SERVICEGROUP_HOST_CHECKS' : {'global' : True, 'args' : ['service_group']}, 'ENABLE_SERVICEGROUP_HOST_NOTIFICATIONS' : {'global' : True, 'args' : ['service_group']}, 'ENABLE_SERVICEGROUP_PASSIVE_HOST_CHECKS' : {'global' : True, 'args' : 
['service_group']}, 'ENABLE_SERVICEGROUP_PASSIVE_SVC_CHECKS' : {'global' : True, 'args' : ['service_group']}, 'ENABLE_SERVICEGROUP_SVC_CHECKS' : {'global' : True, 'args' : ['service_group']}, 'ENABLE_SERVICEGROUP_SVC_NOTIFICATIONS' : {'global' : True, 'args' : ['service_group']}, 'ENABLE_SERVICE_FRESHNESS_CHECKS' : {'global' : True, 'args' : []}, 'ENABLE_SVC_CHECK': {'global' : False, 'args' : ['service']}, 'ENABLE_SVC_EVENT_HANDLER' : {'global' : False, 'args' : ['service']}, 'ENABLE_SVC_FLAP_DETECTION' : {'global' : False, 'args' : ['service']}, 'ENABLE_SVC_NOTIFICATIONS' : {'global' : False, 'args' : ['service']}, 'PROCESS_FILE' : {'global' : True, 'args' : [None, 'to_bool']}, 'PROCESS_HOST_CHECK_RESULT' : {'global' : False, 'args' : ['host', 'to_int', None]}, 'PROCESS_SERVICE_CHECK_RESULT' : {'global' : False, 'args' : ['service', 'to_int', None]}, 'READ_STATE_INFORMATION' : {'global' : True, 'args' : []}, 'REMOVE_HOST_ACKNOWLEDGEMENT' : {'global' : False, 'args' : ['host']}, 'REMOVE_SVC_ACKNOWLEDGEMENT' : {'global' : False, 'args' : ['service']}, 'RESTART_PROGRAM' : {'global' : True, 'args' : []}, 'SAVE_STATE_INFORMATION' : {'global' : True, 'args' : []}, 'SCHEDULE_AND_PROPAGATE_HOST_DOWNTIME' : {'global' : False, 'args' : ['host', 'to_int', 'to_int', 'to_bool', 'to_int', 'to_int', 'author',None]}, 'SCHEDULE_AND_PROPAGATE_TRIGGERED_HOST_DOWNTIME' : {'global' : False, 'args' : ['host', 'to_int', 'to_int', 'to_bool', 'to_int', 'to_int', 'author', None]}, 'SCHEDULE_CONTACT_DOWNTIME' : {'global' : True, 'args' : ['contact', 'to_int', 'to_int', 'author', None]}, 'SCHEDULE_FORCED_HOST_CHECK' : {'global' : False, 'args' : ['host', 'to_int']}, 'SCHEDULE_FORCED_HOST_SVC_CHECKS' : {'global' : False, 'args' : ['host', 'to_int']}, 'SCHEDULE_FORCED_SVC_CHECK' : {'global' : False, 'args' : ['service', 'to_int']}, 'SCHEDULE_HOSTGROUP_HOST_DOWNTIME' : {'global' : True, 'args' : ['host_group', 'to_int', 'to_int', 'to_bool', 'to_int', 'to_int', 'author',None]}, 'SCHEDULE_HOSTGROUP_SVC_DOWNTIME' : {'global' : True, 'args' : ['host_group', 'to_int', 'to_int', 'to_bool', 'to_int', 'to_int', 'author',None]}, 'SCHEDULE_HOST_CHECK' : {'global' : False, 'args' : ['host', 'to_int']}, 'SCHEDULE_HOST_DOWNTIME' : {'global' : False, 'args' : ['host', 'to_int', 'to_int', 'to_bool', 'to_int', 'to_int', 'author', None]}, 'SCHEDULE_HOST_SVC_CHECKS' : {'global' : False, 'args' : ['host', 'to_int']}, 'SCHEDULE_HOST_SVC_DOWNTIME' : {'global' : False, 'args' : ['host', 'to_int', 'to_int', 'to_bool', 'to_int', 'to_int', 'author', None]}, 'SCHEDULE_SERVICEGROUP_HOST_DOWNTIME' : {'global' : True, 'args' : ['service_group', 'to_int', 'to_int', 'to_bool', 'to_int', 'to_int', 'author', None]}, 'SCHEDULE_SERVICEGROUP_SVC_DOWNTIME' : {'global' : True, 'args' : ['service_group', 'to_int', 'to_int', 'to_bool', 'to_int', 'to_int', 'author', None]}, 'SCHEDULE_SVC_CHECK' : {'global' : False, 'args' : ['service', 'to_int']}, 'SCHEDULE_SVC_DOWNTIME' : {'global' : False, 'args' : ['service', 'to_int', 'to_int', 'to_bool', 'to_int', 'to_int', 'author', None]}, 'SEND_CUSTOM_HOST_NOTIFICATION' : {'global' : False, 'args' : ['host', 'to_int', 'author', None]}, 'SEND_CUSTOM_SVC_NOTIFICATION' : {'global' : False, 'args' : ['service', 'to_int', 'author', None]}, 'SET_HOST_NOTIFICATION_NUMBER' : {'global' : False, 'args' : ['host', 'to_int']}, 'SET_SVC_NOTIFICATION_NUMBER' : {'global' : False, 'args' : ['service', 'to_int']}, 'SHUTDOWN_PROGRAM' : {'global' : True, 'args' : []}, 'START_ACCEPTING_PASSIVE_HOST_CHECKS' : {'global' : True, 'args' : 
[]}, 'START_ACCEPTING_PASSIVE_SVC_CHECKS' : {'global' : True, 'args' : []}, 'START_EXECUTING_HOST_CHECKS' : {'global' : True, 'args' : []}, 'START_EXECUTING_SVC_CHECKS' : {'global' : True, 'args' : []}, 'START_OBSESSING_OVER_HOST' : {'global' : False, 'args' : ['host']}, 'START_OBSESSING_OVER_HOST_CHECKS' : {'global' : True, 'args' : []}, 'START_OBSESSING_OVER_SVC' : {'global' : False, 'args' : ['service']}, 'START_OBSESSING_OVER_SVC_CHECKS' : {'global' : True, 'args' : []}, 'STOP_ACCEPTING_PASSIVE_HOST_CHECKS' : {'global' : True, 'args' : []}, 'STOP_ACCEPTING_PASSIVE_SVC_CHECKS' : {'global' : True, 'args' : []}, 'STOP_EXECUTING_HOST_CHECKS' : {'global' : True, 'args' : []}, 'STOP_EXECUTING_SVC_CHECKS' : {'global' : True, 'args' : []}, 'STOP_OBSESSING_OVER_HOST' : {'global' : False, 'args' : ['host']}, 'STOP_OBSESSING_OVER_HOST_CHECKS' : {'global' : True, 'args' : []}, 'STOP_OBSESSING_OVER_SVC' : {'global' : False, 'args' : ['service']}, 'STOP_OBSESSING_OVER_SVC_CHECKS' : {'global' : True, 'args' : []}, 'LAUNCH_SVC_EVENT_HANDLER' : {'global' : False, 'args' : ['service']}, 'LAUNCH_HOST_EVENT_HANDLER' : {'global' : False, 'args' : ['host']}, # Now internal calls 'ADD_SIMPLE_HOST_DEPENDENCY' : {'global' : False, 'args' : ['host', 'host']}, 'DEL_HOST_DEPENDENCY' : {'global' : False, 'args' : ['host', 'host']}, 'ADD_SIMPLE_POLLER' : {'global' : True, 'internal' : True, 'args' : [None, None, None, None]}, } def __init__(self, conf, mode): self.mode = mode self.conf = conf self.hosts = conf.hosts self.services = conf.services self.contacts = conf.contacts self.hostgroups = conf.hostgroups self.commands = conf.commands self.servicegroups = conf.servicegroups self.contactgroups = conf.contactgroups self.timeperiods = conf.timeperiods self.pipe_path = conf.command_file self.fifo = None self.cmd_fragments = '' if self.mode == 'dispatcher': self.confs = conf.confs def load_scheduler(self, scheduler): self.sched = scheduler def load_arbiter(self, arbiter): self.arbiter = arbiter def open(self): # At the first open del and create the fifo if self.fifo is None: if os.path.exists(self.pipe_path): os.unlink(self.pipe_path) if not os.path.exists(self.pipe_path): os.umask(0) try : os.mkfifo(self.pipe_path, 0660) open(self.pipe_path, 'w+', os.O_NONBLOCK) except OSError , exp: print "Error : pipe creation failed (",self.pipe_path,')', exp return None self.fifo = os.open(self.pipe_path, os.O_NONBLOCK) return self.fifo def get(self): buf = os.read(self.fifo, 8096) r = [] fullbuf = len(buf) == 8096 and True or False # If the buffer ended with a fragment last time, prepend it here buf = self.cmd_fragments + buf buflen = len(buf) self.cmd_fragments = '' if fullbuf and buf[-1] != '\n': # The buffer was full but ends with a command fragment r.extend([ExternalCommand(s) for s in (buf.split('\n'))[:-1] if s]) self.cmd_fragments = (buf.split('\n'))[-1] elif buflen: # The buffer is either half-filled or full with a '\n' at the end. r.extend([ExternalCommand(s) for s in buf.split('\n') if s]) else: # The buffer is empty. We "reset" the fifo here. It will be # re-opened in the main loop. os.close(self.fifo) return r def resolve_command(self, excmd): # Maybe the command is invalid. 
Bailout try: command = excmd.cmd_line except AttributeError, exp: print "DBG: resolve_command:: error with command", excmd, exp return # Strip and get utf8 only strings command = command.strip() #Only log if we are in the Arbiter if self.mode == 'dispatcher' and self.conf.log_external_commands: logger.log('EXTERNAL COMMAND: '+command.rstrip()) r = self.get_command_and_args(command) if r is not None: is_global = r['global'] if not is_global: c_name = r['c_name'] args = r['args'] print "Got commands", c_name, args f = getattr(self, c_name) apply(f, args) else: command = r['cmd'] self.dispatch_global_command(command) # Ok the command is not for every one, so we search # by the hostname which scheduler have the host. Then send # the command def search_host_and_dispatch(self, host_name, command): safe_print("Calling search_host_and_dispatch", 'for', host_name) host_found = False for cfg in self.confs.values(): if cfg.hosts.find_by_name(host_name) is not None: safe_print("Host", host_name, "found in a configuration") if cfg.is_assigned : host_found = True sched = cfg.assigned_to safe_print("Sending command to the scheduler", sched.get_name()) #sched.run_external_command(command) sched.external_commands.append(command) break else: print "Problem: a configuration is found, but is not assigned!" if not host_found: logger.log("Warning: Passive check result was received for host '%s', but the host could not be found!" % host_name) #print "Sorry but the host", host_name, "was not found" # The command is global, so sent it to every schedulers def dispatch_global_command(self, command): for sched in self.conf.schedulerlinks: safe_print("Sending a command", command, 'to scheduler', sched) if sched.alive: #sched.run_external_command(command) sched.external_commands.append(command) # We need to get the first part, the command name def get_command_and_args(self, command): #safe_print("Trying to resolve", command) command = command.rstrip() elts = command.split(';') # danger!!! passive checkresults with perfdata part1 = elts[0] elts2 = part1.split(' ') #print "Elts2:", elts2 if len(elts2) != 2: safe_print("Malformed command", command) return None c_name = elts2[1] #safe_print("Get command name", c_name) if c_name not in ExternalCommandManager.commands: print "This command is not recognized, sorry" return None # Split again based on the number of args we expect. We cannot split # on every ; because this character may appear in the perfdata of # passive check results. 
entry = ExternalCommandManager.commands[c_name] # Look if the command is purely internal or not internal = False if 'internal' in entry and entry['internal']: internal = True numargs = len(entry['args']) if numargs and 'service' in entry['args']: numargs += 1 elts = command.split(';', numargs) print self.mode, entry['global'] if self.mode == 'dispatcher' and entry['global']: if not internal: print "This command is a global one, we resent it to all schedulers" return {'global' : True, 'cmd' : command} #print "Is global?", c_name, entry['global'] #print "Mode:", self.mode #print "This command have arguments:", entry['args'], len(entry['args']) args = [] i = 1 in_service = False tmp_host = '' try: for elt in elts[1:]: #safe_print("Searching for a new arg:", elt, i) val = elt.strip() if val[-1] == '\n': val = val[:-1] #safe_print("For command arg", val) if not in_service: type_searched = entry['args'][i-1] #safe_print("Search for a arg", type_searched) if type_searched == 'host': if self.mode == 'dispatcher': self.search_host_and_dispatch(val, command) return None h = self.hosts.find_by_name(val) if h is not None: args.append(h) elif type_searched == 'contact': c = self.contacts.find_by_name(val) if c is not None: args.append(c) elif type_searched == 'time_period': t = self.timeperiods.find_by_name(val) if t is not None: args.append(t) elif type_searched == 'to_bool': args.append(to_bool(val)) elif type_searched == 'to_int': args.append(to_int(val)) elif type_searched in ('author', None): args.append(val) elif type_searched == 'command': c = self.commands.find_by_name(val) if c is not None: # the find will be redone by # the commandCall creation, but != None # is usefull so a bad command will be catch args.append(val) elif type_searched == 'host_group': hg = self.hostgroups.find_by_name(val) if hg is not None: args.append(hg) elif type_searched == 'service_group': sg = self.servicegroups.find_by_name(val) if sg is not None: args.append(sg) elif type_searched == 'contact_group': cg = self.contact_groups.find_by_name(val) if cg is not None: args.append(cg) # special case: service are TWO args host;service, so one more loop # to get the two parts elif type_searched == 'service': in_service = True tmp_host = elt.strip() #safe_print("TMP HOST", tmp_host) if tmp_host[-1] == '\n': tmp_host = tmp_host[:-1] if self.mode == 'dispatcher': self.search_host_and_dispatch(tmp_host, command) return None i += 1 else: in_service = False srv_name = elt if srv_name[-1] == '\n': srv_name = srv_name[:-1] #safe_print("Got service full", tmp_host, srv_name) s = self.services.find_srv_by_name_and_hostname(tmp_host, srv_name) if s is not None: args.append(s) else: #error, must be logged logger.log("Warning: a command was received for service '%s' on host '%s', but the service could not be found!" 
% (srv_name, tmp_host)) except IndexError: safe_print("Sorry, the arguments are not corrects") return None safe_print('Finally got ARGS:', args) if len(args) == len(entry['args']): #safe_print("OK, we can call the command", c_name, "with", args) return {'global' : False, 'c_name' : c_name, 'args' : args} #f = getattr(self, c_name) #apply(f, args) else: safe_print("Sorry, the arguments are not corrects", args) return None # CHANGE_CONTACT_MODSATTR;<contact_name>;<value> def CHANGE_CONTACT_MODSATTR(self, contact, value): # TODO contact.modified_service_attributes = long(value) # CHANGE_CONTACT_MODHATTR;<contact_name>;<value> def CHANGE_CONTACT_MODHATTR(self, contact, value): # TODO contact.modified_host_attributes = long(value) # CHANGE_CONTACT_MODATTR;<contact_name>;<value> def CHANGE_CONTACT_MODATTR(self, contact, value): contact.modified_attributes = long(value) # CHANGE_CONTACT_HOST_NOTIFICATION_TIMEPERIOD;<contact_name>;<notification_timeperiod> def CHANGE_CONTACT_HOST_NOTIFICATION_TIMEPERIOD(self, contact, notification_timeperiod): contact.modified_host_attributes |= MODATTR_NOTIFICATION_TIMEPERIOD contact.host_notification_period = notification_timeperiod self.sched.get_and_register_status_brok(contact) #ADD_SVC_COMMENT;<host_name>;<service_description>;<persistent>;<author>;<comment> def ADD_SVC_COMMENT(self, service, persistent, author, comment): c = Comment(service, persistent, author, comment, 2, 1, 1, False, 0) service.add_comment(c) self.sched.add(c) #ADD_HOST_COMMENT;<host_name>;<persistent>;<author>;<comment> def ADD_HOST_COMMENT(self, host, persistent, author, comment): c = Comment(host, persistent, author, comment, 1, 1, 1, False, 0) host.add_comment(c) self.sched.add(c) #ACKNOWLEDGE_SVC_PROBLEM;<host_name>;<service_description>;<sticky>;<notify>;<persistent>;<author>;<comment> def ACKNOWLEDGE_SVC_PROBLEM(self, service, sticky, notify, persistent, author, comment): service.acknowledge_problem(sticky, notify, persistent, author, comment) #ACKNOWLEDGE_HOST_PROBLEM;<host_name>;<sticky>;<notify>;<persistent>;<author>;<comment> #TODO : add a better ACK management def ACKNOWLEDGE_HOST_PROBLEM(self, host, sticky, notify, persistent, author, comment): host.acknowledge_problem(sticky, notify, persistent, author, comment) #ACKNOWLEDGE_SVC_PROBLEM_EXPIRE;<host_name>;<service_description>;<sticky>;<notify>;<persistent>;<end_time>;<author>;<comment> def ACKNOWLEDGE_SVC_PROBLEM_EXPIRE(self, service, sticky, notify, persistent, end_time, author, comment): service.acknowledge_problem(sticky, notify, persistent, author, comment, end_time=end_time) #ACKNOWLEDGE_HOST_PROBLEM_EXPIRE;<host_name>;<sticky>;<notify>;<persistent>;<end_time>;<author>;<comment> #TODO : add a better ACK management def ACKNOWLEDGE_HOST_PROBLEM_EXPIRE(self, host, sticky, notify, persistent, end_time, author, comment): host.acknowledge_problem(sticky, notify, persistent, author, comment, end_time=end_time) # CHANGE_CONTACT_SVC_NOTIFICATION_TIMEPERIOD;<contact_name>;<notification_timeperiod> def CHANGE_CONTACT_SVC_NOTIFICATION_TIMEPERIOD(self, contact, notification_timeperiod): contact.modified_service_attributes |= MODATTR_NOTIFICATION_TIMEPERIOD contact.service_notification_period = notification_timeperiod self.sched.get_and_register_status_brok(contact) # CHANGE_CUSTOM_CONTACT_VAR;<contact_name>;<varname>;<varvalue> def CHANGE_CUSTOM_CONTACT_VAR(self, contact, varname, varvalue): contact.modified_attributes |= MODATTR_CUSTOM_VARIABLE contact.customs[varname.upper()] = varvalue # 
CHANGE_CUSTOM_HOST_VAR;<host_name>;<varname>;<varvalue> def CHANGE_CUSTOM_HOST_VAR(self, host, varname, varvalue): host.modified_attributes |= MODATTR_CUSTOM_VARIABLE host.customs[varname.upper()] = varvalue # CHANGE_CUSTOM_SVC_VAR;<host_name>;<service_description>;<varname>;<varvalue> def CHANGE_CUSTOM_SVC_VAR(self, service, varname, varvalue): service.modified_attributes |= MODATTR_CUSTOM_VARIABLE service.customs[varname.upper()] = varvalue # CHANGE_GLOBAL_HOST_EVENT_HANDLER;<event_handler_command> def CHANGE_GLOBAL_HOST_EVENT_HANDLER(self, event_handler_command): # TODO: MODATTR_EVENT_HANDLER_COMMAND pass # CHANGE_GLOBAL_SVC_EVENT_HANDLER;<event_handler_command> # TODO def CHANGE_GLOBAL_SVC_EVENT_HANDLER(self, event_handler_command): # TODO: MODATTR_EVENT_HANDLER_COMMAND pass # CHANGE_HOST_CHECK_COMMAND;<host_name>;<check_command> def CHANGE_HOST_CHECK_COMMAND(self, host, check_command): host.modified_attributes |= MODATTR_CHECK_COMMAND host.check_command = CommandCall(self.commands, check_command, poller_tag=host.poller_tag) self.sched.get_and_register_status_brok(host) # CHANGE_HOST_CHECK_TIMEPERIOD;<host_name>;<timeperiod> def CHANGE_HOST_CHECK_TIMEPERIOD(self, host, timeperiod): # TODO is timeperiod a string or a Timeperiod object? host.modified_attributes |= MODATTR_CHECK_TIMEPERIOD host.check_period = timeperiod self.sched.get_and_register_status_brok(service) # CHANGE_HOST_EVENT_HANDLER;<host_name>;<event_handler_command> def CHANGE_HOST_EVENT_HANDLER(self, host, event_handler_command): host.modified_attributes |= MODATTR_EVENT_HANDLER_COMMAND host.event_handler = CommandCall(self.commands, event_handler_command) self.sched.get_and_register_status_brok(host) # CHANGE_HOST_MODATTR;<host_name>;<value> def CHANGE_HOST_MODATTR(self, host, value): host.modified_attributes = long(value) # CHANGE_MAX_HOST_CHECK_ATTEMPTS;<host_name>;<check_attempts> def CHANGE_MAX_HOST_CHECK_ATTEMPTS(self, host, check_attempts): host.modified_attributes |= MODATTR_MAX_CHECK_ATTEMPTS host.max_check_attempts = check_attempts if host.state_type == 'HARD' and host.state == 'UP' and host.attempt > 1: host.attempt = host.max_check_attempts self.sched.get_and_register_status_brok(host) # CHANGE_MAX_SVC_CHECK_ATTEMPTS;<host_name>;<service_description>;<check_attempts> def CHANGE_MAX_SVC_CHECK_ATTEMPTS(self, service, check_attempts): service.modified_attributes |= MODATTR_MAX_CHECK_ATTEMPTS service.max_check_attempts = check_attempts if service.state_type == 'HARD' and service.state == 'OK' and service.attempt > 1: service.attempt = service.max_check_attempts self.sched.get_and_register_status_brok(service) # CHANGE_NORMAL_HOST_CHECK_INTERVAL;<host_name>;<check_interval> def CHANGE_NORMAL_HOST_CHECK_INTERVAL(self, host, check_interval): host.modified_attributes |= MODATTR_NORMAL_CHECK_INTERVAL old_interval = host.check_interval host.check_interval = check_interval # If there were no regular checks (interval=0), then schedule # a check immediately. if old_interval == 0 and host.checks_enabled: host.schedule(force=False, force_time=int(time.time())) self.sched.get_and_register_status_brok(host) # CHANGE_NORMAL_SVC_CHECK_INTERVAL;<host_name>;<service_description>;<check_interval> def CHANGE_NORMAL_HOST_CHECK_INTERVAL(self, service, check_interval): service.modified_attributes |= MODATTR_NORMAL_CHECK_INTERVAL old_interval = service.check_interval service.check_interval = check_interval # If there were no regular checks (interval=0), then schedule # a check immediately. 
if old_interval == 0 and service.checks_enabled: service.schedule(force=False, force_time=int(time.time())) self.sched.get_and_register_status_brok(service) # CHANGE_RETRY_HOST_CHECK_INTERVAL;<host_name>;<check_interval> def CHANGE_RETRY_HOST_CHECK_INTERVAL(self, host, check_interval): host.modified_attributes |= MODATTR_RETRY_CHECK_INTERVAL host.retry_interval = check_interval self.sched.get_and_register_status_brok(host) # CHANGE_RETRY_SVC_CHECK_INTERVAL;<host_name>;<service_description>;<check_interval> def CHANGE_RETRY_SVC_CHECK_INTERVAL(self, service, check_interval): service.modified_attributes |= MODATTR_RETRY_CHECK_INTERVAL service.retry_interval = check_interval self.sched.get_and_register_status_brok(service) # CHANGE_SVC_CHECK_COMMAND;<host_name>;<service_description>;<check_command> def CHANGE_SVC_CHECK_COMMAND(self, service, check_command): service.modified_attributes |= MODATTR_CHECK_COMMAND service.check_command = CommandCall(self.commands, check_command, poller_tag=service.poller_tag) self.sched.get_and_register_status_brok(service) # CHANGE_SVC_CHECK_TIMEPERIOD;<host_name>;<service_description>;<check_timeperiod> def CHANGE_SVC_CHECK_TIMEPERIOD(self, service, check_timeperiod): service.modified_attributes |= MODATTR_CHECK_TIMEPERIOD service.check_period = check_timeperiod self.sched.get_and_register_status_brok(service) # CHANGE_SVC_EVENT_HANDLER;<host_name>;<service_description>;<event_handler_command> def CHANGE_SVC_EVENT_HANDLER(self, service, event_handler_command): service.modified_attributes |= MODATTR_EVENT_HANDLER_COMMAND service.event_handler = CommandCall(self.commands, event_handler_command) self.sched.get_and_register_status_brok(service) # CHANGE_SVC_MODATTR;<host_name>;<service_description>;<value> def CHANGE_SVC_MODATTR(self, service, value): service.modified_attributes = long(value) # CHANGE_SVC_NOTIFICATION_TIMEPERIOD;<host_name>;<service_description>;<notification_timeperiod> def CHANGE_SVC_NOTIFICATION_TIMEPERIOD(self, service, notification_timeperiod): service.modified_attributes |= MODATTR_NOTIFICATION_TIMEPERIOD service.notification_period = notification_timeperiod self.sched.get_and_register_status_brok(service) # DELAY_HOST_NOTIFICATION;<host_name>;<notification_time> def DELAY_HOST_NOTIFICATION(self, host, notification_time): host.first_notification_delay = notification_time self.sched.get_and_register_status_brok(host) # DELAY_SVC_NOTIFICATION;<host_name>;<service_description>;<notification_time> def DELAY_SVC_NOTIFICATION(self, service, notification_time): service.first_notification_delay = notification_time self.sched.get_and_register_status_brok(service) # DEL_ALL_HOST_COMMENTS;<host_name> def DEL_ALL_HOST_COMMENTS(self, host): for c in host.comments: self.DEL_HOST_COMMENT(c.id) # DEL_ALL_HOST_COMMENTS;<host_name> def DEL_ALL_HOST_DOWNTIMES(self, host): for dt in host.downtimes: self.DEL_HOST_DOWNTIME(dt.id) # DEL_ALL_SVC_COMMENTS;<host_name>;<service_description> def DEL_ALL_SVC_COMMENTS(self, service): for c in service.comments: self.DEL_SVC_COMMENT(c.id) # DEL_ALL_SVC_COMMENTS;<host_name>;<service_description> def DEL_ALL_SVC_DOWNTIMES(self, service): for dt in service.downtimes: self.DEL_SVC_DOWNTIME(dt.id) # DEL_CONTACT_DOWNTIME;<downtime_id> def DEL_CONTACT_DOWNTIME(self, downtime_id): if downtime_id in self.sched.contact_downtimes: self.sched.contact_downtimes[downtime_id].cancel() # DEL_HOST_COMMENT;<comment_id> def DEL_HOST_COMMENT(self, comment_id): if comment_id in self.sched.comments: self.sched.comments[comment_id].can_be_deleted = 
True # DEL_HOST_DOWNTIME;<downtime_id> def DEL_HOST_DOWNTIME(self, downtime_id): if downtime_id in self.sched.downtimes: self.sched.downtimes[downtime_id].cancel() # DEL_SVC_COMMENT;<comment_id> def DEL_SVC_COMMENT(self, comment_id): if comment_id in self.sched.comments: self.sched.comments[comment_id].can_be_deleted = True # DEL_SVC_DOWNTIME;<downtime_id> def DEL_SVC_DOWNTIME(self, downtime_id): if downtime_id in self.sched.downtimes: self.sched.downtimes[downtime_id].cancel() # DISABLE_ALL_NOTIFICATIONS_BEYOND_HOST;<host_name> def DISABLE_ALL_NOTIFICATIONS_BEYOND_HOST(self, host): pass # DISABLE_CONTACTGROUP_HOST_NOTIFICATIONS;<contactgroup_name> def DISABLE_CONTACTGROUP_HOST_NOTIFICATIONS(self, contactgroup): for contact in contactgroup: self.DISABLE_CONTACT_HOST_NOTIFICATIONS(contact) # DISABLE_CONTACTGROUP_SVC_NOTIFICATIONS;<contactgroup_name> def DISABLE_CONTACTGROUP_SVC_NOTIFICATIONS(self, contactgroup): for contact in contactgroup: self.DISABLE_CONTACT_SVC_NOTIFICATIONS(contact) # DISABLE_CONTACT_HOST_NOTIFICATIONS;<contact_name> def DISABLE_CONTACT_HOST_NOTIFICATIONS(self, contact): if contact.host_notifications_enabled: contact.modified_attributes |= MODATTR_NOTIFICATIONS_ENABLED contact.host_notifications_enabled = False self.sched.get_and_register_status_brok(contact) # DISABLE_CONTACT_SVC_NOTIFICATIONS;<contact_name> def DISABLE_CONTACT_SVC_NOTIFICATIONS(self, contact): if contact.service_notifications_enabled: contact.modified_attributes |= MODATTR_NOTIFICATIONS_ENABLED contact.service_notifications_enabled = False self.sched.get_and_register_status_brok(contact) # DISABLE_EVENT_HANDLERS def DISABLE_EVENT_HANDLERS(self): if self.conf.enable_event_handlers: self.conf.modified_attributes |= MODATTR_EVENT_HANDLER_ENABLED self.conf.enable_event_handlers = False self.conf.explode_global_conf() self.sched.get_and_register_update_program_status_brok() # DISABLE_FAILURE_PREDICTION def DISABLE_FAILURE_PREDICTION(self): if self.conf.enable_failure_prediction: self.conf.modified_attributes |= MODATTR_FAILURE_PREDICTION_ENABLED self.conf.enable_failure_prediction = False self.conf.explode_global_conf() self.sched.get_and_register_update_program_status_brok() # DISABLE_FLAP_DETECTION def DISABLE_FLAP_DETECTION(self): if self.conf.enable_flap_detection: self.conf.modified_attributes |= MODATTR_FLAP_DETECTION_ENABLED self.conf.enable_flap_detection = False self.conf.explode_global_conf() self.sched.get_and_register_update_program_status_brok() # DISABLE_HOSTGROUP_HOST_CHECKS;<hostgroup_name> def DISABLE_HOSTGROUP_HOST_CHECKS(self, hostgroup): for host in hostgroup: self.DISABLE_HOST_CHECK(host) # DISABLE_HOSTGROUP_HOST_NOTIFICATIONS;<hostgroup_name> def DISABLE_HOSTGROUP_HOST_NOTIFICATIONS(self, hostgroup): for host in hostgroup: self.DISABLE_HOST_NOTIFICATIONS(host) # DISABLE_HOSTGROUP_PASSIVE_HOST_CHECKS;<hostgroup_name> def DISABLE_HOSTGROUP_PASSIVE_HOST_CHECKS(self, hostgroup): for host in hostgroup: self.DISABLE_PASSIVE_HOST_CHECKS(host) # DISABLE_HOSTGROUP_PASSIVE_SVC_CHECKS;<hostgroup_name> def DISABLE_HOSTGROUP_PASSIVE_SVC_CHECKS(self, hostgroup): for host in hostgroup: for service in host.services: self.DISABLE_PASSIVE_SVC_CHECKS(service) # DISABLE_HOSTGROUP_SVC_CHECKS;<hostgroup_name> def DISABLE_HOSTGROUP_SVC_CHECKS(self, hostgroup): for host in hostgroup: for service in host.services: self.DISABLE_SVC_CHECK(service) # DISABLE_HOSTGROUP_SVC_NOTIFICATIONS;<hostgroup_name> def DISABLE_HOSTGROUP_SVC_NOTIFICATIONS(self, hostgroup): for host in hostgroup: for service in host.services: 
self.DISABLE_SVC_NOTIFICATIONS(service) # DISABLE_HOST_AND_CHILD_NOTIFICATIONS;<host_name> def DISABLE_HOST_AND_CHILD_NOTIFICATIONS(self, host): pass # DISABLE_HOST_CHECK;<host_name> def DISABLE_HOST_CHECK(self, host): if host.active_checks_enabled: host.modified_attributes |= MODATTR_ACTIVE_CHECKS_ENABLED host.disable_active_checks() self.sched.get_and_register_status_brok(host) # DISABLE_HOST_EVENT_HANDLER;<host_name> def DISABLE_HOST_EVENT_HANDLER(self, host): if host.event_handler_enabled: host.modified_attributes |= MODATTR_EVENT_HANDLER_ENABLED host.event_handler_enabled = False self.sched.get_and_register_status_brok(host) # DISABLE_HOST_FLAP_DETECTION;<host_name> def DISABLE_HOST_FLAP_DETECTION(self, host): if host.flap_detection_enabled: host.modified_attributes |= MODATTR_FLAP_DETECTION_ENABLED host.flap_detection_enabled = False self.sched.get_and_register_status_brok(host) # DISABLE_HOST_FRESHNESS_CHECKS def DISABLE_HOST_FRESHNESS_CHECKS(self, host): if host.check_freshness: host.modified_attributes |= MODATTR_FRESHNESS_CHECKS_ENABLED host.check_freshness = False self.sched.get_and_register_status_brok(host) # DISABLE_HOST_NOTIFICATIONS;<host_name> def DISABLE_HOST_NOTIFICATIONS(self, host): if host.notifications_enabled: host.modified_attributes |= MODATTR_NOTIFICATIONS_ENABLED host.notifications_enabled = False self.sched.get_and_register_status_brok(host) # DISABLE_HOST_SVC_CHECKS;<host_name> def DISABLE_HOST_SVC_CHECKS(self, host): for s in host.services: self.DISABLE_SVC_CHECK(s) # DISABLE_HOST_SVC_NOTIFICATIONS;<host_name> def DISABLE_HOST_SVC_NOTIFICATIONS(self, host): for s in host.services: self.DISABLE_SVC_NOTIFICATIONS(s) self.sched.get_and_register_status_brok(s) # DISABLE_NOTIFICATIONS def DISABLE_NOTIFICATIONS(self): if self.conf.enable_notifications: self.conf.modified_attributes |= MODATTR_NOTIFICATIONS_ENABLED self.conf.enable_notifications = False self.conf.explode_global_conf() self.sched.get_and_register_update_program_status_brok() # DISABLE_PASSIVE_HOST_CHECKS;<host_name> def DISABLE_PASSIVE_HOST_CHECKS(self, host): if host.passive_checks_enabled: host.modified_attributes |= MODATTR_PASSIVE_CHECKS_ENABLED host.passive_checks_enabled = False self.sched.get_and_register_status_brok(host) # DISABLE_PASSIVE_SVC_CHECKS;<host_name>;<service_description> def DISABLE_PASSIVE_SVC_CHECKS(self, service): if service.passive_checks_enabled: service.modified_attributes |= MODATTR_PASSIVE_CHECKS_ENABLED service.passive_checks_enabled = False self.sched.get_and_register_status_brok(service) # DISABLE_PERFORMANCE_DATA def DISABLE_PERFORMANCE_DATA(self): if self.conf.process_performance_data: self.conf.modified_attributes |= MODATTR_PERFORMANCE_DATA_ENABLED self.conf.process_performance_data = False self.conf.explode_global_conf() self.sched.get_and_register_update_program_status_brok() # DISABLE_SERVICEGROUP_HOST_CHECKS;<servicegroup_name> def DISABLE_SERVICEGROUP_HOST_CHECKS(self, servicegroup): for service in servicegroup: self.DISABLE_HOST_CHECK(service.host) # DISABLE_SERVICEGROUP_HOST_NOTIFICATIONS;<servicegroup_name> def DISABLE_SERVICEGROUP_HOST_NOTIFICATIONS(self, servicegroup): for service in servicegroup: self.DISABLE_HOST_NOTIFICATIONS(service.host) # DISABLE_SERVICEGROUP_PASSIVE_HOST_CHECKS;<servicegroup_name> def DISABLE_SERVICEGROUP_PASSIVE_HOST_CHECKS(self, servicegroup): for service in servicegroup: self.DISABLE_PASSIVE_HOST_CHECKS(service.host) # DISABLE_SERVICEGROUP_PASSIVE_SVC_CHECKS;<servicegroup_name> def DISABLE_SERVICEGROUP_PASSIVE_SVC_CHECKS(self, 
servicegroup): for service in servicegroup: self.DISABLE_PASSIVE_SVC_CHECKS(service) # DISABLE_SERVICEGROUP_SVC_CHECKS;<servicegroup_name> def DISABLE_SERVICEGROUP_SVC_CHECKS(self, servicegroup): for service in servicegroup: self.DISABLE_SVC_CHECK(service) # DISABLE_SERVICEGROUP_SVC_NOTIFICATIONS;<servicegroup_name> def DISABLE_SERVICEGROUP_SVC_NOTIFICATIONS(self, servicegroup): for service in servicegroup: self.DISABLE_SVC_NOTIFICATIONS(service) # DISABLE_SERVICE_FLAP_DETECTION;<host_name>;<service_description> def DISABLE_SERVICE_FLAP_DETECTION(self, service): if service.flap_detection_enabled: service.modified_attributes |= MODATTR_FLAP_DETECTION_ENABLED service.flap_detection_enabled = False self.sched.get_and_register_status_brok(service) # DISABLE_SERVICE_FRESHNESS_CHECKS def DISABLE_SERVICE_FRESHNESS_CHECKS(self): if self.conf.check_service_freshness: self.conf.modified_attributes |= MODATTR_FRESHNESS_CHECKS_ENABLED self.conf.check_service_freshness = False self.conf.explode_global_conf() self.sched.get_and_register_update_program_status_brok() # DISABLE_SVC_CHECK;<host_name>;<service_description> def DISABLE_SVC_CHECK(self, service): if service.active_checks_enabled: service.disable_active_checks() service.modified_attributes |= MODATTR_ACTIVE_CHECKS_ENABLED self.sched.get_and_register_status_brok(service) # DISABLE_SVC_EVENT_HANDLER;<host_name>;<service_description> def DISABLE_SVC_EVENT_HANDLER(self, service): if service.event_handler_enabled: service.modified_attributes |= MODATTR_EVENT_HANDLER_ENABLED service.event_handler_enabled = False self.sched.get_and_register_status_brok(service) # DISABLE_SVC_FLAP_DETECTION;<host_name>;<service_description> def DISABLE_SVC_FLAP_DETECTION(self, service): if service.flap_detection_enabled: service.modified_attributes |= MODATTR_FLAP_DETECTION_ENABLED service.flap_detection_enabled = False self.sched.get_and_register_status_brok(service) # DISABLE_SVC_NOTIFICATIONS;<host_name>;<service_description> def DISABLE_SVC_NOTIFICATIONS(self, service): if service.notifications_enabled: service.modified_attributes |= MODATTR_NOTIFICATIONS_ENABLED service.notifications_enabled = False self.sched.get_and_register_status_brok(service) # ENABLE_ALL_NOTIFICATIONS_BEYOND_HOST;<host_name> def ENABLE_ALL_NOTIFICATIONS_BEYOND_HOST(self, host): pass # ENABLE_CONTACTGROUP_HOST_NOTIFICATIONS;<contactgroup_name> def ENABLE_CONTACTGROUP_HOST_NOTIFICATIONS(self, contactgroup): for contact in contactgroup: self.ENABLE_CONTACT_HOST_NOTIFICATIONS(contact) # ENABLE_CONTACTGROUP_SVC_NOTIFICATIONS;<contactgroup_name> def ENABLE_CONTACTGROUP_SVC_NOTIFICATIONS(self, contactgroup): for contact in contactgroup: self.ENABLE_CONTACT_SVC_NOTIFICATIONS(contact) # ENABLE_CONTACT_HOST_NOTIFICATIONS;<contact_name> def ENABLE_CONTACT_HOST_NOTIFICATIONS(self, contact): if not contact.host_notifications_enabled: contact.modified_attributes |= MODATTR_NOTIFICATIONS_ENABLED contact.host_notifications_enabled = True self.sched.get_and_register_status_brok(contact) # ENABLE_CONTACT_SVC_NOTIFICATIONS;<contact_name> def ENABLE_CONTACT_SVC_NOTIFICATIONS(self, contact): if not contact.service_notifications_enabled: contact.modified_attributes |= MODATTR_NOTIFICATIONS_ENABLED contact.service_notifications_enabled = True self.sched.get_and_register_status_brok(contact) # ENABLE_EVENT_HANDLERS def ENABLE_EVENT_HANDLERS(self): if not self.conf.enable_event_handlers: self.conf.modified_attributes |= MODATTR_EVENT_HANDLER_ENABLED self.conf.enable_event_handlers = True 
self.conf.explode_global_conf() self.sched.get_and_register_update_program_status_brok() # ENABLE_FAILURE_PREDICTION def ENABLE_FAILURE_PREDICTION(self): if not self.conf.enable_failure_prediction: self.conf.modified_attributes |= MODATTR_FAILURE_PREDICTION_ENABLED self.conf.enable_failure_prediction = True self.conf.explode_global_conf() self.sched.get_and_register_update_program_status_brok() # ENABLE_FLAP_DETECTION def ENABLE_FLAP_DETECTION(self): if not self.conf.enable_flap_detection: self.conf.modified_attributes |= MODATTR_FLAP_DETECTION_ENABLED self.conf.enable_flap_detection = True self.conf.explode_global_conf() self.sched.get_and_register_update_program_status_brok() # ENABLE_HOSTGROUP_HOST_CHECKS;<hostgroup_name> def ENABLE_HOSTGROUP_HOST_CHECKS(self, hostgroup): for host in hostgroup: self.ENABLE_HOST_CHECK(host) # ENABLE_HOSTGROUP_HOST_NOTIFICATIONS;<hostgroup_name> def ENABLE_HOSTGROUP_HOST_NOTIFICATIONS(self, hostgroup): for host in hostgroup: self.ENABLE_HOST_NOTIFICATIONS(host) # ENABLE_HOSTGROUP_PASSIVE_HOST_CHECKS;<hostgroup_name> def ENABLE_HOSTGROUP_PASSIVE_HOST_CHECKS(self, hostgroup): for host in hostgroup: self.ENABLE_PASSIVE_HOST_CHECKS(host) # ENABLE_HOSTGROUP_PASSIVE_SVC_CHECKS;<hostgroup_name> def ENABLE_HOSTGROUP_PASSIVE_SVC_CHECKS(self, hostgroup): for host in hostgroup: for service in host.services: self.ENABLE_PASSIVE_SVC_CHECKS(service) # ENABLE_HOSTGROUP_SVC_CHECKS;<hostgroup_name> def ENABLE_HOSTGROUP_SVC_CHECKS(self, hostgroup): for host in hostgroup: for service in host.services: self.ENABLE_SVC_CHECK(service) # ENABLE_HOSTGROUP_SVC_NOTIFICATIONS;<hostgroup_name> def ENABLE_HOSTGROUP_SVC_NOTIFICATIONS(self, hostgroup): for host in hostgroup: for service in host.services: self.ENABLE_SVC_NOTIFICATIONS(service) # ENABLE_HOST_AND_CHILD_NOTIFICATIONS;<host_name> def ENABLE_HOST_AND_CHILD_NOTIFICATIONS(self, host): pass # ENABLE_HOST_CHECK;<host_name> def ENABLE_HOST_CHECK(self, host): if not host.active_checks_enabled: host.active_checks_enabled = True host.modified_attributes |= MODATTR_ACTIVE_CHECKS_ENABLED self.sched.get_and_register_status_brok(host) # ENABLE_HOST_EVENT_HANDLER;<host_name> def ENABLE_HOST_EVENT_HANDLER(self, host): if not host.event_handler_enabled: host.modified_attributes |= MODATTR_EVENT_HANDLER_ENABLED host.event_handler_enabled = True self.sched.get_and_register_status_brok(host) # ENABLE_HOST_FLAP_DETECTION;<host_name> def ENABLE_HOST_FLAP_DETECTION(self, host): if not host.flap_detection_enabled: host.modified_attributes |= MODATTR_FLAP_DETECTION_ENABLED host.flap_detection_enabled = True self.sched.get_and_register_status_brok(host) # ENABLE_HOST_FRESHNESS_CHECKS def ENABLE_HOST_FRESHNESS_CHECKS(self): if not host.check_freshness: host.modified_attributes |= MODATTR_FRESHNESS_CHECKS_ENABLED host.check_freshness = True self.sched.get_and_register_status_brok(host) # ENABLE_HOST_NOTIFICATIONS;<host_name> def ENABLE_HOST_NOTIFICATIONS(self, host): if not host.notifications_enabled: host.modified_attributes |= MODATTR_NOTIFICATIONS_ENABLED host.notifications_enabled = True self.sched.get_and_register_status_brok(host) # ENABLE_HOST_SVC_CHECKS;<host_name> def ENABLE_HOST_SVC_CHECKS(self, host): for s in host.services: self.ENABLE_SVC_CHECK(s) # ENABLE_HOST_SVC_NOTIFICATIONS;<host_name> def ENABLE_HOST_SVC_NOTIFICATIONS(self, host): for s in host.services: self.ENABLE_SVC_NOTIFICATIONS(s) self.sched.get_and_register_status_brok(s) # ENABLE_NOTIFICATIONS def ENABLE_NOTIFICATIONS(self): if not self.conf.enable_notifications: 
self.conf.modified_attributes |= MODATTR_NOTIFICATIONS_ENABLED self.conf.enable_notifications = True self.conf.explode_global_conf() self.sched.get_and_register_update_program_status_brok() # ENABLE_PASSIVE_HOST_CHECKS;<host_name> def ENABLE_PASSIVE_HOST_CHECKS(self, host): if not host.passive_checks_enabled: host.modified_attributes |= MODATTR_PASSIVE_CHECKS_ENABLED host.passive_checks_enabled = True self.sched.get_and_register_status_brok(host) # ENABLE_PASSIVE_SVC_CHECKS;<host_name>;<service_description> def ENABLE_PASSIVE_SVC_CHECKS(self, service): if not service.passive_checks_enabled: service.modified_attributes |= MODATTR_PASSIVE_CHECKS_ENABLED service.passive_checks_enabled = True self.sched.get_and_register_status_brok(service) # ENABLE_PERFORMANCE_DATA def ENABLE_PERFORMANCE_DATA(self): if not self.conf.process_performance_data: self.conf.modified_attributes |= MODATTR_PERFORMANCE_DATA_ENABLED self.conf.process_performance_data = True self.conf.explode_global_conf() self.sched.get_and_register_update_program_status_brok() # ENABLE_SERVICEGROUP_HOST_CHECKS;<servicegroup_name> def ENABLE_SERVICEGROUP_HOST_CHECKS(self, servicegroup): for service in servicegroup: self.ENABLE_HOST_CHECK(service.host) # ENABLE_SERVICEGROUP_HOST_NOTIFICATIONS;<servicegroup_name> def ENABLE_SERVICEGROUP_HOST_NOTIFICATIONS(self, servicegroup): for service in servicegroup: self.ENABLE_HOST_NOTIFICATIONS(service.host) # ENABLE_SERVICEGROUP_PASSIVE_HOST_CHECKS;<servicegroup_name> def ENABLE_SERVICEGROUP_PASSIVE_HOST_CHECKS(self, servicegroup): for service in servicegroup: self.ENABLE_PASSIVE_HOST_CHECKS(service.host) # ENABLE_SERVICEGROUP_PASSIVE_SVC_CHECKS;<servicegroup_name> def ENABLE_SERVICEGROUP_PASSIVE_SVC_CHECKS(self, servicegroup): for service in servicegroup: self.ENABLE_PASSIVE_SVC_CHECKS(service) # ENABLE_SERVICEGROUP_SVC_CHECKS;<servicegroup_name> def ENABLE_SERVICEGROUP_SVC_CHECKS(self, servicegroup): for service in servicegroup: self.ENABLE_SVC_CHECK(service) # ENABLE_SERVICEGROUP_SVC_NOTIFICATIONS;<servicegroup_name> def ENABLE_SERVICEGROUP_SVC_NOTIFICATIONS(self, servicegroup): for service in servicegroup: self.ENABLE_SVC_NOTIFICATIONS(service) # ENABLE_SERVICE_FRESHNESS_CHECKS def ENABLE_SERVICE_FRESHNESS_CHECKS(self): if not self.conf.check_service_freshness: self.conf.modified_attributes |= MODATTR_FRESHNESS_CHECKS_ENABLED self.conf.check_service_freshness = True self.conf.explode_global_conf() self.sched.get_and_register_update_program_status_brok() # ENABLE_SVC_CHECK;<host_name>;<service_description> def ENABLE_SVC_CHECK(self, service): if not service.active_checks_enabled: service.modified_attributes |= MODATTR_ACTIVE_CHECKS_ENABLED service.active_checks_enabled = True self.sched.get_and_register_status_brok(service) # ENABLE_SVC_EVENT_HANDLER;<host_name>;<service_description> def ENABLE_SVC_EVENT_HANDLER(self, service): if not service.event_handler_enabled: service.modified_attributes |= MODATTR_EVENT_HANDLER_ENABLED service.event_handler_enabled = True self.sched.get_and_register_status_brok(service) # ENABLE_SVC_FLAP_DETECTION;<host_name>;<service_description> def ENABLE_SVC_FLAP_DETECTION(self, service): if not service.flap_detection_enabled: service.modified_attributes |= MODATTR_FLAP_DETECTION_ENABLED service.flap_detection_enabled = True self.sched.get_and_register_status_brok(service) # ENABLE_SVC_NOTIFICATIONS;<host_name>;<service_description> def ENABLE_SVC_NOTIFICATIONS(self, service): if not service.notifications_enabled: service.modified_attributes |= 
MODATTR_NOTIFICATIONS_ENABLED service.notifications_enabled = True self.sched.get_and_register_status_brok(service) # PROCESS_FILE;<file_name>;<delete> def PROCESS_FILE(self, file_name, delete): pass # TODO : say that check is PASSIVE # PROCESS_HOST_CHECK_RESULT;<host_name>;<status_code>;<plugin_output> def PROCESS_HOST_CHECK_RESULT(self, host, status_code, plugin_output): #raise a PASSIVE check only if needed if self.conf.log_passive_checks: logger.log('PASSIVE HOST CHECK: %s;%d;%s' % (host.get_name(), status_code, plugin_output)) now = time.time() cls = host.__class__ # If globally disable OR locally, do not launch if cls.accept_passive_checks and host.passive_checks_enabled: i = host.launch_check(now, force=True) for chk in host.actions: if chk.id == i: c = chk # Now we 'transform the check into a result' # So exit_status, output and status is eaten by the host c.exit_status = status_code c.get_outputs(plugin_output, host.max_plugins_output_length) c.status = 'waitconsume' c.check_time = now self.sched.nb_check_received += 1 # Ok now this result will be read by scheduler the next loop # PROCESS_SERVICE_CHECK_RESULT;<host_name>;<service_description>;<return_code>;<plugin_output> def PROCESS_SERVICE_CHECK_RESULT(self, service, return_code, plugin_output): # raise a PASSIVE check only if needed if self.conf.log_passive_checks: logger.log('PASSIVE SERVICE CHECK: %s;%s;%d;%s' % (service.host.get_name(), service.get_name(), return_code, plugin_output)) now = time.time() cls = service.__class__ # If globally disable OR locally, do not launch if cls.accept_passive_checks and service.passive_checks_enabled: i = service.launch_check(now, force=True) for chk in service.actions: if chk.id == i: c = chk # Now we 'transform the check into a result' # So exit_status, output and status is eaten by the service c.exit_status = return_code c.get_outputs(plugin_output, service.max_plugins_output_length) c.status = 'waitconsume' c.check_time = now self.sched.nb_check_received += 1 #Ok now this result will be reap by scheduler the next loop # READ_STATE_INFORMATION def READ_STATE_INFORMATION(self): pass # REMOVE_HOST_ACKNOWLEDGEMENT;<host_name> def REMOVE_HOST_ACKNOWLEDGEMENT(self, host): host.unacknowledge_problem() # REMOVE_SVC_ACKNOWLEDGEMENT;<host_name>;<service_description> def REMOVE_SVC_ACKNOWLEDGEMENT(self, service): service.unacknowledge_problem() # RESTART_PROGRAM def RESTART_PROGRAM(self): pass # SAVE_STATE_INFORMATION def SAVE_STATE_INFORMATION(self): pass # SCHEDULE_AND_PROPAGATE_HOST_DOWNTIME;<host_name>;<start_time>;<end_time>;<fixed>;<trigger_id>;<duration>;<author>;<comment> def SCHEDULE_AND_PROPAGATE_HOST_DOWNTIME(self, host, start_time, end_time, fixed, trigger_id, duration, author, comment): pass # SCHEDULE_AND_PROPAGATE_TRIGGERED_HOST_DOWNTIME;<host_name>;<start_time>;<end_time>;<fixed>;<trigger_id>;<duration>;<author>;<comment> def SCHEDULE_AND_PROPAGATE_TRIGGERED_HOST_DOWNTIME(self, host, start_time, end_time, fixed, trigger_id, duration, author, comment): pass # SCHEDULE_CONTACT_DOWNTIME;<contact_name>;<start_time>;<end_time>;<author>;<comment> def SCHEDULE_CONTACT_DOWNTIME(self, contact, start_time, end_time, author, comment): dt = ContactDowntime(contact, start_time, end_time, author, comment) contact.add_downtime(dt) self.sched.add(dt) self.sched.get_and_register_status_brok(contact) # SCHEDULE_FORCED_HOST_CHECK;<host_name>;<check_time> def SCHEDULE_FORCED_HOST_CHECK(self, host, check_time): host.schedule(force=True, force_time=check_time) 
self.sched.get_and_register_status_brok(host) # SCHEDULE_FORCED_HOST_SVC_CHECKS;<host_name>;<check_time> def SCHEDULE_FORCED_HOST_SVC_CHECKS(self, host, check_time): for s in host.services: self.SCHEDULE_FORCED_SVC_CHECK(s, check_time) self.sched.get_and_register_status_brok(s) # SCHEDULE_FORCED_SVC_CHECK;<host_name>;<service_description>;<check_time> def SCHEDULE_FORCED_SVC_CHECK(self, service, check_time): service.schedule(force=True, force_time=check_time) self.sched.get_and_register_status_brok(service) # SCHEDULE_HOSTGROUP_HOST_DOWNTIME;<hostgroup_name>;<start_time>;<end_time>;<fixed>;<trigger_id>;<duration>;<author>;<comment> def SCHEDULE_HOSTGROUP_HOST_DOWNTIME(self, hostgroup, start_time, end_time, fixed, trigger_id, duration, author, comment): pass # SCHEDULE_HOSTGROUP_SVC_DOWNTIME;<hostgroup_name>;<start_time>;<end_time>;<fixed>;<trigger_id>;<duration>;<author>;<comment> def SCHEDULE_HOSTGROUP_SVC_DOWNTIME(self, hostgroup, start_time, end_time, fixed, trigger_id, duration, author, comment): pass # SCHEDULE_HOST_CHECK;<host_name>;<check_time> def SCHEDULE_HOST_CHECK(self, host, check_time): host.schedule(force=False, force_time=check_time) self.sched.get_and_register_status_brok(host) # SCHEDULE_HOST_DOWNTIME;<host_name>;<start_time>;<end_time>;<fixed>;<trigger_id>;<duration>;<author>;<comment> def SCHEDULE_HOST_DOWNTIME(self, host, start_time, end_time, fixed, trigger_id, duration, author, comment): dt = Downtime(host, start_time, end_time, fixed, trigger_id, duration, author, comment) host.add_downtime(dt) self.sched.add(dt) self.sched.get_and_register_status_brok(host) if trigger_id != 0 and trigger_id in self.sched.downtimes: self.sched.downtimes[trigger_id].trigger_me(dt) # SCHEDULE_HOST_SVC_CHECKS;<host_name>;<check_time> def SCHEDULE_HOST_SVC_CHECKS(self, host, check_time): for s in host.services: self.SCHEDULE_SVC_CHECK(s, check_time) self.sched.get_and_register_status_brok(s) # SCHEDULE_HOST_SVC_DOWNTIME;<host_name>;<start_time>;<end_time>;<fixed>;<trigger_id>;<duration>;<author>;<comment> def SCHEDULE_HOST_SVC_DOWNTIME(self, host, start_time, end_time, fixed, trigger_id, duration, author, comment): for s in host.services: self.SCHEDULE_SVC_DOWNTIME(s, start_time, end_time, fixed, trigger_id, duration, author, comment) # SCHEDULE_SERVICEGROUP_HOST_DOWNTIME;<servicegroup_name>;<start_time>;<end_time>;<fixed>;<trigger_id>;<duration>;<author>;<comment> def SCHEDULE_SERVICEGROUP_HOST_DOWNTIME(self, servicegroup, start_time, end_time, fixed, trigger_id, duration, author, comment): for h in [s.host for s in servicegroup.get_services()]: self.SCHEDULE_HOST_DOWNTIME(h, start_time, end_time, fixed, trigger_id, duration, author, comment) # SCHEDULE_SERVICEGROUP_SVC_DOWNTIME;<servicegroup_name>;<start_time>;<end_time>;<fixed>;<trigger_id>;<duration>;<author>;<comment> def SCHEDULE_SERVICEGROUP_SVC_DOWNTIME(self, servicegroup, start_time, end_time, fixed, trigger_id, duration, author, comment): for s in servicegroup.get_services(): self.SCHEDULE_SVC_DOWNTIME(s, start_time, end_time, fixed, trigger_id, duration, author, comment) # SCHEDULE_SVC_CHECK;<host_name>;<service_description>;<check_time> def SCHEDULE_SVC_CHECK(self, service, check_time): service.schedule(force=False, force_time=check_time) self.sched.get_and_register_status_brok(service) # SCHEDULE_SVC_DOWNTIME;<host_name>;<service_desription><start_time>;<end_time>;<fixed>;<trigger_id>;<duration>;<author>;<comment> def SCHEDULE_SVC_DOWNTIME(self, service, start_time, end_time, fixed, trigger_id, duration, author, comment): dt 
= Downtime(service, start_time, end_time, fixed, trigger_id, duration, author, comment) service.add_downtime(dt) self.sched.add(dt) self.sched.get_and_register_status_brok(service) if trigger_id != 0 and trigger_id in self.sched.downtimes: self.sched.downtimes[trigger_id].trigger_me(dt) # SEND_CUSTOM_HOST_NOTIFICATION;<host_name>;<options>;<author>;<comment> def SEND_CUSTOM_HOST_NOTIFICATION(self, host, options, author, comment): pass # SEND_CUSTOM_SVC_NOTIFICATION;<host_name>;<service_description>;<options>;<author>;<comment> def SEND_CUSTOM_SVC_NOTIFICATION(self, service, options, author, comment): pass # SET_HOST_NOTIFICATION_NUMBER;<host_name>;<notification_number> def SET_HOST_NOTIFICATION_NUMBER(self, host, notification_number): pass # SET_SVC_NOTIFICATION_NUMBER;<host_name>;<service_description>;<notification_number> def SET_SVC_NOTIFICATION_NUMBER(self, service, notification_number): pass # SHUTDOWN_PROGRAM def SHUTDOWN_PROGRAM(self): pass # START_ACCEPTING_PASSIVE_HOST_CHECKS def START_ACCEPTING_PASSIVE_HOST_CHECKS(self): if not self.conf.accept_passive_host_checks: self.conf.modified_attributes |= MODATTR_PASSIVE_CHECKS_ENABLED self.conf.accept_passive_host_checks = True self.conf.explode_global_conf() self.sched.get_and_register_update_program_status_brok() # START_ACCEPTING_PASSIVE_SVC_CHECKS def START_ACCEPTING_PASSIVE_SVC_CHECKS(self): if not self.conf.accept_passive_service_checks: self.conf.modified_attributes |= MODATTR_PASSIVE_CHECKS_ENABLED self.conf.accept_passive_service_checks = True self.conf.explode_global_conf() self.sched.get_and_register_update_program_status_brok() # START_EXECUTING_HOST_CHECKS def START_EXECUTING_HOST_CHECKS(self): if not self.conf.execute_host_checks: self.conf.modified_attributes |= MODATTR_ACTIVE_CHECKS_ENABLED self.conf.execute_host_checks = True self.conf.explode_global_conf() self.sched.get_and_register_update_program_status_brok() # START_EXECUTING_SVC_CHECKS def START_EXECUTING_SVC_CHECKS(self): if not self.conf.execute_service_checks: self.conf.modified_attributes |= MODATTR_ACTIVE_CHECKS_ENABLED self.conf.execute_service_checks = True self.conf.explode_global_conf() self.sched.get_and_register_update_program_status_brok() # START_OBSESSING_OVER_HOST;<host_name> def START_OBSESSING_OVER_HOST(self, host): if not host.obsess_over_host: host.modified_attributes |= MODATTR_OBSESSIVE_HANDLER_ENABLED host.obsess_over_host = True self.sched.get_and_register_status_brok(host) # START_OBSESSING_OVER_HOST_CHECKS def START_OBSESSING_OVER_HOST_CHECKS(self): if not self.conf.obsess_over_hosts: self.conf.modified_attributes |= MODATTR_OBSESSIVE_HANDLER_ENABLED self.conf.obsess_over_hosts = True self.conf.explode_global_conf() self.sched.get_and_register_update_program_status_brok() # START_OBSESSING_OVER_SVC;<host_name>;<service_description> def START_OBSESSING_OVER_SVC(self, service): if not service.obsess_over_service: service.modified_attributes |= MODATTR_OBSESSIVE_HANDLER_ENABLED service.obsess_over_service = True self.sched.get_and_register_status_brok(service) # START_OBSESSING_OVER_SVC_CHECKS def START_OBSESSING_OVER_SVC_CHECKS(self): if not self.conf.obsess_over_services: self.conf.modified_attributes |= MODATTR_OBSESSIVE_HANDLER_ENABLED self.conf.obsess_over_services = True self.conf.explode_global_conf() self.sched.get_and_register_update_program_status_brok() # STOP_ACCEPTING_PASSIVE_HOST_CHECKS def STOP_ACCEPTING_PASSIVE_HOST_CHECKS(self): if self.conf.accept_passive_host_checks: self.conf.modified_attributes |= 
MODATTR_PASSIVE_CHECKS_ENABLED self.conf.accept_passive_host_checks = False self.conf.explode_global_conf() self.sched.get_and_register_update_program_status_brok() # STOP_ACCEPTING_PASSIVE_SVC_CHECKS def STOP_ACCEPTING_PASSIVE_SVC_CHECKS(self): if self.conf.accept_passive_service_checks: self.conf.modified_attributes |= MODATTR_PASSIVE_CHECKS_ENABLED self.conf.accept_passive_service_checks = False self.conf.explode_global_conf() self.sched.get_and_register_update_program_status_brok() # STOP_EXECUTING_HOST_CHECKS def STOP_EXECUTING_HOST_CHECKS(self): if self.conf.execute_host_checks: self.conf.modified_attributes |= MODATTR_ACTIVE_CHECKS_ENABLED self.conf.execute_host_checks = False self.conf.explode_global_conf() self.sched.get_and_register_update_program_status_brok() # STOP_EXECUTING_SVC_CHECKS def STOP_EXECUTING_SVC_CHECKS(self): if self.conf.execute_service_checks: self.conf.modified_attributes |= MODATTR_ACTIVE_CHECKS_ENABLED self.conf.execute_service_checks = False self.conf.explode_global_conf() self.sched.get_and_register_update_program_status_brok() # STOP_OBSESSING_OVER_HOST;<host_name> def STOP_OBSESSING_OVER_HOST(self, host): if host.obsess_over_host: host.modified_attributes |= MODATTR_OBSESSIVE_HANDLER_ENABLED host.obsess_over_host = False self.sched.get_and_register_status_brok(host) # STOP_OBSESSING_OVER_HOST_CHECKS def STOP_OBSESSING_OVER_HOST_CHECKS(self): if self.conf.obsess_over_hosts: self.conf.modified_attributes |= MODATTR_OBSESSIVE_HANDLER_ENABLED self.conf.obsess_over_hosts = False self.conf.explode_global_conf() self.sched.get_and_register_update_program_status_brok() # STOP_OBSESSING_OVER_SVC;<host_name>;<service_description> def STOP_OBSESSING_OVER_SVC(self, service): if service.obsess_over_service: service.modified_attributes |= MODATTR_OBSESSIVE_HANDLER_ENABLED service.obsess_over_service = False self.sched.get_and_register_status_brok(service) # STOP_OBSESSING_OVER_SVC_CHECKS def STOP_OBSESSING_OVER_SVC_CHECKS(self): if self.conf.obsess_over_services: self.conf.modified_attributes |= MODATTR_OBSESSIVE_HANDLER_ENABLED self.conf.obsess_over_services = False self.conf.explode_global_conf() self.sched.get_and_register_update_program_status_brok() ### Now the shinken specific ones # LAUNCH_SVC_EVENT_HANDLER;<host_name>;<service_description> def LAUNCH_SVC_EVENT_HANDLER(self, service): service.get_event_handlers(externalcmd=True) # LAUNCH_SVC_EVENT_HANDLER;<host_name>;<service_description> def LAUNCH_HOST_EVENT_HANDLER(self, host): host.get_event_handlers(externalcmd=True) # ADD_SIMPLE_HOST_DEPENDENCY;<host_name>;<host_name> def ADD_SIMPLE_HOST_DEPENDENCY(self, son, father): if not son.is_linked_with_host(father): print "Doing simple link between", son.get_name(), 'and', father.get_name() # Flag them so the modules will know that a topology change # happened son.topology_change = True father.topology_change = True # Now do the work # Add a dep link between the son and the father son.add_host_act_dependency(father, ['w', 'u', 'd'], None, True) self.sched.get_and_register_status_brok(son) self.sched.get_and_register_status_brok(father) # ADD_SIMPLE_HOST_DEPENDENCY;<host_name>;<host_name> def DEL_HOST_DEPENDENCY(self, son, father): if son.is_linked_with_host(father): print "removing simple link between", son.get_name(), 'and', father.get_name() # Flag them so the modules will know that a topology change # happened son.topology_change = True father.topology_change = True # Now do the work son.del_host_act_dependency(father) 
            self.sched.get_and_register_status_brok(son)
            self.sched.get_and_register_status_brok(father)

    # ADD_SIMPLE_POLLER;realm_name;poller_name;address;port
    def ADD_SIMPLE_POLLER(self, realm_name, poller_name, address, port):
        print "I need to add the poller", realm_name, poller_name, address, port
        # First we look for the realm
        r = self.conf.realms.find_by_name(realm_name)
        if r is None:
            print "Sorry, the realm %s is unknown" % realm_name
            return
        print "We found the realm", r
        # TODO : backport this in the config class?
        # We create the PollerLink object
        t = {'poller_name': poller_name, 'address': address, 'port': port}
        p = PollerLink(t)
        p.fill_default()
        p.pythonize()
        p.prepare_for_conf()
        parameters = {'max_plugins_output_length': self.conf.max_plugins_output_length}
        p.add_global_conf_parameters(parameters)
        self.arbiter.conf.pollers[p.id] = p
        self.arbiter.dispatcher.elements.append(p)
        self.arbiter.dispatcher.satellites.append(p)
        r.pollers.append(p)
        r.count_pollers()
        r.fill_potential_pollers()
        print "Poller %s added" % poller_name
        print "Potential", r.get_potential_satellites_by_type('poller')


if __name__ == '__main__':
    FIFO_PATH = '/tmp/my_fifo'
    if os.path.exists(FIFO_PATH):
        os.unlink(FIFO_PATH)
    if not os.path.exists(FIFO_PATH):
        os.umask(0)
        os.mkfifo(FIFO_PATH, 0660)
    my_fifo = open(FIFO_PATH, 'w+')
    print "my_fifo:", my_fifo
    print open(FIFO_PATH, 'r').readline()
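For orientation, a minimal sketch of how a caller could feed one of the commands above into the named pipe created by the demo block; it assumes the same /tmp/my_fifo path, uses the conventional Nagios-style "[timestamp] COMMAND;arg;..." framing, and the host and service names are placeholders.

import time

FIFO_PATH = '/tmp/my_fifo'  # same path as the demo block above

# Nagios-style external command line: "[timestamp] COMMAND;arg1;arg2;..."
# "myhost" and "Load" are placeholder host/service names.
command = '[%d] ENABLE_SVC_NOTIFICATIONS;myhost;Load\n' % int(time.time())

# Opening a FIFO for writing blocks until something holds the read end open
# (for example the demo block above, or a daemon consuming the pipe).
with open(FIFO_PATH, 'w') as fifo:
    fifo.write(command)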
agpl-3.0
-5,605,207,427,382,680,000
47.838409
164
0.630967
false
3.566028
false
false
false
carawalter/RiverCorridorPolygons
ManagementInterface.py
1
19417
################################################################################### # ArcGIS Interface Module for management tools # This module includes classes to interface with ArcGIS. # The class provides insulation from ArcGIS changes and provides friendlier error # messages. # Original Author: Jim Graham # Date: 4th of November, 2011 # # Modified by Cara Walter # Modified: 2/23/2013 # Most descriptions directly from arcpy documentation ################################################################################### import arcpy # import ArcGIS Python bindings ################################################################################### # Class to interface with data management ################################################################################### class ManagementInterface: # class to interface with data management tools ################################################################################### # Constructor for the management interface class ################################################################################### def __init__(self): # called when the class is created # Set environment to allow file overwrite arcpy.env.overwriteOutput=True ################################################################################### # Add a field to a shapefile attribute table # Inputs: # InShapefile - polyline shapefile with parallel lines # FieldName - Field name string # FieldType - Field type string: e.g. "TEXT", "DOUBLE", "FLOAT", "LONG" # FieldDigits - integer for number of digits to store in field # FieldDecimal - integer for number of decimal places for field # FieldLength - integer for length of field if text or blob ################################################################################### def AddField(self,InputShapefile,FieldName,FieldType,FieldDigits,FieldDecimal,FieldLength): # add a field to a attribute table try: # Create a list of the field names TheFieldNames = [f.name for f in arcpy.ListFields(InputShapefile)] # Check to see if field already exists for Item in TheFieldNames: if FieldName==Item: raise RuntimeError(FieldName+" already exists.") #Call function depending on the type of field if FieldType==('TEXT' or 'BLOB'): arcpy.management.AddField(InputShapefile,FieldName,FieldType, "#","#",FieldLength) elif FieldType==('LONG' or 'SHORT'): arcpy.management.AddField(InputShapefile,FieldName,FieldType,FieldDigits) elif FieldType==('DATE' or 'RASTER' or 'GUID'): arcpy.management.AddField(InputShapefile,FieldName,FieldType) else: arcpy.management.AddField(InputShapefile,FieldName,FieldType, FieldDigits,FieldDecimal) except Exception, err: # an error occurred (probably in arcGIS) raise RuntimeError("** Error: AddField Failed ("+str(err)+")") #raise "grabs" error for use in higher level ################################################################################### # Appends multiple input datasets into an existing target dataset. 
# Inputs: # InShapefile - Shapefile name as a string # TargetShapefile - destination shapefile path and name as a string ################################################################################### def Append(self,InShapefile,TargetShapefile): # convert polygon to polyline try: arcpy.management.Append(InShapefile,TargetShapefile) except Exception, err: # an error occurred (probably in arcGIS) raise RuntimeError("** Error: Append Failed ("+str(err)+")") #raise "grabs" error for use in higher level ################################################################################### # Converts polygon boundaries to lines, or splitting line, polygon, or both features at their intersections. # Inputs: # InShapefile - shapefile path and name to be converted # OutLayer - string for layer name ################################################################################### def CreateLayer(self,InShapefile,OutLayer): # create feature layer from feature class try: arcpy.management.MakeFeatureLayer(InShapefile,OutLayer) except Exception, err: # an error occurred (probably in arcGIS) raise RuntimeError("** Error: CreateLayer Failed ("+str(err)+")") #raise "grabs" error for use in higher level ################################################################################### # Copies features from the input feature class or layer to a new feature class. # Inputs: # InLayer - Layer name as a string # OutShapefile - copied shapefile path and name as a string ################################################################################### def CopyFeatures(self,InLayer,OutShapefile): # convert polygon to polyline try: arcpy.management.CopyFeatures(InLayer,OutShapefile) except Exception, err: # an error occurred (probably in arcGIS) raise RuntimeError("** Error: CopyFeatures Failed ("+str(err)+")") #raise "grabs" error for use in higher level ################################################################################### # Determines the total number of rows for a feature class, table, layer, or raster. 
# Inputs: # TheTable: the string for the name and path of a feature class, table, layer, or raster # Output: # TheCount: an integer for the number of rows ################################################################################### def CountRows(self,TheTable): # count rows try: #As is returns arcobject, therefore need int, and getOutput(0) TheCount=int(arcpy.management.GetCount(TheTable).getOutput(0)) return(TheCount) except Exception, err: # an error occurred (probably in arcGIS) raise RuntimeError("** Error: CountRows Failed ("+str(err)+")") #raise "grabs" error for use in higher level ################################################################################### # Permanently deletes data from disk # Inputs: # InData - Data element name as a string ################################################################################### def Delete(self,InData): try: arcpy.management.Delete(InData) except Exception, err: # an error occurred (probably in arcGIS) raise RuntimeError("** Error: Delete Failed ("+str(err)+")") #raise "grabs" error for use in higher level ################################################################################### # Deletes a field from a table # Inputs: # InTable - table path and name to be manipulated # DropField - string for field name to be deleted ################################################################################### def DeleteField(self,InTable,DropField): # create feature layer from feature class try: arcpy.management.DeleteField(InTable,DropField) except Exception, err: # an error occurred (probably in arcGIS) raise RuntimeError("** Error: DeleteField Failed ("+str(err)+")") #raise "grabs" error for use in higher level ################################################################################### # Aggregates features based on specified attributes. # Inputs: # InShapefile - shapefile path and name to be merged # OutShapefile - merged polyline shapefile path and name # DissolveField - The field or fields on which to aggregate features. # StatsField - The fields and statistics with which to summarize attributes: e.g. 
"FIRST" # Multi - Specifies whether multipart features are allowed in the output feature class: "MULTI_PART", "SINGLE_PART" # Unsplit - Controls how line features are dissolved: "DISSOLVE_LINES": single feature, # "UNSPLIT_LINES": single feature only when lines share a vertex ################################################################################### def Dissolve(self,InShapefile,OutShapefile,DissolveField,StatsField,Multi,Unsplit): # merge polylines try: arcpy.management.Dissolve(InShapefile,OutShapefile,DissolveField,StatsField,Multi,Unsplit) except Exception, err: # an error occurred (probably in arcGIS) raise RuntimeError("** Error: Dissolve Failed ("+str(err)+")") #raise "grabs" error for use in higher level ################################################################################### # Merge together multiple shapefiles into one # Inputs: # InShapefiles - shapefile names as a list - or separted by semi-colons # OutShapefile - output shapefile path and name as a string ################################################################################### def MergeShapefiles(self,InShapefiles,OutShapefile): try: arcpy.management.Merge(InShapefiles,OutShapefile,"#") except Exception, err: # an error occurred (probably in arcGIS) raise RuntimeError("** Error: MergeShapefiles Failed ("+str(err)+")") #raise "grabs" error for use in higher level ################################################################################### # Creates a feature class containing singlepart features generated by separating # multipart input features. # Inputs: # InShapefiles - input shapefile path and name as a string # OutShapefile - output shapefile path and name as a string ################################################################################### def Multipart2Single(self,InShapefiles,OutShapefile): try: arcpy.management.MultipartToSinglepart(InShapefiles,OutShapefile) except Exception, err: # an error occurred (probably in arcGIS) raise RuntimeError("** Error: MergeShapefiles Failed ("+str(err)+")") #raise "grabs" error for use in higher level ################################################################################### # Creates lines from points according to a field and sorts according to a field # Inputs: # InShapefile - Shapefile name as a string # LineField - string of field name to define lines by # SortField - string of field name to sort lines by # OutShapefile - output shapefile path and name as a string ################################################################################### def Points2Line(self,InShapefile,OutShapefile,LineField,SortField): # convert polygon to polyline try: arcpy.management.PointsToLine(InShapefile,OutShapefile,LineField,SortField) except Exception, err: # an error occurred (probably in arcGIS) raise RuntimeError("** Error: Points2Line Failed ("+str(err)+")") #raise "grabs" error for use in higher level ################################################################################### # Converts polygon boundaries to lines, or splitting line, polygon, or both features at their intersections. # Inputs: # InShapefile - shapefile path and name to be converted # OutShapefile - polyline shapefile path and name # MinDistance - The minimum distance separating all feature coordinates as an integer # Attributes - Specifies whether to preserve or omit the input attributes in the output feature class. 
################################################################################### def Polygon2Polyline(self,InShapefile,OutShapefile,MinDistance,Attributes): # convert polygon to polyline try: arcpy.management.FeatureToLine(InShapefile,OutShapefile,MinDistance,Attributes) except Exception, err: # an error occurred (probably in arcGIS) raise RuntimeError("** Error: Polygon2Polyline Failed ("+str(err)+")") #raise "grabs" error for use in higher level ################################################################################### # Projects a shapefile from one coordinate system to another # Inputs: # InShapefile - shapefile path and name to be converted # OutShapefile - polyline shapefile path and name # OutCoordinateSys - name of output projection as a string # TransMethod - Transformation method as a string ################################################################################### def ProjectShapefile(self,InShapefile,OutShapefile,OutCoordinateSys,TransMethod): try: arcpy.management.Project(InShapefile,OutShapefile,OutCoordinateSys,TransMethod) except Exception, err: # an error occurred (probably in arcGIS) raise RuntimeError("** Error: ProjectFile Failed ("+str(err)+")") #raise "grabs" error for use in higher level ################################################################################### # Creates random points along a guide shapefile or within extent # Inputs: # TheFilePath - folder to put output shapefile in # OutPoints - name of output point shapefile # InPolyline - polyline shapefile path and name for "guide" (can be "" if using extent) # Extent - extent to use to constain points (can be "" if using shapefile) # PointNumber - number of points to create or field used to create # PointSpacing - minimum allowed spacing between points # Multi - multipart or single part feature: "POINT" (default), or "MULTIPOINT" # MultiSize - number of points in each multipoint ################################################################################### def RandomPts(self,TheFilePath,OutPoints,InPolyline,Extent,PointNumber,PointSpacing,Multi,MultiSize): # create guided random points try: arcpy.management.CreateRandomPoints(TheFilePath,OutPoints, InPolyline,Extent,PointNumber,PointSpacing,Multi,MultiSize) except Exception, err: # an error occurred (probably in arcGIS) raise RuntimeError("** Error: RandomPts Failed ("+str(err)+")") #raise "grabs" error for use in higher level ################################################################################### # Extacts the properties for a raster # Inputs: # InRaster - raster path and name as a string # InParameters - string or list of desired properties as strings # Outputs: # OutValues - string or list of values for properties as strings ################################################################################### def RasterProp(self,InRaster,InParameters): try: if isinstance(InParameters,str): #Get the geoprocessing result object PropertyObject=arcpy.management.GetRasterProperties(InRaster,InParameters) #Get the value from geoprocessing result object OutValues=PropertyObject.getOutput(0) else: OutValues=[] for Parameter in InParameters: #Get the geoprocessing result object PropertyObject=arcpy.management.GetRasterProperties(InRaster,Parameter) #Get the value from geoprocessing result object OutValues=OutValues+[PropertyObject.getOutput(0)] return(OutValues) except Exception, err: # an error occurred (probably in arcGIS) raise RuntimeError("** Error: RasterProp Failed ("+str(err)+")") #raise "grabs" error for use in 
higher level ################################################################################### # Adds, updates, or removes a selection on a layer or table view based on an attribute query. # Inputs: # InLayer - Layer name as a string # Type - type of selection as string: e.g. "NEW_SELECTION", "ADD_TO_SELECTION" # SQLexp - SQL statement used to select a subset of records. ################################################################################### def SelectUsingAttributes(self,InLayer,Type,SQLexp): try: arcpy.management.SelectLayerByAttribute(InLayer,Type,SQLexp) except Exception, err: # an error occurred (probably in arcGIS) raise RuntimeError("** Error: SelectUsingAttribute Failed ("+str(err)+")") #raise "grabs" error for use in higher level ################################################################################### # Adds, updates, or removes a selection on a layer or table view based on an attribute query. # Inputs: # InLayer - Layer name as a string # Relationship - The spatial relationship to be evaluated. ("INTERSECT" is default) # SelectFeatures - The features in the Input Feature Layer will be selected based on their relationship # to the features from this layer or feature class. # Distance - value for how far away to look # Type - type of selection as string: e.g. "NEW_SELECTION", "ADD_TO_SELECTION" ################################################################################### def SelectUsingLocation(self,InLayer,Relationship,SelectFeatures,Distance,Type): try: arcpy.management.SelectLayerByLocation(InLayer,Relationship,SelectFeatures,Distance,Type) except Exception, err: # an error occurred (probably in arcGIS) raise RuntimeError("** Error: SelectUsingAttribute Failed ("+str(err)+")") #raise "grabs" error for use in higher level ################################################################################### # Split a line at points # Inputs: # InLine: Input polyline Shapefile # InPoints: Input point shapefile to split at # OutShapefile: name of output shapefile # Radius: Number used to split lines by their proximity to point features. ################################################################################### def SplitLineAtPoints(self,InLine,InPoints,OutShapefile,Radius): try: arcpy.management.SplitLineAtPoint(InLine,InPoints,OutShapefile,Radius) except Exception, err: # an error occurred (probably in arcGIS) raise RuntimeError("** Error: SplitLineAtPoints Failed ("+str(err)+")") #raise "grabs" error for use in higher level ################################################################################### # Converts feature vertices to points # Inputs: # InShapefile - shapefile path and name to be converted # OutShapefile - point shapefile path and name # WhichVertices - species which vertices to create points at: e.g. "ALL", "MID", "START", "END", "BOTH_ENDS"... 
    ###################################################################################
    def Vertices2Points(self, InShapefile, OutShapefile, WhichVertices):  # convert feature vertices to points
        try:
            arcpy.management.FeatureVerticesToPoints(InShapefile, OutShapefile, WhichVertices)
        except Exception, err:  # an error occurred (probably in arcGIS)
            raise RuntimeError("** Error: Vertices2Points Failed ("+str(err)+")")  # raise "grabs" error for use in higher level

    ###################################################################################
    # Write to a field in a shapefile attribute table
    # Inputs:
    #   InShapefile: Input Shapefile
    #   FieldName: Field name string
    #   Statement: string expression for entering into field
    #   StatementType: Code base for the expression: "VB" or "PYTHON"
    ###################################################################################
    def WriteField(self, InShapefile, FieldName, Statement, StatementType):  # write to a field in an attribute table
        try:
            arcpy.management.CalculateField(InShapefile, FieldName, Statement, StatementType)
        except Exception, err:  # an error occurred (probably in arcGIS)
            raise RuntimeError("** Error: WriteField Failed ("+str(err)+")")  # raise "grabs" error for use in higher level
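A minimal usage sketch of the wrapper class above, assuming the module is importable as ManagementInterface and that arcpy is installed; the shapefile path, field name, and calculate expression are placeholders, not taken from the original script.

from ManagementInterface import ManagementInterface

manager = ManagementInterface()

# Placeholder shapefile and field: add a numeric field, fill it, then count features.
shapefile = "C:/Data/Rivers.shp"
manager.AddField(shapefile, "LengthKm", "DOUBLE", 10, 3, 0)
manager.WriteField(shapefile, "LengthKm", "!shape.length! / 1000", "PYTHON")  # illustrative expression
print(manager.CountRows(shapefile))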
mit
115,150,130,067,006,380
54.636103
133
0.57563
false
4.493636
false
false
false
modcloth/tory-client
tory_client/sync/joyent.py
1
1277
# vim:fileencoding=utf-8
from ..client import put_host
from ..junkdrawer import stringified_dict


def sync_machine(log, server, auth_token, machine):
    machine = machine.copy()
    host_def = machine.copy()
    for key in ('id', 'ips'):
        host_def.pop(key)

    interpreter = '/usr/bin/python'
    user = 'ubuntu'
    if machine.get('type') == 'smartmachine':
        interpreter = '/opt/local/bin/python'
        user = 'root'

    hostname = machine['name']
    ip = machine.pop('ips')[0]

    host_def.update({
        'ip': ip,
        'name': hostname,
        'vars': {
            'ansible_python_interpreter': interpreter,
            'ansible_ssh_host': ip,
            'ansible_ssh_user': user,
            'ansible_inventory_hostname': hostname,
            'disk': str(machine.pop('disk')),
            'joyent_id': str(machine.pop('id')),
            'memory': str(machine.pop('memory')),
        },
        'tags': stringified_dict(machine.pop('tags')),
    })

    status = put_host(server, auth_token, host_def)
    if status == 201:
        log.info('Added host %s', hostname)
    elif status == 200:
        log.info('Updated host %s', hostname)
    else:
        log.warn('Failed to create or update host %s: %s', hostname, status)
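A hedged sketch of how sync_machine might be called, inferred from the keys the function reads and pops; the server URL, token, and every value in the machine payload are illustrative only.

import logging

from tory_client.sync.joyent import sync_machine

log = logging.getLogger('tory-sync')

# Illustrative Joyent machine payload covering the keys sync_machine uses.
machine = {
    'id': 'hypothetical-machine-id',
    'name': 'web0',
    'type': 'smartmachine',
    'ips': ['10.0.0.12'],
    'disk': 25600,
    'memory': 1024,
    'tags': {'role': 'web'},
}

sync_machine(log, 'https://tory.example.com', 'hypothetical-token', machine)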
mit
2,187,057,721,023,259,600
28.697674
58
0.555991
false
3.680115
false
false
false
MeshCollider/bitcoin
test/functional/wallet_basic.py
8
35202
#!/usr/bin/env python3 # Copyright (c) 2014-2020 The Bitcoin Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. """Test the wallet.""" from decimal import Decimal from itertools import product from test_framework.test_framework import BitcoinTestFramework from test_framework.util import ( assert_array_result, assert_equal, assert_fee_amount, assert_raises_rpc_error, ) from test_framework.wallet_util import test_address OUT_OF_RANGE = "Amount out of range" class WalletTest(BitcoinTestFramework): def set_test_params(self): self.num_nodes = 4 self.extra_args = [[ "-acceptnonstdtxn=1", ]] * self.num_nodes self.setup_clean_chain = True self.supports_cli = False def skip_test_if_missing_module(self): self.skip_if_no_wallet() def setup_network(self): self.setup_nodes() # Only need nodes 0-2 running at start of test self.stop_node(3) self.connect_nodes(0, 1) self.connect_nodes(1, 2) self.connect_nodes(0, 2) self.sync_all(self.nodes[0:3]) def check_fee_amount(self, curr_balance, balance_with_fee, fee_per_byte, tx_size): """Return curr_balance after asserting the fee was in range""" fee = balance_with_fee - curr_balance assert_fee_amount(fee, tx_size, fee_per_byte * 1000) return curr_balance def get_vsize(self, txn): return self.nodes[0].decoderawtransaction(txn)['vsize'] def run_test(self): # Check that there's no UTXO on none of the nodes assert_equal(len(self.nodes[0].listunspent()), 0) assert_equal(len(self.nodes[1].listunspent()), 0) assert_equal(len(self.nodes[2].listunspent()), 0) self.log.info("Mining blocks...") self.nodes[0].generate(1) walletinfo = self.nodes[0].getwalletinfo() assert_equal(walletinfo['immature_balance'], 50) assert_equal(walletinfo['balance'], 0) self.sync_all(self.nodes[0:3]) self.nodes[1].generate(101) self.sync_all(self.nodes[0:3]) assert_equal(self.nodes[0].getbalance(), 50) assert_equal(self.nodes[1].getbalance(), 50) assert_equal(self.nodes[2].getbalance(), 0) # Check that only first and second nodes have UTXOs utxos = self.nodes[0].listunspent() assert_equal(len(utxos), 1) assert_equal(len(self.nodes[1].listunspent()), 1) assert_equal(len(self.nodes[2].listunspent()), 0) self.log.info("Test gettxout") confirmed_txid, confirmed_index = utxos[0]["txid"], utxos[0]["vout"] # First, outputs that are unspent both in the chain and in the # mempool should appear with or without include_mempool txout = self.nodes[0].gettxout(txid=confirmed_txid, n=confirmed_index, include_mempool=False) assert_equal(txout['value'], 50) txout = self.nodes[0].gettxout(txid=confirmed_txid, n=confirmed_index, include_mempool=True) assert_equal(txout['value'], 50) # Send 21 BTC from 0 to 2 using sendtoaddress call. 
self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 11) mempool_txid = self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 10) self.log.info("Test gettxout (second part)") # utxo spent in mempool should be visible if you exclude mempool # but invisible if you include mempool txout = self.nodes[0].gettxout(confirmed_txid, confirmed_index, False) assert_equal(txout['value'], 50) txout = self.nodes[0].gettxout(confirmed_txid, confirmed_index, True) assert txout is None # new utxo from mempool should be invisible if you exclude mempool # but visible if you include mempool txout = self.nodes[0].gettxout(mempool_txid, 0, False) assert txout is None txout1 = self.nodes[0].gettxout(mempool_txid, 0, True) txout2 = self.nodes[0].gettxout(mempool_txid, 1, True) # note the mempool tx will have randomly assigned indices # but 10 will go to node2 and the rest will go to node0 balance = self.nodes[0].getbalance() assert_equal(set([txout1['value'], txout2['value']]), set([10, balance])) walletinfo = self.nodes[0].getwalletinfo() assert_equal(walletinfo['immature_balance'], 0) # Have node0 mine a block, thus it will collect its own fee. self.nodes[0].generate(1) self.sync_all(self.nodes[0:3]) # Exercise locking of unspent outputs unspent_0 = self.nodes[2].listunspent()[0] unspent_0 = {"txid": unspent_0["txid"], "vout": unspent_0["vout"]} assert_raises_rpc_error(-8, "Invalid parameter, expected locked output", self.nodes[2].lockunspent, True, [unspent_0]) self.nodes[2].lockunspent(False, [unspent_0]) assert_raises_rpc_error(-8, "Invalid parameter, output already locked", self.nodes[2].lockunspent, False, [unspent_0]) assert_raises_rpc_error(-6, "Insufficient funds", self.nodes[2].sendtoaddress, self.nodes[2].getnewaddress(), 20) assert_equal([unspent_0], self.nodes[2].listlockunspent()) self.nodes[2].lockunspent(True, [unspent_0]) assert_equal(len(self.nodes[2].listlockunspent()), 0) assert_raises_rpc_error(-8, "txid must be of length 64 (not 34, for '0000000000000000000000000000000000')", self.nodes[2].lockunspent, False, [{"txid": "0000000000000000000000000000000000", "vout": 0}]) assert_raises_rpc_error(-8, "txid must be hexadecimal string (not 'ZZZ0000000000000000000000000000000000000000000000000000000000000')", self.nodes[2].lockunspent, False, [{"txid": "ZZZ0000000000000000000000000000000000000000000000000000000000000", "vout": 0}]) assert_raises_rpc_error(-8, "Invalid parameter, unknown transaction", self.nodes[2].lockunspent, False, [{"txid": "0000000000000000000000000000000000000000000000000000000000000000", "vout": 0}]) assert_raises_rpc_error(-8, "Invalid parameter, vout index out of bounds", self.nodes[2].lockunspent, False, [{"txid": unspent_0["txid"], "vout": 999}]) # The lock on a manually selected output is ignored unspent_0 = self.nodes[1].listunspent()[0] self.nodes[1].lockunspent(False, [unspent_0]) tx = self.nodes[1].createrawtransaction([unspent_0], { self.nodes[1].getnewaddress() : 1 }) self.nodes[1].fundrawtransaction(tx,{"lockUnspents": True}) # fundrawtransaction can lock an input self.nodes[1].lockunspent(True, [unspent_0]) assert_equal(len(self.nodes[1].listlockunspent()), 0) tx = self.nodes[1].fundrawtransaction(tx,{"lockUnspents": True})['hex'] assert_equal(len(self.nodes[1].listlockunspent()), 1) # Send transaction tx = self.nodes[1].signrawtransactionwithwallet(tx)["hex"] self.nodes[1].sendrawtransaction(tx) assert_equal(len(self.nodes[1].listlockunspent()), 0) # Have node1 generate 100 blocks (so node0 can recover the fee) self.nodes[1].generate(100) 
self.sync_all(self.nodes[0:3]) # node0 should end up with 100 btc in block rewards plus fees, but # minus the 21 plus fees sent to node2 assert_equal(self.nodes[0].getbalance(), 100 - 21) assert_equal(self.nodes[2].getbalance(), 21) # Node0 should have two unspent outputs. # Create a couple of transactions to send them to node2, submit them through # node1, and make sure both node0 and node2 pick them up properly: node0utxos = self.nodes[0].listunspent(1) assert_equal(len(node0utxos), 2) # create both transactions txns_to_send = [] for utxo in node0utxos: inputs = [] outputs = {} inputs.append({"txid": utxo["txid"], "vout": utxo["vout"]}) outputs[self.nodes[2].getnewaddress()] = utxo["amount"] - 3 raw_tx = self.nodes[0].createrawtransaction(inputs, outputs) txns_to_send.append(self.nodes[0].signrawtransactionwithwallet(raw_tx)) # Have node 1 (miner) send the transactions self.nodes[1].sendrawtransaction(hexstring=txns_to_send[0]["hex"], maxfeerate=0) self.nodes[1].sendrawtransaction(hexstring=txns_to_send[1]["hex"], maxfeerate=0) # Have node1 mine a block to confirm transactions: self.nodes[1].generate(1) self.sync_all(self.nodes[0:3]) assert_equal(self.nodes[0].getbalance(), 0) assert_equal(self.nodes[2].getbalance(), 94) # Verify that a spent output cannot be locked anymore spent_0 = {"txid": node0utxos[0]["txid"], "vout": node0utxos[0]["vout"]} assert_raises_rpc_error(-8, "Invalid parameter, expected unspent output", self.nodes[0].lockunspent, False, [spent_0]) # Send 10 BTC normal address = self.nodes[0].getnewaddress("test") fee_per_byte = Decimal('0.001') / 1000 self.nodes[2].settxfee(fee_per_byte * 1000) txid = self.nodes[2].sendtoaddress(address, 10, "", "", False) self.nodes[2].generate(1) self.sync_all(self.nodes[0:3]) node_2_bal = self.check_fee_amount(self.nodes[2].getbalance(), Decimal('84'), fee_per_byte, self.get_vsize(self.nodes[2].gettransaction(txid)['hex'])) assert_equal(self.nodes[0].getbalance(), Decimal('10')) # Send 10 BTC with subtract fee from amount txid = self.nodes[2].sendtoaddress(address, 10, "", "", True) self.nodes[2].generate(1) self.sync_all(self.nodes[0:3]) node_2_bal -= Decimal('10') assert_equal(self.nodes[2].getbalance(), node_2_bal) node_0_bal = self.check_fee_amount(self.nodes[0].getbalance(), Decimal('20'), fee_per_byte, self.get_vsize(self.nodes[2].gettransaction(txid)['hex'])) self.log.info("Test sendmany") # Sendmany 10 BTC txid = self.nodes[2].sendmany('', {address: 10}, 0, "", []) self.nodes[2].generate(1) self.sync_all(self.nodes[0:3]) node_0_bal += Decimal('10') node_2_bal = self.check_fee_amount(self.nodes[2].getbalance(), node_2_bal - Decimal('10'), fee_per_byte, self.get_vsize(self.nodes[2].gettransaction(txid)['hex'])) assert_equal(self.nodes[0].getbalance(), node_0_bal) # Sendmany 10 BTC with subtract fee from amount txid = self.nodes[2].sendmany('', {address: 10}, 0, "", [address]) self.nodes[2].generate(1) self.sync_all(self.nodes[0:3]) node_2_bal -= Decimal('10') assert_equal(self.nodes[2].getbalance(), node_2_bal) node_0_bal = self.check_fee_amount(self.nodes[0].getbalance(), node_0_bal + Decimal('10'), fee_per_byte, self.get_vsize(self.nodes[2].gettransaction(txid)['hex'])) self.log.info("Test sendmany with fee_rate param (explicit fee rate in sat/vB)") fee_rate_sat_vb = 2 fee_rate_btc_kvb = fee_rate_sat_vb * 1e3 / 1e8 explicit_fee_rate_btc_kvb = Decimal(fee_rate_btc_kvb) / 1000 # Test passing fee_rate as a string txid = self.nodes[2].sendmany(amounts={address: 10}, fee_rate=str(fee_rate_sat_vb)) self.nodes[2].generate(1) 
self.sync_all(self.nodes[0:3]) balance = self.nodes[2].getbalance() node_2_bal = self.check_fee_amount(balance, node_2_bal - Decimal('10'), explicit_fee_rate_btc_kvb, self.get_vsize(self.nodes[2].gettransaction(txid)['hex'])) assert_equal(balance, node_2_bal) node_0_bal += Decimal('10') assert_equal(self.nodes[0].getbalance(), node_0_bal) # Test passing fee_rate as an integer amount = Decimal("0.0001") txid = self.nodes[2].sendmany(amounts={address: amount}, fee_rate=fee_rate_sat_vb) self.nodes[2].generate(1) self.sync_all(self.nodes[0:3]) balance = self.nodes[2].getbalance() node_2_bal = self.check_fee_amount(balance, node_2_bal - amount, explicit_fee_rate_btc_kvb, self.get_vsize(self.nodes[2].gettransaction(txid)['hex'])) assert_equal(balance, node_2_bal) node_0_bal += amount assert_equal(self.nodes[0].getbalance(), node_0_bal) for key in ["totalFee", "feeRate"]: assert_raises_rpc_error(-8, "Unknown named parameter key", self.nodes[2].sendtoaddress, address=address, amount=1, fee_rate=1, key=1) # Test setting explicit fee rate just below the minimum. self.log.info("Test sendmany raises 'fee rate too low' if fee_rate of 0.99999999 is passed") assert_raises_rpc_error(-6, "Fee rate (0.999 sat/vB) is lower than the minimum fee rate setting (1.000 sat/vB)", self.nodes[2].sendmany, amounts={address: 10}, fee_rate=0.99999999) self.log.info("Test sendmany raises if fee_rate of 0 or -1 is passed") assert_raises_rpc_error(-6, "Fee rate (0.000 sat/vB) is lower than the minimum fee rate setting (1.000 sat/vB)", self.nodes[2].sendmany, amounts={address: 10}, fee_rate=0) assert_raises_rpc_error(-3, OUT_OF_RANGE, self.nodes[2].sendmany, amounts={address: 10}, fee_rate=-1) self.log.info("Test sendmany raises if an invalid conf_target or estimate_mode is passed") for target, mode in product([-1, 0, 1009], ["economical", "conservative"]): assert_raises_rpc_error(-8, "Invalid conf_target, must be between 1 and 1008", # max value of 1008 per src/policy/fees.h self.nodes[2].sendmany, amounts={address: 1}, conf_target=target, estimate_mode=mode) for target, mode in product([-1, 0], ["btc/kb", "sat/b"]): assert_raises_rpc_error(-8, 'Invalid estimate_mode parameter, must be one of: "unset", "economical", "conservative"', self.nodes[2].sendmany, amounts={address: 1}, conf_target=target, estimate_mode=mode) self.start_node(3, self.nodes[3].extra_args) self.connect_nodes(0, 3) self.sync_all() # check if we can list zero value tx as available coins # 1. create raw_tx # 2. hex-changed one output to 0.0 # 3. sign and send # 4. 
check if recipient (node0) can list the zero value tx usp = self.nodes[1].listunspent(query_options={'minimumAmount': '49.998'})[0] inputs = [{"txid": usp['txid'], "vout": usp['vout']}] outputs = {self.nodes[1].getnewaddress(): 49.998, self.nodes[0].getnewaddress(): 11.11} raw_tx = self.nodes[1].createrawtransaction(inputs, outputs).replace("c0833842", "00000000") # replace 11.11 with 0.0 (int32) signed_raw_tx = self.nodes[1].signrawtransactionwithwallet(raw_tx) decoded_raw_tx = self.nodes[1].decoderawtransaction(signed_raw_tx['hex']) zero_value_txid = decoded_raw_tx['txid'] self.nodes[1].sendrawtransaction(signed_raw_tx['hex']) self.sync_all() self.nodes[1].generate(1) # mine a block self.sync_all() unspent_txs = self.nodes[0].listunspent() # zero value tx must be in listunspents output found = False for uTx in unspent_txs: if uTx['txid'] == zero_value_txid: found = True assert_equal(uTx['amount'], Decimal('0')) assert found self.log.info("Test -walletbroadcast") self.stop_nodes() self.start_node(0, ["-walletbroadcast=0"]) self.start_node(1, ["-walletbroadcast=0"]) self.start_node(2, ["-walletbroadcast=0"]) self.connect_nodes(0, 1) self.connect_nodes(1, 2) self.connect_nodes(0, 2) self.sync_all(self.nodes[0:3]) txid_not_broadcast = self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 2) tx_obj_not_broadcast = self.nodes[0].gettransaction(txid_not_broadcast) self.nodes[1].generate(1) # mine a block, tx should not be in there self.sync_all(self.nodes[0:3]) assert_equal(self.nodes[2].getbalance(), node_2_bal) # should not be changed because tx was not broadcasted # now broadcast from another node, mine a block, sync, and check the balance self.nodes[1].sendrawtransaction(tx_obj_not_broadcast['hex']) self.nodes[1].generate(1) self.sync_all(self.nodes[0:3]) node_2_bal += 2 tx_obj_not_broadcast = self.nodes[0].gettransaction(txid_not_broadcast) assert_equal(self.nodes[2].getbalance(), node_2_bal) # create another tx self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 2) # restart the nodes with -walletbroadcast=1 self.stop_nodes() self.start_node(0) self.start_node(1) self.start_node(2) self.connect_nodes(0, 1) self.connect_nodes(1, 2) self.connect_nodes(0, 2) self.sync_blocks(self.nodes[0:3]) self.nodes[0].generate(1) self.sync_blocks(self.nodes[0:3]) node_2_bal += 2 # tx should be added to balance because after restarting the nodes tx should be broadcast assert_equal(self.nodes[2].getbalance(), node_2_bal) # send a tx with value in a string (PR#6380 +) txid = self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), "2") tx_obj = self.nodes[0].gettransaction(txid) assert_equal(tx_obj['amount'], Decimal('-2')) txid = self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), "0.0001") tx_obj = self.nodes[0].gettransaction(txid) assert_equal(tx_obj['amount'], Decimal('-0.0001')) # check if JSON parser can handle scientific notation in strings txid = self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), "1e-4") tx_obj = self.nodes[0].gettransaction(txid) assert_equal(tx_obj['amount'], Decimal('-0.0001')) # General checks for errors from incorrect inputs # This will raise an exception because the amount is negative assert_raises_rpc_error(-3, OUT_OF_RANGE, self.nodes[0].sendtoaddress, self.nodes[2].getnewaddress(), "-1") # This will raise an exception because the amount type is wrong assert_raises_rpc_error(-3, "Invalid amount", self.nodes[0].sendtoaddress, self.nodes[2].getnewaddress(), "1f-4") # This will raise an exception since generate does not accept a string 
assert_raises_rpc_error(-1, "not an integer", self.nodes[0].generate, "2") if not self.options.descriptors: # This will raise an exception for the invalid private key format assert_raises_rpc_error(-5, "Invalid private key encoding", self.nodes[0].importprivkey, "invalid") # This will raise an exception for importing an address with the PS2H flag temp_address = self.nodes[1].getnewaddress("", "p2sh-segwit") assert_raises_rpc_error(-5, "Cannot use the p2sh flag with an address - use a script instead", self.nodes[0].importaddress, temp_address, "label", False, True) # This will raise an exception for attempting to dump the private key of an address you do not own assert_raises_rpc_error(-3, "Address does not refer to a key", self.nodes[0].dumpprivkey, temp_address) # This will raise an exception for attempting to get the private key of an invalid Bitcoin address assert_raises_rpc_error(-5, "Invalid Bitcoin address", self.nodes[0].dumpprivkey, "invalid") # This will raise an exception for attempting to set a label for an invalid Bitcoin address assert_raises_rpc_error(-5, "Invalid Bitcoin address", self.nodes[0].setlabel, "invalid address", "label") # This will raise an exception for importing an invalid address assert_raises_rpc_error(-5, "Invalid Bitcoin address or script", self.nodes[0].importaddress, "invalid") # This will raise an exception for attempting to import a pubkey that isn't in hex assert_raises_rpc_error(-5, "Pubkey must be a hex string", self.nodes[0].importpubkey, "not hex") # This will raise an exception for importing an invalid pubkey assert_raises_rpc_error(-5, "Pubkey is not a valid public key", self.nodes[0].importpubkey, "5361746f736869204e616b616d6f746f") # Import address and private key to check correct behavior of spendable unspents # 1. Send some coins to generate new UTXO address_to_import = self.nodes[2].getnewaddress() txid = self.nodes[0].sendtoaddress(address_to_import, 1) self.nodes[0].generate(1) self.sync_all(self.nodes[0:3]) self.log.info("Test sendtoaddress with fee_rate param (explicit fee rate in sat/vB)") prebalance = self.nodes[2].getbalance() assert prebalance > 2 address = self.nodes[1].getnewaddress() amount = 3 fee_rate_sat_vb = 2 fee_rate_btc_kvb = fee_rate_sat_vb * 1e3 / 1e8 # Test passing fee_rate as an integer txid = self.nodes[2].sendtoaddress(address=address, amount=amount, fee_rate=fee_rate_sat_vb) tx_size = self.get_vsize(self.nodes[2].gettransaction(txid)['hex']) self.nodes[0].generate(1) self.sync_all(self.nodes[0:3]) postbalance = self.nodes[2].getbalance() fee = prebalance - postbalance - Decimal(amount) assert_fee_amount(fee, tx_size, Decimal(fee_rate_btc_kvb)) prebalance = self.nodes[2].getbalance() amount = Decimal("0.001") fee_rate_sat_vb = 1.23 fee_rate_btc_kvb = fee_rate_sat_vb * 1e3 / 1e8 # Test passing fee_rate as a string txid = self.nodes[2].sendtoaddress(address=address, amount=amount, fee_rate=str(fee_rate_sat_vb)) tx_size = self.get_vsize(self.nodes[2].gettransaction(txid)['hex']) self.nodes[0].generate(1) self.sync_all(self.nodes[0:3]) postbalance = self.nodes[2].getbalance() fee = prebalance - postbalance - amount assert_fee_amount(fee, tx_size, Decimal(fee_rate_btc_kvb)) for key in ["totalFee", "feeRate"]: assert_raises_rpc_error(-8, "Unknown named parameter key", self.nodes[2].sendtoaddress, address=address, amount=1, fee_rate=1, key=1) # Test setting explicit fee rate just below the minimum. 
self.log.info("Test sendtoaddress raises 'fee rate too low' if fee_rate of 0.99999999 is passed") assert_raises_rpc_error(-6, "Fee rate (0.999 sat/vB) is lower than the minimum fee rate setting (1.000 sat/vB)", self.nodes[2].sendtoaddress, address=address, amount=1, fee_rate=0.99999999) self.log.info("Test sendtoaddress raises if fee_rate of 0 or -1 is passed") assert_raises_rpc_error(-6, "Fee rate (0.000 sat/vB) is lower than the minimum fee rate setting (1.000 sat/vB)", self.nodes[2].sendtoaddress, address=address, amount=10, fee_rate=0) assert_raises_rpc_error(-3, OUT_OF_RANGE, self.nodes[2].sendtoaddress, address=address, amount=1.0, fee_rate=-1) self.log.info("Test sendtoaddress raises if an invalid conf_target or estimate_mode is passed") for target, mode in product([-1, 0, 1009], ["economical", "conservative"]): assert_raises_rpc_error(-8, "Invalid conf_target, must be between 1 and 1008", # max value of 1008 per src/policy/fees.h self.nodes[2].sendtoaddress, address=address, amount=1, conf_target=target, estimate_mode=mode) for target, mode in product([-1, 0], ["btc/kb", "sat/b"]): assert_raises_rpc_error(-8, 'Invalid estimate_mode parameter, must be one of: "unset", "economical", "conservative"', self.nodes[2].sendtoaddress, address=address, amount=1, conf_target=target, estimate_mode=mode) # 2. Import address from node2 to node1 self.nodes[1].importaddress(address_to_import) # 3. Validate that the imported address is watch-only on node1 assert self.nodes[1].getaddressinfo(address_to_import)["iswatchonly"] # 4. Check that the unspents after import are not spendable assert_array_result(self.nodes[1].listunspent(), {"address": address_to_import}, {"spendable": False}) # 5. Import private key of the previously imported address on node1 priv_key = self.nodes[2].dumpprivkey(address_to_import) self.nodes[1].importprivkey(priv_key) # 6. Check that the unspents are now spendable on node1 assert_array_result(self.nodes[1].listunspent(), {"address": address_to_import}, {"spendable": True}) # Mine a block from node0 to an address from node1 coinbase_addr = self.nodes[1].getnewaddress() block_hash = self.nodes[0].generatetoaddress(1, coinbase_addr)[0] coinbase_txid = self.nodes[0].getblock(block_hash)['tx'][0] self.sync_all(self.nodes[0:3]) # Check that the txid and balance is found by node1 self.nodes[1].gettransaction(coinbase_txid) # check if wallet or blockchain maintenance changes the balance self.sync_all(self.nodes[0:3]) blocks = self.nodes[0].generate(2) self.sync_all(self.nodes[0:3]) balance_nodes = [self.nodes[i].getbalance() for i in range(3)] block_count = self.nodes[0].getblockcount() # Check modes: # - True: unicode escaped as \u.... 
# - False: unicode directly as UTF-8 for mode in [True, False]: self.nodes[0].rpc.ensure_ascii = mode # unicode check: Basic Multilingual Plane, Supplementary Plane respectively for label in [u'рыба', u'𝅘𝅥𝅯']: addr = self.nodes[0].getnewaddress() self.nodes[0].setlabel(addr, label) test_address(self.nodes[0], addr, labels=[label]) assert label in self.nodes[0].listlabels() self.nodes[0].rpc.ensure_ascii = True # restore to default # maintenance tests maintenance = [ '-rescan', '-reindex', ] chainlimit = 6 for m in maintenance: self.log.info("Test " + m) self.stop_nodes() # set lower ancestor limit for later self.start_node(0, [m, "-limitancestorcount=" + str(chainlimit)]) self.start_node(1, [m, "-limitancestorcount=" + str(chainlimit)]) self.start_node(2, [m, "-limitancestorcount=" + str(chainlimit)]) if m == '-reindex': # reindex will leave rpc warm up "early"; Wait for it to finish self.wait_until(lambda: [block_count] * 3 == [self.nodes[i].getblockcount() for i in range(3)]) assert_equal(balance_nodes, [self.nodes[i].getbalance() for i in range(3)]) # Exercise listsinceblock with the last two blocks coinbase_tx_1 = self.nodes[0].listsinceblock(blocks[0]) assert_equal(coinbase_tx_1["lastblock"], blocks[1]) assert_equal(len(coinbase_tx_1["transactions"]), 1) assert_equal(coinbase_tx_1["transactions"][0]["blockhash"], blocks[1]) assert_equal(len(self.nodes[0].listsinceblock(blocks[1])["transactions"]), 0) # ==Check that wallet prefers to use coins that don't exceed mempool limits ===== # Get all non-zero utxos together chain_addrs = [self.nodes[0].getnewaddress(), self.nodes[0].getnewaddress()] singletxid = self.nodes[0].sendtoaddress(chain_addrs[0], self.nodes[0].getbalance(), "", "", True) self.nodes[0].generate(1) node0_balance = self.nodes[0].getbalance() # Split into two chains rawtx = self.nodes[0].createrawtransaction([{"txid": singletxid, "vout": 0}], {chain_addrs[0]: node0_balance / 2 - Decimal('0.01'), chain_addrs[1]: node0_balance / 2 - Decimal('0.01')}) signedtx = self.nodes[0].signrawtransactionwithwallet(rawtx) singletxid = self.nodes[0].sendrawtransaction(hexstring=signedtx["hex"], maxfeerate=0) self.nodes[0].generate(1) # Make a long chain of unconfirmed payments without hitting mempool limit # Each tx we make leaves only one output of change on a chain 1 longer # Since the amount to send is always much less than the outputs, we only ever need one output # So we should be able to generate exactly chainlimit txs for each original output sending_addr = self.nodes[1].getnewaddress() txid_list = [] for _ in range(chainlimit * 2): txid_list.append(self.nodes[0].sendtoaddress(sending_addr, Decimal('0.0001'))) assert_equal(self.nodes[0].getmempoolinfo()['size'], chainlimit * 2) assert_equal(len(txid_list), chainlimit * 2) # Without walletrejectlongchains, we will still generate a txid # The tx will be stored in the wallet but not accepted to the mempool extra_txid = self.nodes[0].sendtoaddress(sending_addr, Decimal('0.0001')) assert extra_txid not in self.nodes[0].getrawmempool() assert extra_txid in [tx["txid"] for tx in self.nodes[0].listtransactions()] self.nodes[0].abandontransaction(extra_txid) total_txs = len(self.nodes[0].listtransactions("*", 99999)) # Try with walletrejectlongchains # Double chain limit but require combining inputs, so we pass SelectCoinsMinConf self.stop_node(0) extra_args = ["-walletrejectlongchains", "-limitancestorcount=" + str(2 * chainlimit)] self.start_node(0, extra_args=extra_args) # wait until the wallet has submitted all transactions to the 
mempool self.wait_until(lambda: len(self.nodes[0].getrawmempool()) == chainlimit * 2) # Prevent potential race condition when calling wallet RPCs right after restart self.nodes[0].syncwithvalidationinterfacequeue() node0_balance = self.nodes[0].getbalance() # With walletrejectlongchains we will not create the tx and store it in our wallet. assert_raises_rpc_error(-6, "Transaction has too long of a mempool chain", self.nodes[0].sendtoaddress, sending_addr, node0_balance - Decimal('0.01')) # Verify nothing new in wallet assert_equal(total_txs, len(self.nodes[0].listtransactions("*", 99999))) # Test getaddressinfo on external address. Note that these addresses are taken from disablewallet.py assert_raises_rpc_error(-5, "Invalid prefix for Base58-encoded address", self.nodes[0].getaddressinfo, "3J98t1WpEZ73CNmQviecrnyiWrnqRhWNLy") address_info = self.nodes[0].getaddressinfo("mneYUmWYsuk7kySiURxCi3AGxrAqZxLgPZ") assert_equal(address_info['address'], "mneYUmWYsuk7kySiURxCi3AGxrAqZxLgPZ") assert_equal(address_info["scriptPubKey"], "76a9144e3854046c7bd1594ac904e4793b6a45b36dea0988ac") assert not address_info["ismine"] assert not address_info["iswatchonly"] assert not address_info["isscript"] assert not address_info["ischange"] # Test getaddressinfo 'ischange' field on change address. self.nodes[0].generate(1) destination = self.nodes[1].getnewaddress() txid = self.nodes[0].sendtoaddress(destination, 0.123) tx = self.nodes[0].decoderawtransaction(self.nodes[0].gettransaction(txid)['hex']) output_addresses = [vout['scriptPubKey']['addresses'][0] for vout in tx["vout"]] assert len(output_addresses) > 1 for address in output_addresses: ischange = self.nodes[0].getaddressinfo(address)['ischange'] assert_equal(ischange, address != destination) if ischange: change = address self.nodes[0].setlabel(change, 'foobar') assert_equal(self.nodes[0].getaddressinfo(change)['ischange'], False) # Test gettransaction response with different arguments. 
self.log.info("Testing gettransaction response with different arguments...") self.nodes[0].setlabel(change, 'baz') baz = self.nodes[0].listtransactions(label="baz", count=1)[0] expected_receive_vout = {"label": "baz", "address": baz["address"], "amount": baz["amount"], "category": baz["category"], "vout": baz["vout"]} expected_fields = frozenset({'amount', 'bip125-replaceable', 'confirmations', 'details', 'fee', 'hex', 'time', 'timereceived', 'trusted', 'txid', 'walletconflicts'}) verbose_field = "decoded" expected_verbose_fields = expected_fields | {verbose_field} self.log.debug("Testing gettransaction response without verbose") tx = self.nodes[0].gettransaction(txid=txid) assert_equal(set([*tx]), expected_fields) assert_array_result(tx["details"], {"category": "receive"}, expected_receive_vout) self.log.debug("Testing gettransaction response with verbose set to False") tx = self.nodes[0].gettransaction(txid=txid, verbose=False) assert_equal(set([*tx]), expected_fields) assert_array_result(tx["details"], {"category": "receive"}, expected_receive_vout) self.log.debug("Testing gettransaction response with verbose set to True") tx = self.nodes[0].gettransaction(txid=txid, verbose=True) assert_equal(set([*tx]), expected_verbose_fields) assert_array_result(tx["details"], {"category": "receive"}, expected_receive_vout) assert_equal(tx[verbose_field], self.nodes[0].decoderawtransaction(tx["hex"])) self.log.info("Test send* RPCs with verbose=True") address = self.nodes[0].getnewaddress("test") txid_feeReason_one = self.nodes[2].sendtoaddress(address=address, amount=5, verbose=True) assert_equal(txid_feeReason_one["fee_reason"], "Fallback fee") txid_feeReason_two = self.nodes[2].sendmany(dummy='', amounts={address: 5}, verbose=True) assert_equal(txid_feeReason_two["fee_reason"], "Fallback fee") self.log.info("Test send* RPCs with verbose=False") txid_feeReason_three = self.nodes[2].sendtoaddress(address=address, amount=5, verbose=False) assert_equal(self.nodes[2].gettransaction(txid_feeReason_three)['txid'], txid_feeReason_three) txid_feeReason_four = self.nodes[2].sendmany(dummy='', amounts={address: 5}, verbose=False) assert_equal(self.nodes[2].gettransaction(txid_feeReason_four)['txid'], txid_feeReason_four) if __name__ == '__main__': WalletTest().main()
mit
6,660,457,864,703,686,000
52.569254
193
0.630629
false
3.638854
true
false
false
Urinx/SomeCodes
Python/calc_interpreter/calc2.py
1
2562
#!/usr/bin/env python
INTEGER, PLUS, MINUS, EOF = 'INTEGER', 'PLUS', 'MINUS', 'EOF'


class Token(object):
    def __init__(self, type, value):
        self.type = type
        self.value = value

    def __str__(self):
        return 'Token({type}, {value})'.format(
            type=self.type,
            value=repr(self.value)
        )

    def __repr__(self):
        return self.__str__()


class Interpreter(object):
    def __init__(self, text):
        self.text = text
        self.pos = 0
        self.current_token = None
        self.current_char = self.text[self.pos]

    def error(self):
        raise Exception('Error parsing input')

    def advance(self):
        self.pos += 1
        if self.pos >= len(self.text):
            self.current_char = None
        else:
            self.current_char = self.text[self.pos]

    def skip_whitespace(self):
        while self.current_char is not None and self.current_char.isspace():
            self.advance()

    def integer(self):
        result = ''
        while self.current_char is not None and self.current_char.isdigit():
            result += self.current_char
            self.advance()
        return int(result)

    def get_next_token(self):
        while self.current_char is not None:
            if self.current_char.isspace():
                self.skip_whitespace()
                continue
            if self.current_char.isdigit():
                return Token(INTEGER, self.integer())
            if self.current_char == '+':
                self.advance()
                return Token(PLUS, '+')
            if self.current_char == '-':
                self.advance()
                return Token(MINUS, '-')
            self.error()
        return Token(EOF, None)

    def eat(self, token_type):
        if self.current_token.type == token_type:
            self.current_token = self.get_next_token()
        else:
            self.error()

    def expr(self):
        self.current_token = self.get_next_token()
        left = self.current_token
        self.eat(INTEGER)
        op = self.current_token
        if op.type == PLUS:
            self.eat(PLUS)
        else:
            self.eat(MINUS)
        right = self.current_token
        self.eat(INTEGER)
        if op.type == PLUS:
            result = left.value + right.value
        else:
            result = left.value - right.value
        return result


def main():
    while 1:
        try:
            text = raw_input('calc> ')
        except:
            print "\n[quit]"
            break
        if not text:
            continue
        interpreter = Interpreter(text)
        result = interpreter.expr()
        print result


if __name__ == '__main__':
    main()
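# The comment block below is an illustrative addition, not part of the
# original calc2.py record: it sketches how the Interpreter class above
# evaluates a single binary expression, outside the interactive main() loop.
#
#   >>> Interpreter('3+5').expr()
#   8
#   >>> Interpreter('12 - 5').expr()
#   7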
gpl-2.0
1,257,494,814,170,796,500
22.722222
74
0.54879
false
3.852632
false
false
false
codeforamerica/template-maker
template_maker/generator/models.py
1
1235
from template_maker.database import (
    Column,
    Model,
    db,
    ReferenceCol
)

class DocumentBase(Model):
    '''
    Metadata about a new document, relates back to a template
    '''
    __tablename__ = 'document_base'
    id = Column(db.Integer, primary_key=True)
    created_at = Column(db.DateTime)
    updated_at = Column(db.DateTime)
    name = Column(db.String(255))
    # published = Column(db.Boolean, default=False)
    template_id = ReferenceCol('template_base', ondelete='CASCADE')

    def __init__(self, created_at, updated_at, name, template_id):
        self.created_at = created_at
        self.updated_at = updated_at
        self.name = name
        self.template_id = template_id

class DocumentPlaceholder(Model):
    '''
    A document's placeholder's value. Relates to the document
    and the placeholder
    '''
    __tablename__ = 'document_placeholder'
    id = Column(db.Integer, primary_key=True)
    document_id = ReferenceCol('document_base')
    placeholder_id = ReferenceCol('template_placeholders', ondelete='CASCADE')
    value = Column(db.Text)

    def __init__(self, document_id, placeholder_id):
        self.document_id = document_id
        self.placeholder_id = placeholder_id
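# A minimal usage sketch (an illustrative addition, not part of the original
# models.py record); the concrete values below are made up, and a configured
# Flask-SQLAlchemy session from template_maker.database is assumed:
#
#   from datetime import datetime
#   now = datetime.utcnow()
#   doc = DocumentBase(created_at=now, updated_at=now,
#                      name='Example document', template_id=1)
#   placeholder = DocumentPlaceholder(document_id=1, placeholder_id=2)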
bsd-3-clause
2,186,300,471,116,755,700
28.404762
78
0.653441
false
3.847352
false
false
false
Graghav/surabi
ADMIN/venv/lib/python2.7/site-packages/pymongo/cursor.py
11
42633
# Copyright 2009-2015 MongoDB, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Cursor class to iterate over Mongo query results.""" import copy import datetime from collections import deque from bson import RE_TYPE from bson.code import Code from bson.py3compat import (iteritems, integer_types, string_type) from bson.son import SON from pymongo import helpers from pymongo.common import validate_boolean, validate_is_mapping from pymongo.errors import (AutoReconnect, ConnectionFailure, InvalidOperation, NotMasterError, OperationFailure) from pymongo.message import _CursorAddress, _GetMore, _Query, _convert_exception from pymongo.read_preferences import ReadPreference _QUERY_OPTIONS = { "tailable_cursor": 2, "slave_okay": 4, "oplog_replay": 8, "no_timeout": 16, "await_data": 32, "exhaust": 64, "partial": 128} class CursorType(object): NON_TAILABLE = 0 """The standard cursor type.""" TAILABLE = _QUERY_OPTIONS["tailable_cursor"] """The tailable cursor type. Tailable cursors are only for use with capped collections. They are not closed when the last data is retrieved but are kept open and the cursor location marks the final document position. If more data is received iteration of the cursor will continue from the last document received. """ TAILABLE_AWAIT = TAILABLE | _QUERY_OPTIONS["await_data"] """A tailable cursor with the await option set. Creates a tailable cursor that will wait for a few seconds after returning the full result set so that it can capture and return additional data added during the query. """ EXHAUST = _QUERY_OPTIONS["exhaust"] """An exhaust cursor. MongoDB will stream batched results to the client without waiting for the client to request each batch, reducing latency. """ # This has to be an old style class due to # http://bugs.jython.org/issue1057 class _SocketManager: """Used with exhaust cursors to ensure the socket is returned. """ def __init__(self, sock, pool): self.sock = sock self.pool = pool self.__closed = False def __del__(self): self.close() def close(self): """Return this instance's socket to the connection pool. """ if not self.__closed: self.__closed = True self.pool.return_socket(self.sock) self.sock, self.pool = None, None class Cursor(object): """A cursor / iterator over Mongo query results. """ def __init__(self, collection, filter=None, projection=None, skip=0, limit=0, no_cursor_timeout=False, cursor_type=CursorType.NON_TAILABLE, sort=None, allow_partial_results=False, oplog_replay=False, modifiers=None, batch_size=0, manipulate=True): """Create a new cursor. Should not be called directly by application developers - see :meth:`~pymongo.collection.Collection.find` instead. .. 
mongodoc:: cursors """ self.__id = None spec = filter if spec is None: spec = {} validate_is_mapping("filter", spec) if not isinstance(skip, int): raise TypeError("skip must be an instance of int") if not isinstance(limit, int): raise TypeError("limit must be an instance of int") validate_boolean("no_cursor_timeout", no_cursor_timeout) if cursor_type not in (CursorType.NON_TAILABLE, CursorType.TAILABLE, CursorType.TAILABLE_AWAIT, CursorType.EXHAUST): raise ValueError("not a valid value for cursor_type") validate_boolean("allow_partial_results", allow_partial_results) validate_boolean("oplog_replay", oplog_replay) if modifiers is not None: validate_is_mapping("modifiers", modifiers) if not isinstance(batch_size, integer_types): raise TypeError("batch_size must be an integer") if batch_size < 0: raise ValueError("batch_size must be >= 0") if projection is not None: if not projection: projection = {"_id": 1} projection = helpers._fields_list_to_dict(projection, "projection") self.__collection = collection self.__spec = spec self.__projection = projection self.__skip = skip self.__limit = limit self.__batch_size = batch_size self.__modifiers = modifiers and modifiers.copy() or {} self.__ordering = sort and helpers._index_document(sort) or None self.__max_scan = None self.__explain = False self.__hint = None self.__comment = None self.__max_time_ms = None self.__max_await_time_ms = None self.__max = None self.__min = None self.__manipulate = manipulate # Exhaust cursor support self.__exhaust = False self.__exhaust_mgr = None if cursor_type == CursorType.EXHAUST: if self.__collection.database.client.is_mongos: raise InvalidOperation('Exhaust cursors are ' 'not supported by mongos') if limit: raise InvalidOperation("Can't use limit and exhaust together.") self.__exhaust = True # This is ugly. People want to be able to do cursor[5:5] and # get an empty result set (old behavior was an # exception). It's hard to do that right, though, because the # server uses limit(0) to mean 'no limit'. So we set __empty # in that case and check for it when iterating. We also unset # it anytime we change __limit. self.__empty = False self.__data = deque() self.__address = None self.__retrieved = 0 self.__killed = False self.__codec_options = collection.codec_options self.__read_preference = collection.read_preference self.__read_concern = collection.read_concern self.__query_flags = cursor_type if self.__read_preference != ReadPreference.PRIMARY: self.__query_flags |= _QUERY_OPTIONS["slave_okay"] if no_cursor_timeout: self.__query_flags |= _QUERY_OPTIONS["no_timeout"] if allow_partial_results: self.__query_flags |= _QUERY_OPTIONS["partial"] if oplog_replay: self.__query_flags |= _QUERY_OPTIONS["oplog_replay"] @property def collection(self): """The :class:`~pymongo.collection.Collection` that this :class:`Cursor` is iterating. """ return self.__collection @property def retrieved(self): """The number of documents retrieved so far. """ return self.__retrieved def __del__(self): if self.__id and not self.__killed: self.__die() def rewind(self): """Rewind this cursor to its unevaluated state. Reset this cursor if it has been partially or completely evaluated. Any options that are present on the cursor will remain in effect. Future iterating performed on this cursor will cause new queries to be sent to the server, even if the resultant data has already been retrieved by this cursor. 
""" self.__data = deque() self.__id = None self.__address = None self.__retrieved = 0 self.__killed = False return self def clone(self): """Get a clone of this cursor. Returns a new Cursor instance with options matching those that have been set on the current instance. The clone will be completely unevaluated, even if the current instance has been partially or completely evaluated. """ return self._clone(True) def _clone(self, deepcopy=True): """Internal clone helper.""" clone = self._clone_base() values_to_clone = ("spec", "projection", "skip", "limit", "max_time_ms", "max_await_time_ms", "comment", "max", "min", "ordering", "explain", "hint", "batch_size", "max_scan", "manipulate", "query_flags", "modifiers") data = dict((k, v) for k, v in iteritems(self.__dict__) if k.startswith('_Cursor__') and k[9:] in values_to_clone) if deepcopy: data = self._deepcopy(data) clone.__dict__.update(data) return clone def _clone_base(self): """Creates an empty Cursor object for information to be copied into. """ return Cursor(self.__collection) def __die(self): """Closes this cursor. """ if self.__id and not self.__killed: if self.__exhaust and self.__exhaust_mgr: # If this is an exhaust cursor and we haven't completely # exhausted the result set we *must* close the socket # to stop the server from sending more data. self.__exhaust_mgr.sock.close() else: self.__collection.database.client.close_cursor( self.__id, _CursorAddress( self.__address, self.__collection.full_name)) if self.__exhaust and self.__exhaust_mgr: self.__exhaust_mgr.close() self.__killed = True def close(self): """Explicitly close / kill this cursor. Required for PyPy, Jython and other Python implementations that don't use reference counting garbage collection. """ self.__die() def __query_spec(self): """Get the spec to use for a query. """ operators = self.__modifiers.copy() if self.__ordering: operators["$orderby"] = self.__ordering if self.__explain: operators["$explain"] = True if self.__hint: operators["$hint"] = self.__hint if self.__comment: operators["$comment"] = self.__comment if self.__max_scan: operators["$maxScan"] = self.__max_scan if self.__max_time_ms is not None: operators["$maxTimeMS"] = self.__max_time_ms if self.__max: operators["$max"] = self.__max if self.__min: operators["$min"] = self.__min if operators: # Make a shallow copy so we can cleanly rewind or clone. spec = self.__spec.copy() # White-listed commands must be wrapped in $query. if "$query" not in spec: # $query has to come first spec = SON([("$query", spec)]) if not isinstance(spec, SON): # Ensure the spec is SON. As order is important this will # ensure its set before merging in any extra operators. spec = SON(spec) spec.update(operators) return spec # Have to wrap with $query if "query" is the first key. # We can't just use $query anytime "query" is a key as # that breaks commands like count and find_and_modify. # Checking spec.keys()[0] covers the case that the spec # was passed as an instance of SON or OrderedDict. elif ("query" in self.__spec and (len(self.__spec) == 1 or next(iter(self.__spec)) == "query")): return SON({"$query": self.__spec}) return self.__spec def __check_okay_to_chain(self): """Check if it is okay to chain more options onto this cursor. """ if self.__retrieved or self.__id is not None: raise InvalidOperation("cannot set options after executing query") def add_option(self, mask): """Set arbitrary query flags using a bitmask. 
To set the tailable flag: cursor.add_option(2) """ if not isinstance(mask, int): raise TypeError("mask must be an int") self.__check_okay_to_chain() if mask & _QUERY_OPTIONS["exhaust"]: if self.__limit: raise InvalidOperation("Can't use limit and exhaust together.") if self.__collection.database.client.is_mongos: raise InvalidOperation('Exhaust cursors are ' 'not supported by mongos') self.__exhaust = True self.__query_flags |= mask return self def remove_option(self, mask): """Unset arbitrary query flags using a bitmask. To unset the tailable flag: cursor.remove_option(2) """ if not isinstance(mask, int): raise TypeError("mask must be an int") self.__check_okay_to_chain() if mask & _QUERY_OPTIONS["exhaust"]: self.__exhaust = False self.__query_flags &= ~mask return self def limit(self, limit): """Limits the number of results to be returned by this cursor. Raises :exc:`TypeError` if `limit` is not an integer. Raises :exc:`~pymongo.errors.InvalidOperation` if this :class:`Cursor` has already been used. The last `limit` applied to this cursor takes precedence. A limit of ``0`` is equivalent to no limit. :Parameters: - `limit`: the number of results to return .. mongodoc:: limit """ if not isinstance(limit, integer_types): raise TypeError("limit must be an integer") if self.__exhaust: raise InvalidOperation("Can't use limit and exhaust together.") self.__check_okay_to_chain() self.__empty = False self.__limit = limit return self def batch_size(self, batch_size): """Limits the number of documents returned in one batch. Each batch requires a round trip to the server. It can be adjusted to optimize performance and limit data transfer. .. note:: batch_size can not override MongoDB's internal limits on the amount of data it will return to the client in a single batch (i.e if you set batch size to 1,000,000,000, MongoDB will currently only return 4-16MB of results per batch). Raises :exc:`TypeError` if `batch_size` is not an integer. Raises :exc:`ValueError` if `batch_size` is less than ``0``. Raises :exc:`~pymongo.errors.InvalidOperation` if this :class:`Cursor` has already been used. The last `batch_size` applied to this cursor takes precedence. :Parameters: - `batch_size`: The size of each batch of results requested. """ if not isinstance(batch_size, integer_types): raise TypeError("batch_size must be an integer") if batch_size < 0: raise ValueError("batch_size must be >= 0") self.__check_okay_to_chain() self.__batch_size = batch_size return self def skip(self, skip): """Skips the first `skip` results of this cursor. Raises :exc:`TypeError` if `skip` is not an integer. Raises :exc:`ValueError` if `skip` is less than ``0``. Raises :exc:`~pymongo.errors.InvalidOperation` if this :class:`Cursor` has already been used. The last `skip` applied to this cursor takes precedence. :Parameters: - `skip`: the number of results to skip """ if not isinstance(skip, integer_types): raise TypeError("skip must be an integer") if skip < 0: raise ValueError("skip must be >= 0") self.__check_okay_to_chain() self.__skip = skip return self def max_time_ms(self, max_time_ms): """Specifies a time limit for a query operation. If the specified time is exceeded, the operation will be aborted and :exc:`~pymongo.errors.ExecutionTimeout` is raised. If `max_time_ms` is ``None`` no limit is applied. Raises :exc:`TypeError` if `max_time_ms` is not an integer or ``None``. Raises :exc:`~pymongo.errors.InvalidOperation` if this :class:`Cursor` has already been used. 
:Parameters: - `max_time_ms`: the time limit after which the operation is aborted """ if (not isinstance(max_time_ms, integer_types) and max_time_ms is not None): raise TypeError("max_time_ms must be an integer or None") self.__check_okay_to_chain() self.__max_time_ms = max_time_ms return self def max_await_time_ms(self, max_await_time_ms): """Specifies a time limit for a getMore operation on a :attr:`~pymongo.cursor.CursorType.TAILABLE_AWAIT` cursor. For all other types of cursor max_await_time_ms is ignored. Raises :exc:`TypeError` if `max_await_time_ms` is not an integer or ``None``. Raises :exc:`~pymongo.errors.InvalidOperation` if this :class:`Cursor` has already been used. .. note:: `max_await_time_ms` requires server version **>= 3.2** :Parameters: - `max_await_time_ms`: the time limit after which the operation is aborted .. versionadded:: 3.2 """ if (not isinstance(max_await_time_ms, integer_types) and max_await_time_ms is not None): raise TypeError("max_await_time_ms must be an integer or None") self.__check_okay_to_chain() # Ignore max_await_time_ms if not tailable or await_data is False. if self.__query_flags & CursorType.TAILABLE_AWAIT: self.__max_await_time_ms = max_await_time_ms return self def __getitem__(self, index): """Get a single document or a slice of documents from this cursor. Raises :class:`~pymongo.errors.InvalidOperation` if this cursor has already been used. To get a single document use an integral index, e.g.:: >>> db.test.find()[50] An :class:`IndexError` will be raised if the index is negative or greater than the amount of documents in this cursor. Any limit previously applied to this cursor will be ignored. To get a slice of documents use a slice index, e.g.:: >>> db.test.find()[20:25] This will return this cursor with a limit of ``5`` and skip of ``20`` applied. Using a slice index will override any prior limits or skips applied to this cursor (including those applied through previous calls to this method). Raises :class:`IndexError` when the slice has a step, a negative start value, or a stop value less than or equal to the start value. :Parameters: - `index`: An integer or slice index to be applied to this cursor """ self.__check_okay_to_chain() self.__empty = False if isinstance(index, slice): if index.step is not None: raise IndexError("Cursor instances do not support slice steps") skip = 0 if index.start is not None: if index.start < 0: raise IndexError("Cursor instances do not support" "negative indices") skip = index.start if index.stop is not None: limit = index.stop - skip if limit < 0: raise IndexError("stop index must be greater than start" "index for slice %r" % index) if limit == 0: self.__empty = True else: limit = 0 self.__skip = skip self.__limit = limit return self if isinstance(index, integer_types): if index < 0: raise IndexError("Cursor instances do not support negative" "indices") clone = self.clone() clone.skip(index + self.__skip) clone.limit(-1) # use a hard limit for doc in clone: return doc raise IndexError("no such item for Cursor instance") raise TypeError("index %r cannot be applied to Cursor " "instances" % index) def max_scan(self, max_scan): """Limit the number of documents to scan when performing the query. Raises :class:`~pymongo.errors.InvalidOperation` if this cursor has already been used. Only the last :meth:`max_scan` applied to this cursor has any effect. 
:Parameters: - `max_scan`: the maximum number of documents to scan """ self.__check_okay_to_chain() self.__max_scan = max_scan return self def max(self, spec): """Adds `max` operator that specifies upper bound for specific index. :Parameters: - `spec`: a list of field, limit pairs specifying the exclusive upper bound for all keys of a specific index in order. .. versionadded:: 2.7 """ if not isinstance(spec, (list, tuple)): raise TypeError("spec must be an instance of list or tuple") self.__check_okay_to_chain() self.__max = SON(spec) return self def min(self, spec): """Adds `min` operator that specifies lower bound for specific index. :Parameters: - `spec`: a list of field, limit pairs specifying the inclusive lower bound for all keys of a specific index in order. .. versionadded:: 2.7 """ if not isinstance(spec, (list, tuple)): raise TypeError("spec must be an instance of list or tuple") self.__check_okay_to_chain() self.__min = SON(spec) return self def sort(self, key_or_list, direction=None): """Sorts this cursor's results. Pass a field name and a direction, either :data:`~pymongo.ASCENDING` or :data:`~pymongo.DESCENDING`:: for doc in collection.find().sort('field', pymongo.ASCENDING): print(doc) To sort by multiple fields, pass a list of (key, direction) pairs:: for doc in collection.find().sort([ ('field1', pymongo.ASCENDING), ('field2', pymongo.DESCENDING)]): print(doc) Beginning with MongoDB version 2.6, text search results can be sorted by relevance:: cursor = db.test.find( {'$text': {'$search': 'some words'}}, {'score': {'$meta': 'textScore'}}) # Sort by 'score' field. cursor.sort([('score', {'$meta': 'textScore'})]) for doc in cursor: print(doc) Raises :class:`~pymongo.errors.InvalidOperation` if this cursor has already been used. Only the last :meth:`sort` applied to this cursor has any effect. :Parameters: - `key_or_list`: a single key or a list of (key, direction) pairs specifying the keys to sort on - `direction` (optional): only used if `key_or_list` is a single key, if not given :data:`~pymongo.ASCENDING` is assumed """ self.__check_okay_to_chain() keys = helpers._index_list(key_or_list, direction) self.__ordering = helpers._index_document(keys) return self def count(self, with_limit_and_skip=False): """Get the size of the results set for this query. Returns the number of documents in the results set for this query. Does not take :meth:`limit` and :meth:`skip` into account by default - set `with_limit_and_skip` to ``True`` if that is the desired behavior. Raises :class:`~pymongo.errors.OperationFailure` on a database error. When used with MongoDB >= 2.6, :meth:`~count` uses any :meth:`~hint` applied to the query. In the following example the hint is passed to the count command: collection.find({'field': 'value'}).hint('field_1').count() The :meth:`count` method obeys the :attr:`~pymongo.collection.Collection.read_preference` of the :class:`~pymongo.collection.Collection` instance on which :meth:`~pymongo.collection.Collection.find` was called. :Parameters: - `with_limit_and_skip` (optional): take any :meth:`limit` or :meth:`skip` that has been applied to this cursor into account when getting the count .. note:: The `with_limit_and_skip` parameter requires server version **>= 1.1.4-** .. versionchanged:: 2.8 The :meth:`~count` method now supports :meth:`~hint`. 
""" validate_boolean("with_limit_and_skip", with_limit_and_skip) cmd = SON([("count", self.__collection.name), ("query", self.__spec)]) if self.__max_time_ms is not None: cmd["maxTimeMS"] = self.__max_time_ms if self.__comment: cmd["$comment"] = self.__comment if self.__hint is not None: cmd["hint"] = self.__hint if with_limit_and_skip: if self.__limit: cmd["limit"] = self.__limit if self.__skip: cmd["skip"] = self.__skip return self.__collection._count(cmd) def distinct(self, key): """Get a list of distinct values for `key` among all documents in the result set of this query. Raises :class:`TypeError` if `key` is not an instance of :class:`basestring` (:class:`str` in python 3). The :meth:`distinct` method obeys the :attr:`~pymongo.collection.Collection.read_preference` of the :class:`~pymongo.collection.Collection` instance on which :meth:`~pymongo.collection.Collection.find` was called. :Parameters: - `key`: name of key for which we want to get the distinct values .. seealso:: :meth:`pymongo.collection.Collection.distinct` """ options = {} if self.__spec: options["query"] = self.__spec if self.__max_time_ms is not None: options['maxTimeMS'] = self.__max_time_ms if self.__comment: options['$comment'] = self.__comment return self.__collection.distinct(key, **options) def explain(self): """Returns an explain plan record for this cursor. .. mongodoc:: explain """ c = self.clone() c.__explain = True # always use a hard limit for explains if c.__limit: c.__limit = -abs(c.__limit) return next(c) def hint(self, index): """Adds a 'hint', telling Mongo the proper index to use for the query. Judicious use of hints can greatly improve query performance. When doing a query on multiple fields (at least one of which is indexed) pass the indexed field as a hint to the query. Hinting will not do anything if the corresponding index does not exist. Raises :class:`~pymongo.errors.InvalidOperation` if this cursor has already been used. `index` should be an index as passed to :meth:`~pymongo.collection.Collection.create_index` (e.g. ``[('field', ASCENDING)]``) or the name of the index. If `index` is ``None`` any existing hint for this query is cleared. The last hint applied to this cursor takes precedence over all others. :Parameters: - `index`: index to hint on (as an index specifier) .. versionchanged:: 2.8 The :meth:`~hint` method accepts the name of the index. """ self.__check_okay_to_chain() if index is None: self.__hint = None return self if isinstance(index, string_type): self.__hint = index else: self.__hint = helpers._index_document(index) return self def comment(self, comment): """Adds a 'comment' to the cursor. http://docs.mongodb.org/manual/reference/operator/comment/ :Parameters: - `comment`: A string or document .. versionadded:: 2.7 """ self.__check_okay_to_chain() self.__comment = comment return self def where(self, code): """Adds a $where clause to this query. The `code` argument must be an instance of :class:`basestring` (:class:`str` in python 3) or :class:`~bson.code.Code` containing a JavaScript expression. This expression will be evaluated for each document scanned. Only those documents for which the expression evaluates to *true* will be returned as results. The keyword *this* refers to the object currently being scanned. Raises :class:`TypeError` if `code` is not an instance of :class:`basestring` (:class:`str` in python 3). Raises :class:`~pymongo.errors.InvalidOperation` if this :class:`Cursor` has already been used. 
Only the last call to :meth:`where` applied to a :class:`Cursor` has any effect. :Parameters: - `code`: JavaScript expression to use as a filter """ self.__check_okay_to_chain() if not isinstance(code, Code): code = Code(code) self.__spec["$where"] = code return self def __send_message(self, operation): """Send a query or getmore operation and handles the response. If operation is ``None`` this is an exhaust cursor, which reads the next result batch off the exhaust socket instead of sending getMore messages to the server. Can raise ConnectionFailure. """ client = self.__collection.database.client listeners = client._event_listeners publish = listeners.enabled_for_commands from_command = False if operation: kwargs = { "read_preference": self.__read_preference, "exhaust": self.__exhaust, } if self.__address is not None: kwargs["address"] = self.__address try: response = client._send_message_with_response(operation, **kwargs) self.__address = response.address if self.__exhaust: # 'response' is an ExhaustResponse. self.__exhaust_mgr = _SocketManager(response.socket_info, response.pool) cmd_name = operation.name data = response.data cmd_duration = response.duration rqst_id = response.request_id from_command = response.from_command except AutoReconnect: # Don't try to send kill cursors on another socket # or to another server. It can cause a _pinValue # assertion on some server releases if we get here # due to a socket timeout. self.__killed = True raise else: # Exhaust cursor - no getMore message. rqst_id = 0 cmd_name = 'getMore' if publish: # Fake a getMore command. cmd = SON([('getMore', self.__id), ('collection', self.__collection.name)]) if self.__batch_size: cmd['batchSize'] = self.__batch_size if self.__max_time_ms: cmd['maxTimeMS'] = self.__max_time_ms listeners.publish_command_start( cmd, self.__collection.database.name, 0, self.__address) start = datetime.datetime.now() try: data = self.__exhaust_mgr.sock.receive_message(1, None) except Exception as exc: if publish: duration = datetime.datetime.now() - start listeners.publish_command_failure( duration, _convert_exception(exc), cmd_name, rqst_id, self.__address) if isinstance(exc, ConnectionFailure): self.__die() raise if publish: cmd_duration = datetime.datetime.now() - start if publish: start = datetime.datetime.now() try: doc = helpers._unpack_response(response=data, cursor_id=self.__id, codec_options=self.__codec_options) if from_command: helpers._check_command_response(doc['data'][0]) except OperationFailure as exc: self.__killed = True # Make sure exhaust socket is returned immediately, if necessary. self.__die() if publish: duration = (datetime.datetime.now() - start) + cmd_duration listeners.publish_command_failure( duration, exc.details, cmd_name, rqst_id, self.__address) # If this is a tailable cursor the error is likely # due to capped collection roll over. Setting # self.__killed to True ensures Cursor.alive will be # False. No need to re-raise. if self.__query_flags & _QUERY_OPTIONS["tailable_cursor"]: return raise except NotMasterError as exc: # Don't send kill cursors to another server after a "not master" # error. It's completely pointless. self.__killed = True # Make sure exhaust socket is returned immediately, if necessary. 
self.__die() if publish: duration = (datetime.datetime.now() - start) + cmd_duration listeners.publish_command_failure( duration, exc.details, cmd_name, rqst_id, self.__address) client._reset_server_and_request_check(self.__address) raise except Exception as exc: if publish: duration = (datetime.datetime.now() - start) + cmd_duration listeners.publish_command_failure( duration, _convert_exception(exc), cmd_name, rqst_id, self.__address) raise if publish: duration = (datetime.datetime.now() - start) + cmd_duration # Must publish in find / getMore / explain command response format. if from_command: res = doc['data'][0] elif cmd_name == "explain": res = doc["data"][0] if doc["number_returned"] else {} else: res = {"cursor": {"id": doc["cursor_id"], "ns": self.__collection.full_name}, "ok": 1} if cmd_name == "find": res["cursor"]["firstBatch"] = doc["data"] else: res["cursor"]["nextBatch"] = doc["data"] listeners.publish_command_success( duration, res, cmd_name, rqst_id, self.__address) if from_command and cmd_name != "explain": cursor = doc['data'][0]['cursor'] self.__id = cursor['id'] if cmd_name == 'find': documents = cursor['firstBatch'] else: documents = cursor['nextBatch'] self.__data = deque(documents) self.__retrieved += len(documents) else: self.__id = doc["cursor_id"] self.__data = deque(doc["data"]) self.__retrieved += doc["number_returned"] if self.__id == 0: self.__killed = True if self.__limit and self.__id and self.__limit <= self.__retrieved: self.__die() # Don't wait for garbage collection to call __del__, return the # socket to the pool now. if self.__exhaust and self.__id == 0: self.__exhaust_mgr.close() def _refresh(self): """Refreshes the cursor with more data from Mongo. Returns the length of self.__data after refresh. Will exit early if self.__data is already non-empty. Raises OperationFailure when the cursor cannot be refreshed due to an error on the query. """ if len(self.__data) or self.__killed: return len(self.__data) if self.__id is None: # Query self.__send_message(_Query(self.__query_flags, self.__collection.database.name, self.__collection.name, self.__skip, self.__query_spec(), self.__projection, self.__codec_options, self.__read_preference, self.__limit, self.__batch_size, self.__read_concern)) if not self.__id: self.__killed = True elif self.__id: # Get More if self.__limit: limit = self.__limit - self.__retrieved if self.__batch_size: limit = min(limit, self.__batch_size) else: limit = self.__batch_size # Exhaust cursors don't send getMore messages. if self.__exhaust: self.__send_message(None) else: self.__send_message(_GetMore(self.__collection.database.name, self.__collection.name, limit, self.__id, self.__codec_options, self.__max_await_time_ms)) else: # Cursor id is zero nothing else to return self.__killed = True return len(self.__data) @property def alive(self): """Does this cursor have the potential to return more data? This is mostly useful with `tailable cursors <http://www.mongodb.org/display/DOCS/Tailable+Cursors>`_ since they will stop iterating even though they *may* return more results in the future. With regular cursors, simply use a for loop instead of :attr:`alive`:: for doc in collection.find(): print(doc) .. note:: Even if :attr:`alive` is True, :meth:`next` can raise :exc:`StopIteration`. :attr:`alive` can also be True while iterating a cursor from a failed server. In this case :attr:`alive` will return False after :meth:`next` fails to retrieve the next batch of results from the server. 
""" return bool(len(self.__data) or (not self.__killed)) @property def cursor_id(self): """Returns the id of the cursor Useful if you need to manage cursor ids and want to handle killing cursors manually using :meth:`~pymongo.mongo_client.MongoClient.kill_cursors` .. versionadded:: 2.2 """ return self.__id @property def address(self): """The (host, port) of the server used, or None. .. versionchanged:: 3.0 Renamed from "conn_id". """ return self.__address def __iter__(self): return self def next(self): """Advance the cursor.""" if self.__empty: raise StopIteration _db = self.__collection.database if len(self.__data) or self._refresh(): if self.__manipulate: return _db._fix_outgoing(self.__data.popleft(), self.__collection) else: return self.__data.popleft() else: raise StopIteration __next__ = next def __enter__(self): return self def __exit__(self, exc_type, exc_val, exc_tb): self.__die() def __copy__(self): """Support function for `copy.copy()`. .. versionadded:: 2.4 """ return self._clone(deepcopy=False) def __deepcopy__(self, memo): """Support function for `copy.deepcopy()`. .. versionadded:: 2.4 """ return self._clone(deepcopy=True) def _deepcopy(self, x, memo=None): """Deepcopy helper for the data dictionary or list. Regular expressions cannot be deep copied but as they are immutable we don't have to copy them when cloning. """ if not hasattr(x, 'items'): y, is_list, iterator = [], True, enumerate(x) else: y, is_list, iterator = {}, False, iteritems(x) if memo is None: memo = {} val_id = id(x) if val_id in memo: return memo.get(val_id) memo[val_id] = y for key, value in iterator: if isinstance(value, (dict, list)) and not isinstance(value, SON): value = self._deepcopy(value, memo) elif not isinstance(value, RE_TYPE): value = copy.deepcopy(value, memo) if is_list: y.append(value) else: if not isinstance(key, RE_TYPE): key = copy.deepcopy(key, memo) y[key] = value return y
apache-2.0
2,228,343,693,537,399,800
36.039965
80
0.556189
false
4.535908
false
false
false
edlund/lut
fs/mkrandtree.py
2
8204
#!/usr/bin/env python # coding: UTF-8 # Copyright (C) 2015 Erik Edlund <erik.edlund@32767.se> # # Redistribution and use in source and binary forms, with or # without modification, are permitted provided that the # following conditions are met: # # * Redistributions of source code must retain the above # copyright notice, this list of conditions and the following # disclaimer. # # * Redistributions in binary form must reproduce the above # copyright notice, this list of conditions and the following # disclaimer in the documentation and/or other materials # provided with the distribution. # # * Neither the name of Erik Edlund, nor the names of its # contributors may be used to endorse or promote products # derived from this software without specific prior written # permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND # CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, # INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF # MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE # DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR # CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT # NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; # LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) # HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE # OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS # SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. from __future__ import division from __future__ import print_function from __future__ import unicode_literals import argparse import functools import itertools import os import random import string import sys import time # http://stackoverflow.com/a/3590105 def constrained_sum_sample_pos(n, total): """Return a randomly chosen list of n positive integers summing to total. Each such list is equally likely to occur.""" dividers = sorted(random.sample(xrange(1, total), n - 1)) return [a - b for a, b in zip(dividers + [total], [0] + dividers)] # http://stackoverflow.com/a/3590105 def constrained_sum_sample_nonneg(n, total): """Return a randomly chosen list of n nonnegative integers summing to total. Each such list is equally likely to occur.""" return [x - 1 for x in constrained_sum_sample_pos(n, total + n)] def generate_name(glyphs, max_length, alpha, beta): """Return a randomly generated name using the given set of glyphs. Use alpha and beta to adjust the distribution.""" part = 1 / max_length length = int(random.betavariate(alpha, beta) / part) + 1 return "".join([random.choice(glyphs) for _ in xrange(0, length)]) def compressable_bytes(n): """Generate n bytes which will compress relatively nicely.""" randbytes = [] while len(randbytes) < n: randbytes.append(ord(random.choice(string.hexdigits))) return bytearray(randbytes) def uncompressable_bytes(n): """Generate n random bytes which are difficult to compress.""" randbytes = [] for i in [random.getrandbits(32) for _ in xrange(int(n / 4) + 1)]: for shift in [0, 8, 16, 24]: if len(randbytes) < n: randbytes.append((i >> shift) & 0xff) return bytearray(randbytes) def create_directories(base_path, current_level, max_level, max_subdirs, get_name, verbose=False): """Create a random number of subdirectories (maybe none at all) in the given base_path. 
The probability of subdirectory creation decreases as the level of recursion increases.""" if current_level < 1: raise ValueError("current_level must be greater than or equal to 1") subdir_paths = [] if current_level > 1 else [base_path] if current_level > max_level or 1 != random.randint(1, current_level * 2): return subdir_paths for _ in range(0, random.randint(1, max_subdirs)): subdir_path = os.path.join(base_path, get_name()) while subdir_path in subdir_paths: subdir_path = os.path.join(base_path, get_name()) subdir_paths.append(subdir_path) os.mkdir(subdir_path) subdir_paths += create_directories(subdir_path, current_level + 1, max_level, max_subdirs, get_name) if verbose: print("dir: {name}".format(name=subdir_path)) return subdir_paths def create_files(directories, size_budget, max_file_size, get_name, verbose=False): """Create at least one nonempty file in each of the given directories. The sum of the size of all created files (excluding metadata) will be equal to the given size_budget.""" if size_budget / len(directories) < 1: raise ValueError("The given size_budget is too small") file_paths = [] dir_size_budgets = constrained_sum_sample_pos(len(directories), size_budget) if len(directories) > 1 else [size_budget] for dir_budget, dir_name in itertools.izip(dir_size_budgets, directories): while dir_budget: maximum = max_file_size if max_file_size < dir_budget else dir_budget file_size = random.randint(0, maximum) dir_budget -= file_size existing_paths = directories + file_paths file_path = os.path.join(dir_name, get_name()) while file_path in existing_paths: file_path = os.path.join(dir_name, get_name()) file_paths.append(file_path) file_content = globals()[args.file_content](file_size) with open(file_path, 'w') as f: f.write(file_content) if verbose: print("file: {name} ({size} bytes)".format(name=file_path, size=file_size)) return file_paths if __name__ == "__main__": argsparser = argparse.ArgumentParser( description="Create a pseudo-random filesystem hierarchy.", formatter_class=argparse.ArgumentDefaultsHelpFormatter) argsparser.add_argument( "--verbose", dest="verbose", metavar="", help="Be verbose", default=False, type=bool ) argsparser.add_argument( "--random-seed", dest="random_seed", metavar="", help="seed the random generator (default: time.time())", default=int(time.time()), type=int ) argsparser.add_argument( "--name-max-length", dest="name_max_length", metavar="", help="maximim name length returned by generate_name(...)", default=255, type=int ) argsparser.add_argument( "--name-alpha", dest="name_alpha", metavar="", help="alpha value for generate_name(...)", default=1.0, type=float ) argsparser.add_argument( "--name-beta", dest="name_beta", metavar="", help="beta value for generate_name(...)", default=7.0, type=float ) argsparser.add_argument( "--name-glyphs", dest="name_glyps", metavar="", help="available glyphs for generate_name(...)", default="abcdefghijklmnopqrstuvwxyz0123456789", type=str ) argsparser.add_argument( "--levels", dest="levels", metavar="", help="the maximum level of sub-directory recursion", default=2, type=int ) argsparser.add_argument( "--size-budget", dest="size_budget", metavar="", help="size in bytes for all created files", default=134217728, type=int ) argsparser.add_argument( "--max-file-size", dest="max_file_size", metavar="", help="maximum size in bytes a single created file", default=16777215, type=int ) argsparser.add_argument( "--max-sub-dirs", dest="max_subdirs", metavar="", help="maximum number of sub-directories per level", default=16, 
type=int ) argsparser.add_argument( "--file-content", dest="file_content", metavar="", help="the file content generator", default="uncompressable_bytes", type=str, choices=["uncompressable_bytes", "compressable_bytes"] ) argsparser.add_argument( "dirname", help="output directory", type=str ) args = argsparser.parse_args() print("dirname: {name}".format(name=args.dirname)) print("random seed: {seed}".format(seed=args.random_seed)) print("file content: {generator}".format(generator=args.file_content)) os.mkdir(args.dirname) random.seed(args.random_seed) get_name = functools.partial(generate_name, args.name_glyps, args.name_max_length, args.name_alpha, args.name_beta) directories = create_directories(args.dirname, 1, args.levels, args.max_subdirs, get_name, args.verbose) files = create_files(directories, args.size_budget, args.max_file_size, get_name, args.verbose) exit(0)
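# Illustrative invocation (an addition, not part of the original script).
# The flag names come from the argparse definitions above; the values and
# the output directory are made up for the example:
#
#   python mkrandtree.py --levels 3 --size-budget 1048576 \
#       --max-file-size 65536 --file-content compressable_bytes /tmp/randtree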
bsd-3-clause
-7,953,901,000,566,019,000
30.193916
72
0.714408
false
3.264624
false
false
false
geobeda/QgisCadastrePlugin-master
forms/cadastre_search_form.py
4
24268
# -*- coding: utf-8 -*- # Form implementation generated from reading ui file 'forms/cadastre_search_form.ui' # # Created: Mon Jan 27 10:07:04 2014 # by: PyQt4 UI code generator 4.10 # # WARNING! All changes made in this file will be lost! from PyQt4 import QtCore, QtGui try: _fromUtf8 = QtCore.QString.fromUtf8 except AttributeError: def _fromUtf8(s): return s try: _encoding = QtGui.QApplication.UnicodeUTF8 def _translate(context, text, disambig): return QtGui.QApplication.translate(context, text, disambig, _encoding) except AttributeError: def _translate(context, text, disambig): return QtGui.QApplication.translate(context, text, disambig) class Ui_cadastre_search_form(object): def setupUi(self, cadastre_search_form): cadastre_search_form.setObjectName(_fromUtf8("cadastre_search_form")) cadastre_search_form.resize(339, 625) self.dockWidgetContents = QtGui.QWidget() self.dockWidgetContents.setObjectName(_fromUtf8("dockWidgetContents")) self.verticalLayout = QtGui.QVBoxLayout(self.dockWidgetContents) self.verticalLayout.setObjectName(_fromUtf8("verticalLayout")) self.scrollArea_3 = QtGui.QScrollArea(self.dockWidgetContents) self.scrollArea_3.setWidgetResizable(True) self.scrollArea_3.setObjectName(_fromUtf8("scrollArea_3")) self.scrollAreaWidgetContents_3 = QtGui.QWidget() self.scrollAreaWidgetContents_3.setGeometry(QtCore.QRect(0, 0, 319, 580)) self.scrollAreaWidgetContents_3.setObjectName(_fromUtf8("scrollAreaWidgetContents_3")) self.verticalLayout_13 = QtGui.QVBoxLayout(self.scrollAreaWidgetContents_3) self.verticalLayout_13.setObjectName(_fromUtf8("verticalLayout_13")) self.grpLieu = QtGui.QGroupBox(self.scrollAreaWidgetContents_3) self.grpLieu.setObjectName(_fromUtf8("grpLieu")) self.verticalLayout_14 = QtGui.QVBoxLayout(self.grpLieu) self.verticalLayout_14.setObjectName(_fromUtf8("verticalLayout_14")) self.gridLayout = QtGui.QGridLayout() self.gridLayout.setObjectName(_fromUtf8("gridLayout")) self.label_12 = QtGui.QLabel(self.grpLieu) self.label_12.setMaximumSize(QtCore.QSize(70, 16777215)) self.label_12.setObjectName(_fromUtf8("label_12")) self.gridLayout.addWidget(self.label_12, 0, 0, 1, 1) self.label_2 = QtGui.QLabel(self.grpLieu) self.label_2.setObjectName(_fromUtf8("label_2")) self.gridLayout.addWidget(self.label_2, 2, 0, 1, 1) self.label = QtGui.QLabel(self.grpLieu) self.label.setObjectName(_fromUtf8("label")) self.gridLayout.addWidget(self.label, 1, 0, 1, 1) self.liCommune = QtGui.QComboBox(self.grpLieu) sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.MinimumExpanding, QtGui.QSizePolicy.Fixed) sizePolicy.setHorizontalStretch(0) sizePolicy.setVerticalStretch(0) sizePolicy.setHeightForWidth(self.liCommune.sizePolicy().hasHeightForWidth()) self.liCommune.setSizePolicy(sizePolicy) self.liCommune.setObjectName(_fromUtf8("liCommune")) self.gridLayout.addWidget(self.liCommune, 0, 1, 1, 1) self.liSection = QtGui.QComboBox(self.grpLieu) sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.MinimumExpanding, QtGui.QSizePolicy.Fixed) sizePolicy.setHorizontalStretch(0) sizePolicy.setVerticalStretch(0) sizePolicy.setHeightForWidth(self.liSection.sizePolicy().hasHeightForWidth()) self.liSection.setSizePolicy(sizePolicy) self.liSection.setObjectName(_fromUtf8("liSection")) self.gridLayout.addWidget(self.liSection, 1, 1, 1, 1) self.horizontalLayout_4 = QtGui.QHBoxLayout() self.horizontalLayout_4.setObjectName(_fromUtf8("horizontalLayout_4")) self.liParcelle = QtGui.QComboBox(self.grpLieu) sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.MinimumExpanding, QtGui.QSizePolicy.Fixed) 
sizePolicy.setHorizontalStretch(0) sizePolicy.setVerticalStretch(0) sizePolicy.setHeightForWidth(self.liParcelle.sizePolicy().hasHeightForWidth()) self.liParcelle.setSizePolicy(sizePolicy) self.liParcelle.setEditable(True) self.liParcelle.setObjectName(_fromUtf8("liParcelle")) self.horizontalLayout_4.addWidget(self.liParcelle) self.gridLayout.addLayout(self.horizontalLayout_4, 2, 1, 1, 1) self.btResetCommune = QtGui.QToolButton(self.grpLieu) icon = QtGui.QIcon() icon.addPixmap(QtGui.QPixmap(_fromUtf8(":/plugins/cadastre/icons/delete.png")), QtGui.QIcon.Normal, QtGui.QIcon.Off) self.btResetCommune.setIcon(icon) self.btResetCommune.setObjectName(_fromUtf8("btResetCommune")) self.gridLayout.addWidget(self.btResetCommune, 0, 2, 1, 1) self.btResetSection = QtGui.QToolButton(self.grpLieu) self.btResetSection.setIcon(icon) self.btResetSection.setObjectName(_fromUtf8("btResetSection")) self.gridLayout.addWidget(self.btResetSection, 1, 2, 1, 1) self.btResetParcelle = QtGui.QToolButton(self.grpLieu) self.btResetParcelle.setIcon(icon) self.btResetParcelle.setObjectName(_fromUtf8("btResetParcelle")) self.gridLayout.addWidget(self.btResetParcelle, 2, 2, 1, 1) self.verticalLayout_14.addLayout(self.gridLayout) self.horizontalLayout_2 = QtGui.QHBoxLayout() self.horizontalLayout_2.setObjectName(_fromUtf8("horizontalLayout_2")) self.btCentrerLieu = QtGui.QPushButton(self.grpLieu) self.btCentrerLieu.setText(_fromUtf8("")) icon1 = QtGui.QIcon() icon1.addPixmap(QtGui.QPixmap(_fromUtf8(":/plugins/cadastre/icons/centrer.png")), QtGui.QIcon.Normal, QtGui.QIcon.Off) self.btCentrerLieu.setIcon(icon1) self.btCentrerLieu.setObjectName(_fromUtf8("btCentrerLieu")) self.horizontalLayout_2.addWidget(self.btCentrerLieu) self.btZoomerLieu = QtGui.QPushButton(self.grpLieu) self.btZoomerLieu.setText(_fromUtf8("")) icon2 = QtGui.QIcon() icon2.addPixmap(QtGui.QPixmap(_fromUtf8(":/plugins/cadastre/icons/zoom.png")), QtGui.QIcon.Normal, QtGui.QIcon.Off) self.btZoomerLieu.setIcon(icon2) self.btZoomerLieu.setObjectName(_fromUtf8("btZoomerLieu")) self.horizontalLayout_2.addWidget(self.btZoomerLieu) self.btSelectionnerLieu = QtGui.QPushButton(self.grpLieu) self.btSelectionnerLieu.setText(_fromUtf8("")) icon3 = QtGui.QIcon() icon3.addPixmap(QtGui.QPixmap(_fromUtf8(":/plugins/cadastre/icons/select.png")), QtGui.QIcon.Normal, QtGui.QIcon.Off) self.btSelectionnerLieu.setIcon(icon3) self.btSelectionnerLieu.setObjectName(_fromUtf8("btSelectionnerLieu")) self.horizontalLayout_2.addWidget(self.btSelectionnerLieu) self.btExportParcelle = QtGui.QPushButton(self.grpLieu) self.btExportParcelle.setText(_fromUtf8("")) icon4 = QtGui.QIcon() icon4.addPixmap(QtGui.QPixmap(_fromUtf8(":/plugins/cadastre/icons/releve.png")), QtGui.QIcon.Normal, QtGui.QIcon.Off) self.btExportParcelle.setIcon(icon4) self.btExportParcelle.setObjectName(_fromUtf8("btExportParcelle")) self.horizontalLayout_2.addWidget(self.btExportParcelle) self.verticalLayout_14.addLayout(self.horizontalLayout_2) self.verticalLayout_13.addWidget(self.grpLieu) self.grpAdresse = QtGui.QGroupBox(self.scrollAreaWidgetContents_3) self.grpAdresse.setObjectName(_fromUtf8("grpAdresse")) self.verticalLayout_6 = QtGui.QVBoxLayout(self.grpAdresse) self.verticalLayout_6.setObjectName(_fromUtf8("verticalLayout_6")) self.gridLayout_3 = QtGui.QGridLayout() self.gridLayout_3.setObjectName(_fromUtf8("gridLayout_3")) self.liAdresse = QtGui.QComboBox(self.grpAdresse) sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.MinimumExpanding, QtGui.QSizePolicy.Fixed) sizePolicy.setHorizontalStretch(0) 
sizePolicy.setVerticalStretch(0) sizePolicy.setHeightForWidth(self.liAdresse.sizePolicy().hasHeightForWidth()) self.liAdresse.setSizePolicy(sizePolicy) self.liAdresse.setEditable(True) self.liAdresse.setObjectName(_fromUtf8("liAdresse")) self.gridLayout_3.addWidget(self.liAdresse, 0, 1, 1, 1) self.label_5 = QtGui.QLabel(self.grpAdresse) self.label_5.setObjectName(_fromUtf8("label_5")) self.gridLayout_3.addWidget(self.label_5, 0, 0, 1, 1) self.label_6 = QtGui.QLabel(self.grpAdresse) self.label_6.setMaximumSize(QtCore.QSize(60, 16777215)) self.label_6.setObjectName(_fromUtf8("label_6")) self.gridLayout_3.addWidget(self.label_6, 1, 0, 1, 1) self.liParcelleAdresse = QtGui.QComboBox(self.grpAdresse) sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.MinimumExpanding, QtGui.QSizePolicy.Fixed) sizePolicy.setHorizontalStretch(0) sizePolicy.setVerticalStretch(0) sizePolicy.setHeightForWidth(self.liParcelleAdresse.sizePolicy().hasHeightForWidth()) self.liParcelleAdresse.setSizePolicy(sizePolicy) self.liParcelleAdresse.setEditable(True) self.liParcelleAdresse.setObjectName(_fromUtf8("liParcelleAdresse")) self.gridLayout_3.addWidget(self.liParcelleAdresse, 1, 1, 1, 1) self.btResetParcelleAdresse = QtGui.QToolButton(self.grpAdresse) self.btResetParcelleAdresse.setIcon(icon) self.btResetParcelleAdresse.setObjectName(_fromUtf8("btResetParcelleAdresse")) self.gridLayout_3.addWidget(self.btResetParcelleAdresse, 1, 2, 1, 1) self.btSearchAdresse = QtGui.QToolButton(self.grpAdresse) icon5 = QtGui.QIcon() icon5.addPixmap(QtGui.QPixmap(_fromUtf8(":/plugins/cadastre/icons/search.png")), QtGui.QIcon.Normal, QtGui.QIcon.Off) self.btSearchAdresse.setIcon(icon5) self.btSearchAdresse.setObjectName(_fromUtf8("btSearchAdresse")) self.gridLayout_3.addWidget(self.btSearchAdresse, 0, 2, 1, 1) self.verticalLayout_6.addLayout(self.gridLayout_3) self.horizontalLayout = QtGui.QHBoxLayout() self.horizontalLayout.setObjectName(_fromUtf8("horizontalLayout")) self.btCentrerAdresse = QtGui.QPushButton(self.grpAdresse) self.btCentrerAdresse.setText(_fromUtf8("")) self.btCentrerAdresse.setIcon(icon1) self.btCentrerAdresse.setObjectName(_fromUtf8("btCentrerAdresse")) self.horizontalLayout.addWidget(self.btCentrerAdresse) self.btZoomerAdresse = QtGui.QPushButton(self.grpAdresse) self.btZoomerAdresse.setText(_fromUtf8("")) self.btZoomerAdresse.setIcon(icon2) self.btZoomerAdresse.setObjectName(_fromUtf8("btZoomerAdresse")) self.horizontalLayout.addWidget(self.btZoomerAdresse) self.btSelectionnerAdresse = QtGui.QPushButton(self.grpAdresse) self.btSelectionnerAdresse.setText(_fromUtf8("")) self.btSelectionnerAdresse.setIcon(icon3) self.btSelectionnerAdresse.setObjectName(_fromUtf8("btSelectionnerAdresse")) self.horizontalLayout.addWidget(self.btSelectionnerAdresse) self.btExportParcelleAdresse = QtGui.QPushButton(self.grpAdresse) self.btExportParcelleAdresse.setText(_fromUtf8("")) self.btExportParcelleAdresse.setIcon(icon4) self.btExportParcelleAdresse.setObjectName(_fromUtf8("btExportParcelleAdresse")) self.horizontalLayout.addWidget(self.btExportParcelleAdresse) self.verticalLayout_6.addLayout(self.horizontalLayout) self.verticalLayout_13.addWidget(self.grpAdresse) self.grpProprietaire = QtGui.QGroupBox(self.scrollAreaWidgetContents_3) self.grpProprietaire.setObjectName(_fromUtf8("grpProprietaire")) self.verticalLayout_2 = QtGui.QVBoxLayout(self.grpProprietaire) self.verticalLayout_2.setObjectName(_fromUtf8("verticalLayout_2")) self.gridLayout_2 = QtGui.QGridLayout() self.gridLayout_2.setObjectName(_fromUtf8("gridLayout_2")) 
self.liProprietaire = QtGui.QComboBox(self.grpProprietaire) sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.MinimumExpanding, QtGui.QSizePolicy.Fixed) sizePolicy.setHorizontalStretch(0) sizePolicy.setVerticalStretch(0) sizePolicy.setHeightForWidth(self.liProprietaire.sizePolicy().hasHeightForWidth()) self.liProprietaire.setSizePolicy(sizePolicy) self.liProprietaire.setEditable(True) self.liProprietaire.setObjectName(_fromUtf8("liProprietaire")) self.gridLayout_2.addWidget(self.liProprietaire, 0, 1, 1, 1) self.label_3 = QtGui.QLabel(self.grpProprietaire) self.label_3.setMaximumSize(QtCore.QSize(60, 16777215)) self.label_3.setObjectName(_fromUtf8("label_3")) self.gridLayout_2.addWidget(self.label_3, 1, 0, 1, 1) self.liParcelleProprietaire = QtGui.QComboBox(self.grpProprietaire) sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.MinimumExpanding, QtGui.QSizePolicy.Fixed) sizePolicy.setHorizontalStretch(0) sizePolicy.setVerticalStretch(0) sizePolicy.setHeightForWidth(self.liParcelleProprietaire.sizePolicy().hasHeightForWidth()) self.liParcelleProprietaire.setSizePolicy(sizePolicy) self.liParcelleProprietaire.setEditable(True) self.liParcelleProprietaire.setObjectName(_fromUtf8("liParcelleProprietaire")) self.gridLayout_2.addWidget(self.liParcelleProprietaire, 1, 1, 1, 1) self.label_4 = QtGui.QLabel(self.grpProprietaire) self.label_4.setObjectName(_fromUtf8("label_4")) self.gridLayout_2.addWidget(self.label_4, 0, 0, 1, 1) self.btSearchProprietaire = QtGui.QToolButton(self.grpProprietaire) self.btSearchProprietaire.setIcon(icon5) self.btSearchProprietaire.setObjectName(_fromUtf8("btSearchProprietaire")) self.gridLayout_2.addWidget(self.btSearchProprietaire, 0, 2, 1, 1) self.btResetParcelleProprietaire = QtGui.QToolButton(self.grpProprietaire) self.btResetParcelleProprietaire.setIcon(icon) self.btResetParcelleProprietaire.setObjectName(_fromUtf8("btResetParcelleProprietaire")) self.gridLayout_2.addWidget(self.btResetParcelleProprietaire, 1, 2, 1, 1) self.btExportParcelleProprietaire = QtGui.QToolButton(self.grpProprietaire) self.btExportParcelleProprietaire.setIcon(icon4) self.btExportParcelleProprietaire.setObjectName(_fromUtf8("btExportParcelleProprietaire")) self.gridLayout_2.addWidget(self.btExportParcelleProprietaire, 1, 3, 1, 1) self.btExportProprietaire = QtGui.QToolButton(self.grpProprietaire) self.btExportProprietaire.setIcon(icon4) self.btExportProprietaire.setObjectName(_fromUtf8("btExportProprietaire")) self.gridLayout_2.addWidget(self.btExportProprietaire, 0, 3, 1, 1) self.verticalLayout_2.addLayout(self.gridLayout_2) self.horizontalLayout_3 = QtGui.QHBoxLayout() self.horizontalLayout_3.setObjectName(_fromUtf8("horizontalLayout_3")) self.btCentrerProprietaire = QtGui.QPushButton(self.grpProprietaire) self.btCentrerProprietaire.setText(_fromUtf8("")) self.btCentrerProprietaire.setIcon(icon1) self.btCentrerProprietaire.setObjectName(_fromUtf8("btCentrerProprietaire")) self.horizontalLayout_3.addWidget(self.btCentrerProprietaire) self.btZoomerProprietaire = QtGui.QPushButton(self.grpProprietaire) self.btZoomerProprietaire.setText(_fromUtf8("")) self.btZoomerProprietaire.setIcon(icon2) self.btZoomerProprietaire.setObjectName(_fromUtf8("btZoomerProprietaire")) self.horizontalLayout_3.addWidget(self.btZoomerProprietaire) self.btSelectionnerProprietaire = QtGui.QPushButton(self.grpProprietaire) self.btSelectionnerProprietaire.setText(_fromUtf8("")) self.btSelectionnerProprietaire.setIcon(icon3) self.btSelectionnerProprietaire.setObjectName(_fromUtf8("btSelectionnerProprietaire")) 
self.horizontalLayout_3.addWidget(self.btSelectionnerProprietaire) self.verticalLayout_2.addLayout(self.horizontalLayout_3) self.verticalLayout_13.addWidget(self.grpProprietaire) self.txtLog = QtGui.QTextEdit(self.scrollAreaWidgetContents_3) self.txtLog.setObjectName(_fromUtf8("txtLog")) self.verticalLayout_13.addWidget(self.txtLog) spacerItem = QtGui.QSpacerItem(20, 40, QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Expanding) self.verticalLayout_13.addItem(spacerItem) self.scrollArea_3.setWidget(self.scrollAreaWidgetContents_3) self.verticalLayout.addWidget(self.scrollArea_3) cadastre_search_form.setWidget(self.dockWidgetContents) self.retranslateUi(cadastre_search_form) QtCore.QMetaObject.connectSlotsByName(cadastre_search_form) cadastre_search_form.setTabOrder(self.scrollArea_3, self.liAdresse) cadastre_search_form.setTabOrder(self.liAdresse, self.btSearchAdresse) cadastre_search_form.setTabOrder(self.btSearchAdresse, self.liParcelleAdresse) cadastre_search_form.setTabOrder(self.liParcelleAdresse, self.btResetParcelleAdresse) cadastre_search_form.setTabOrder(self.btResetParcelleAdresse, self.btCentrerAdresse) cadastre_search_form.setTabOrder(self.btCentrerAdresse, self.btZoomerAdresse) cadastre_search_form.setTabOrder(self.btZoomerAdresse, self.btSelectionnerAdresse) cadastre_search_form.setTabOrder(self.btSelectionnerAdresse, self.btExportParcelleAdresse) cadastre_search_form.setTabOrder(self.btExportParcelleAdresse, self.liCommune) cadastre_search_form.setTabOrder(self.liCommune, self.btResetCommune) cadastre_search_form.setTabOrder(self.btResetCommune, self.liSection) cadastre_search_form.setTabOrder(self.liSection, self.btResetSection) cadastre_search_form.setTabOrder(self.btResetSection, self.liParcelle) cadastre_search_form.setTabOrder(self.liParcelle, self.btResetParcelle) cadastre_search_form.setTabOrder(self.btResetParcelle, self.btCentrerLieu) cadastre_search_form.setTabOrder(self.btCentrerLieu, self.btZoomerLieu) cadastre_search_form.setTabOrder(self.btZoomerLieu, self.btSelectionnerLieu) cadastre_search_form.setTabOrder(self.btSelectionnerLieu, self.btExportParcelle) cadastre_search_form.setTabOrder(self.btExportParcelle, self.liProprietaire) cadastre_search_form.setTabOrder(self.liProprietaire, self.btSearchProprietaire) cadastre_search_form.setTabOrder(self.btSearchProprietaire, self.btExportProprietaire) cadastre_search_form.setTabOrder(self.btExportProprietaire, self.liParcelleProprietaire) cadastre_search_form.setTabOrder(self.liParcelleProprietaire, self.btResetParcelleProprietaire) cadastre_search_form.setTabOrder(self.btResetParcelleProprietaire, self.btExportParcelleProprietaire) cadastre_search_form.setTabOrder(self.btExportParcelleProprietaire, self.btCentrerProprietaire) cadastre_search_form.setTabOrder(self.btCentrerProprietaire, self.btZoomerProprietaire) cadastre_search_form.setTabOrder(self.btZoomerProprietaire, self.btSelectionnerProprietaire) cadastre_search_form.setTabOrder(self.btSelectionnerProprietaire, self.txtLog) def retranslateUi(self, cadastre_search_form): cadastre_search_form.setWindowTitle(_translate("cadastre_search_form", "Cadastre - Outils de recherche", None)) self.grpLieu.setTitle(_translate("cadastre_search_form", "Recherche de lieux", None)) self.label_12.setText(_translate("cadastre_search_form", "Commune", None)) self.label_2.setText(_translate("cadastre_search_form", "Parcelle", None)) self.label.setText(_translate("cadastre_search_form", "Section", None)) self.btResetCommune.setToolTip(_translate("cadastre_search_form", "Retour à 
l\'ensemble des communes", None)) self.btResetCommune.setText(_translate("cadastre_search_form", "...", None)) self.btResetSection.setToolTip(_translate("cadastre_search_form", "Retour à l\'ensemble des sections", None)) self.btResetSection.setText(_translate("cadastre_search_form", "...", None)) self.btResetParcelle.setToolTip(_translate("cadastre_search_form", "Retour à l\'ensemble des parcelles", None)) self.btResetParcelle.setText(_translate("cadastre_search_form", "...", None)) self.btCentrerLieu.setToolTip(_translate("cadastre_search_form", "Centrer sur l\'objet", None)) self.btZoomerLieu.setToolTip(_translate("cadastre_search_form", "Zoomer sur l\'objet", None)) self.btSelectionnerLieu.setToolTip(_translate("cadastre_search_form", "Sélectionner l\'objet", None)) self.btExportParcelle.setToolTip(_translate("cadastre_search_form", "Exporter le relevé parcellaire", None)) self.grpAdresse.setTitle(_translate("cadastre_search_form", "Recherche d\'adresse", None)) self.label_5.setText(_translate("cadastre_search_form", "Adresse", None)) self.label_6.setText(_translate("cadastre_search_form", "Parcelle", None)) self.btResetParcelleAdresse.setToolTip(_translate("cadastre_search_form", "Retour à l\'ensemble des parcelles", None)) self.btResetParcelleAdresse.setText(_translate("cadastre_search_form", "...", None)) self.btSearchAdresse.setToolTip(_translate("cadastre_search_form", "Lancer la recherche", None)) self.btSearchAdresse.setText(_translate("cadastre_search_form", "...", None)) self.btCentrerAdresse.setToolTip(_translate("cadastre_search_form", "Centrer sur la/les parcelle(s)", None)) self.btZoomerAdresse.setToolTip(_translate("cadastre_search_form", "Zoomer sur la/les parcelle(s)", None)) self.btSelectionnerAdresse.setToolTip(_translate("cadastre_search_form", "Sélectionner la/les parcelle(s)", None)) self.btExportParcelleAdresse.setToolTip(_translate("cadastre_search_form", "Exporter le relevé parcellaire", None)) self.grpProprietaire.setTitle(_translate("cadastre_search_form", "Recherche de propriétaire", None)) self.label_3.setText(_translate("cadastre_search_form", "Parcelle", None)) self.label_4.setText(_translate("cadastre_search_form", "Nom", None)) self.btSearchProprietaire.setToolTip(_translate("cadastre_search_form", "Lancer la recherche", None)) self.btSearchProprietaire.setText(_translate("cadastre_search_form", "...", None)) self.btResetParcelleProprietaire.setToolTip(_translate("cadastre_search_form", "Retour à l\'ensemble des parcelles", None)) self.btResetParcelleProprietaire.setText(_translate("cadastre_search_form", "...", None)) self.btExportParcelleProprietaire.setToolTip(_translate("cadastre_search_form", "Exporter le relevé parcellaire", None)) self.btExportParcelleProprietaire.setText(_translate("cadastre_search_form", "...", None)) self.btExportProprietaire.setToolTip(_translate("cadastre_search_form", "Exporter le relevé de propriété", None)) self.btExportProprietaire.setText(_translate("cadastre_search_form", "...", None)) self.btCentrerProprietaire.setToolTip(_translate("cadastre_search_form", "Centrer sur la/les parcelle(s)", None)) self.btZoomerProprietaire.setToolTip(_translate("cadastre_search_form", "Zoomer sur la/les parcelle(s)", None)) self.btSelectionnerProprietaire.setToolTip(_translate("cadastre_search_form", "Sélectionner la/les parcelle(s)", None)) self.txtLog.setHtml(_translate("cadastre_search_form", "<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0//EN\" \"http://www.w3.org/TR/REC-html40/strict.dtd\">\n" "<html><head><meta name=\"qrichtext\" 
content=\"1\" /><style type=\"text/css\">\n" "p, li { white-space: pre-wrap; }\n" "</style></head><body style=\" font-family:\'Ubuntu\'; font-size:11pt; font-weight:400; font-style:normal;\">\n" "<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p></body></html>", None)) import resource_rc
gpl-3.0
-674,153,083,256,626,700
67.705382
180
0.732363
false
3.361469
false
false
false
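The record above is pyuic-generated boilerplate for a QGIS cadastre search dock widget. A minimal sketch of the usual way such a generated class is consumed follows; the import path and the Ui_cadastre_search_form class name are assumptions, since only the generated method bodies appear in this record.

from PyQt4 import QtGui
from cadastre_search_form import Ui_cadastre_search_form  # assumed module/class name

class CadastreSearchDock(QtGui.QDockWidget):
    def __init__(self, parent=None):
        super(CadastreSearchDock, self).__init__(parent)
        self.ui = Ui_cadastre_search_form()
        # setupUi() builds every child widget (combo boxes, tool buttons, layouts)
        # on this dock widget and wires the tab order defined above.
        self.ui.setupUi(self)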
BrambleLLC/H4H_2014
keygen.py
1
2242
import hashlib
import time

TYPE_USER = 0
TYPE_ADMIN = 1


#TODO: remove random element from seed generation
def gen_key(username, key_type=TYPE_USER):
    current_time = str(int(round(time.time() * 1000)))
    sig = username + str(current_time)
    seed = (int(hashlib.sha512(sig).hexdigest(), 16)) >> 480

    key_bytes = [0, 0, 0, 0]
    key_bytes[0] = gen_key_byte(seed, 9, 3, 20)
    key_bytes[1] = gen_key_byte(seed, 5, 0, 6)
    key_bytes[2] = gen_key_byte(seed, 1, 2, 91)
    key_bytes[3] = gen_key_byte(seed, 7, 1, 100)

    key = hex(seed).replace('0x', '').replace('L', '')
    for i in range(4):
        result = hex(key_bytes[i]).replace('0x', '').replace('L', '')
        while len(result) < 2:
            result = '0' + result
        key += result
    key += checksum(key, key_type)

    i = len(key) - 4
    while i > 1:
        key = key[:i] + '-' + key[i:]
        i -= 4

    return key.upper()


def gen_key_byte(seed, a, b, c):
    a %= 25
    b %= 3
    if a % 2 == 0:
        return ((seed >> a) & 0x000000FF) ^ ((seed >> b) | c)
    else:
        return ((seed >> a) & 0x000000FF) ^ ((seed >> b) & c)


def checksum(key_partial, key_type):
    a = 0
    b = 0
    sum_ = 0
    for i in range(len(key_partial)):
        b += ord(key_partial[i])
        if b > 0x00FF:
            b -= 0x00FF
        a += b
        if a > 0x00FF:
            a -= 0x00FF
    if key_type == TYPE_USER:
        sum_ = (a << 8) + b
    elif key_type == TYPE_ADMIN:
        sum_ = (b << 8) + a
    result = hex(sum_).replace('0x', '')
    while len(result) < 4:
        result = '0' + result
    return result


def check_key(key):
    key = key.lower().replace('-', '')
    if len(key) != 20:
        return [False, None]
    chksm = key[16:]
    key = key[:16]
    if checksum(key, TYPE_USER) == chksm:
        return [True, TYPE_USER]
    elif checksum(key, TYPE_ADMIN) == chksm:
        return [True, TYPE_ADMIN]
    else:
        return [False, None]


def is_key(key):
    if len(key) != 24:
        return False
    if key.count('-') != 4:
        return False
    key = key.lower().replace('-', '')
    valid_chars = '1234567890abcdef'
    for char in key:
        if char not in valid_chars:
            return False
    return True
mit
-8,204,002,816,051,934,000
24.781609
69
0.512935
false
3.046196
false
false
false
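A short, hedged usage sketch for the key module above, assuming Python 2 (the str hashing and the 'L' suffix stripping suggest it); the output differs per call because the seed mixes in the current time.

key = gen_key('alice', TYPE_USER)   # usually 5 dash-separated groups of 4 hex chars
print(key)
print(is_key(key))                  # True when the key is 24 chars, 4 dashes, hex only
print(check_key(key))               # [True, TYPE_USER] when the checksum matches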
openkamer/openkamer
scraper/documents.py
1
4689
import logging

import requests
import lxml.html
import lxml.etree

logger = logging.getLogger(__name__)


def get_html_content(document_id):
    url = 'https://zoek.officielebekendmakingen.nl/{}.html'.format(document_id)
    response = requests.get(url, timeout=60)
    tree = lxml.html.fromstring(response.content)
    elements = tree.xpath('//div[@class="stuk"]')
    if not elements:
        elements = tree.xpath('//div[contains(@class, "stuk")]')
    if not elements:
        logger.error('no document content found for document url: ' + url)
        elements = tree.xpath('//main[@class="global-main"]')
    html_content = lxml.etree.tostring(elements[0])
    return html_content


def get_metadata(document_id):
    logger.info('get metadata url for document id: {}'.format(document_id))
    url = 'https://zoek.officielebekendmakingen.nl/{}'.format(document_id)
    response = requests.get(url, timeout=60)
    # get redirected urls (new document ids)
    xml_url = response.url + '/metadata.xml'
    logger.info('get metadata url: ' + xml_url)
    page = requests.get(xml_url, timeout=60)
    tree = lxml.etree.fromstring(page.content)

    attributes_transtable = {
        'DC.type': 'types',
        'OVERHEIDop.dossiernummer': 'dossier_ids',
        'DC.title': 'title_full',
        'OVERHEIDop.documenttitel': 'title_short',
        'OVERHEIDop.indiener': 'submitter',
        'OVERHEIDop.ontvanger': 'receiver',
        'OVERHEIDop.ondernummer': 'id_sub',
        'OVERHEIDop.publicationName': 'publication_type',
        'DCTERMS.issued': 'date_published',
        'DCTERMS.available': 'date_available',
        'OVERHEIDop.datumBrief': 'date_letter_sent',
        'OVERHEIDop.datumIndiening': 'date_submitted',
        'OVERHEIDop.datumOntvangst': 'date_received',
        'OVERHEIDop.datumVergadering': 'date_meeting',
        'OVERHEID.organisationType': 'organisation_type',
        'OVERHEID.category': 'category',
        'DC.creator': 'publisher',
        "OVERHEIDop.vergaderjaar": 'vergaderjaar',
        "OVERHEIDop.vraagnummer": 'vraagnummer',
        "OVERHEIDop.aanhangsel": 'aanhangsel',
        "DC.identifier": 'overheidnl_document_id',
    }

    metadata = {}
    for key, name in attributes_transtable.items():
        elements = tree.xpath('/metadata_gegevens/metadata[@name="{}"]'.format(key))
        if not elements:
            # logger.warning('' + key + ' was not found')
            metadata[name] = ''
            continue
        if len(elements) > 1:  # if name == 'category' or name == 'submitter':
            metadata[name] = ''
            for element in elements:
                if metadata[name]:
                    metadata[name] += '|'
                metadata[name] += element.get('content')
        else:
            metadata[name] = elements[0].get('content')

    if not metadata['title_short']:
        elements = tree.xpath('/metadata_gegevens/metadata[@name="DC.type"]')
        if elements:
            metadata['title_short'] = elements[0].get('content')

    """ agenda code """
    metadata['is_agenda'] = False
    elements = tree.xpath('/metadata_gegevens/metadata[@name="DC.type"]')
    for element in elements:
        if element.get('scheme') == 'OVERHEIDop.Parlementair':
            metadata['is_agenda'] = element.get('content') == 'Agenda'

    elements = tree.xpath('/metadata_gegevens/metadata[@name="OVERHEIDop.behandeldDossier"]')
    metadata['behandelde_dossiers'] = []
    for element in elements:
        metadata['behandelde_dossiers'].append(element.get('content'))

    logger.info('get metadata url for document id: ' + str(document_id) + ' - END')
    return metadata


# def search_politieknl_dossier(dossier_id):
#     dossier_url = 'https://zoek.officielebekendmakingen.nl/resultaten'
#     document_ids = []
#     pagnr = 1
#     max_pages = 10
#     while pagnr < max_pages:
#         logger.info('reading page: ' + str(pagnr))
#         params = {
#             'pagina': pagnr,
#             'q': '(dossiernummer="{}")'.format(dossier_id),
#             'pg': 100
#         }
#         pagnr += 1
#         response = requests.get(dossier_url, params, timeout=60)
#         tree = lxml.html.fromstring(response.content)
#         li_elements = tree.xpath('//ol[@id="PublicatieList"]/li')
#         if not li_elements:
#             break
#         for list_element in li_elements:
#             document_link = list_element.xpath('div[@class="result-item"]/a')[0]
#             relative_url = document_link.get('href')
#             document_id = relative_url.replace('/', '').replace('.html', '')
#             document_ids.append(document_id)
#     return document_ids
mit
-7,506,058,310,461,450,000
39.076923
93
0.603114
false
3.40029
false
false
false
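A hedged usage sketch for the scraper above; the document id is made up for illustration, and both calls perform live requests against zoek.officielebekendmakingen.nl.

import logging
logging.basicConfig(level=logging.INFO)

document_id = 'kst-34300-1'              # hypothetical kamerstuk id
metadata = get_metadata(document_id)
print(metadata['title_full'], metadata['date_published'])
html = get_html_content(document_id)     # raw HTML of the publication body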
brucewinter/myhouse
air.py
1
26554
# -*- coding: utf-8 -*- # Monitor air quality, graph it to a local LCD, send it to a smarthouse via mqtt, and control a range hood fan for smoke control # More info at: # https://www.youtube.com/brucewinter # https://github.com/brucewinter # Note: # Local fan control via switchbot is done from the smart house program via mqtt, for more sophisticated control (e.g. hysteresis) # Simple control could be done directly without a smart house. # pip3 install RPI.GPIO # pip3 install Adafruit-Blinka # pip3 install adafruit_pm25 # pip3 install adafruit-circuitpython-pm25 # pip3 install adafruit-circuitpython-sgp30 # pip3 install adafruit-circuitpython-aqi # pip3 install aqi # pip3 install python-aqi # pip3 install adafruit-circuitpython-rgb_display # pip3 install paho-mqtt # pip3 install bluepy import os, sys, time, math, subprocess, json, binascii, aqi, requests import digitalio, board, busio import adafruit_rgb_display.st7789 as st7789 import paho.mqtt.client as mqtt #rom adafruit_rgb_display.rgb import color565 from adafruit_pm25.i2c import PM25_I2C from PIL import Image, ImageDraw, ImageFont sensor_inst = sys.argv[1] sensor_pm25 = 1 sensor_sgp30 = 0 sensor_ahtx0 = 0 sensor_bme680 = 0 sensor_scd30 = 0 sensor_switchbot = 0 if (len(sys.argv) > 2): if sys.argv[2].find('sgp30') != -1: sensor_sgp30 = 1 if sys.argv[2].find('ahtx0') != -1: sensor_ahtx0 = 1 if sys.argv[2].find('bme680') != -1: sensor_bme680 = 1 if sys.argv[2].find('scd30') != -1: sensor_scd30 = 1 if sys.argv[2].find('switchbot') != -1: sensor_switchbot = 1 air_data = {} air_data_in = {} air_data_bed = {} air_data_out = {} air_data_file = 'air_data.json' start_time = time.time() watchdog = 0 # Get previous air data for plots def load_data(): global air_data print('Loading previous air history') if os.path.isfile(air_data_file): f = open(air_data_file) try: air_data = json.load(f) except: print('Unable to load jason data') if 'g1' not in air_data: air_data['g1'] = {'aqi_in' : [], 'aqi_out' : [], 'aqi_out_i' : [], 'tvoc' : [], 'co2' : []} if 'g2' not in air_data: air_data['g2'] = {'aqi_in' : [], 'aqi_out' : [], 'aqi_out_i' : [], 'tvoc' : [], 'co2' : []} if 'g3' not in air_data: air_data['g3'] = {'aqi_in' : [], 'aqi_out' : [], 'aqi_out_i' : [], 'tvoc' : [], 'co2' : []} # if 'g1' not in air_data: air_data['g1'] = {'p003' : [], 'pm010s' : [], 'aqi_out' : [], 'aqi_pm25' : [], 'voc' : []} # 40 min chart # if 'g2' not in air_data: air_data['g2'] = {'p003' : [], 'pm010s' : [], 'aqi_out' : [], 'aqi_pm25' : [], 'voc' : []} # 4 hour chart # if 'g3' not in air_data: air_data['g3'] = {'p003' : [0], 'pm010s' : [0], 'aqi_out' : [0], 'aqi_pm25' : [0], 'voc' : [0]} # max chart # if 'g4' not in air_data: air_data['g4'] = {'p003' : [999], 'pm010s' : [999], 'aqi_out' : [999], 'aqi_pm25' : [999], 'voc' : [999]} # min chart # print(air_data) def setup_mqtt(): global mqttc print('air mqtt setup') mqttc = mqtt.Client() mqttc.on_connect = on_connect mqttc.on_message = on_message # mqttc.on_log = on_log # Use this for debugging errors in callbacks mqttc.username_pw_set(username='my_name',password='my_password') mqttc.connect('192.168.1.123') def on_connect(mqttc, obj, flags, rc): print('air connect rc: '+str(rc)) mqttc.subscribe('ha/heartbeat') mqttc.subscribe('sensor/+') mqttc.subscribe('ha/stove') data = {'td' : ' '} def on_message(mqttc, obj, msg): global air_data_in, air_data_out, air_data_bed, watchdog p = msg.payload.decode('utf-8') # print('air message: ' + str(msg.topic) + ' = ' + p) if msg.topic == 'ha/heartbeat' : data['td'] = time.strftime('%a %d 
%I:%M:%S', time.localtime(int(p) / 1000)) watchdog = 0 if msg.topic == 'sensor/Outside Temperature' : data['tout'] = p if msg.topic == 'sensor/Upstairs Temperature' : data['tin'] = p if msg.topic == 'sensor/Upstairs Temperature' : data['tin'] = p if msg.topic == 'sensor/Air AQI Out I' : data['aqi_out'] = p # if msg.topic == 'sensor/air out' : data['aqi_out'] = p if msg.topic == 'sensor/air in' : air_data_in = json.loads(p) if msg.topic == 'sensor/air out' : air_data_out = json.loads(p) if msg.topic == 'sensor/air bedroom' : air_data_bed = json.loads(p) # Compensate to humidity. Requires Absolute humidity. This formula is accurate to within 0.1% over the temperature range –30°C to +35°C # Absolute Humidity (grams/m3) = 6.112 × e^[(17.67 × T)/(T+243.5)] × rh × 2.1674 if msg.topic == 'sensor/Upstairs Humidity': data['thum'] = p tin = float(data['tin']) t1 = (17.67 * tin)/(tin + 243.5) ah = 6.112 * math.e**t1 * float(p) * 2.1674 print('Humidity adjustment: t1=%s t=%s h=%s ah=%s' % (t1, data['tin'], p, ah)) if (sensor_sgp30): sgp30.set_iaq_humidity(ah) if msg.topic == 'ha/stove' and sensor_switchbot : if (1): rc = os.system('/mnt/nas/bin/switchbot_stove ' + p) print('rc=' + str(rc)) mac = 'EF:ED:2F:B6:7A:0A' if p == 'on' else 'FA:CC:32:1F:DF:3F' else: # This stopped working, connect fails, not sure why. Reverted back to system call which still works. i = 0 while (i < 4): try: print('Connecting... ' + mac) p = Peripheral(mac, 'random') break except: print('Bluetooth connect failed, retry i=' + str(i)) i += 1 time.sleep(1) print('connected') hand_service = p.getServiceByUUID('cba20d00-224d-11e6-9fb8-0002a5d5c51b') hand = hand_service.getCharacteristics('cba20002-224d-11e6-9fb8-0002a5d5c51b')[0] hand.write(binascii.a2b_hex('570100')) print('switchbot done') def on_log(mqttc, obj, level, string): print(string) def setup_sensors(): global pm25, sgp30, bme680, ahtx0, scd30 reset_pin = None i2c = busio.I2C(board.SCL, board.SDA, frequency=100000) # Create library object, use 'slow' 100KHz frequency! if (sensor_pm25): print('Setting up PM2.5 sensor') # pm25 = adafruit_pm25.PM25_I2C(i2c, reset_pin) # Connect to a PM2.5 sensor over I2C pm25 = PM25_I2C(i2c, reset_pin) # Connect to a PM2.5 sensor over I2C if (sensor_sgp30): print('Setting up sgp30') import adafruit_sgp30 sgp30 = adafruit_sgp30.Adafruit_SGP30(i2c) if 'sgp30_baseline_co2' in air_data: print('Found SGP30 sensor, setting baseline: co2=%s voc=%s' % (air_data['sgp30_baseline_co2'], air_data['sgp30_baseline_voc'])) sgp30.iaq_init() # Initialize the IAQ algorithm. sgp30.set_iaq_baseline(air_data['sgp30_baseline_co2'], air_data['sgp30_baseline_voc']) if (sensor_ahtx0): print('Setting up ahtx0 sensor') import adafruit_ahtx0 ahtx0 = adafruit_ahtx0.AHTx0(i2c) if (sensor_switchbot): print('Setting up bluetooth') from bluepy.btle import Peripheral # The Bosch bme680 sensor is a pain, does not return VOC or AQI, only raw gas resistance (high res -> lower voc) # https://learn.adafruit.com/adafruit-bme680-humidity-temperature-barometic-pressure-voc-gas/bsec-air-quality-library # The Bosch BSEC library is an all-in-one Arduino library that will get you all the values from the sensor and also perform the AQI calculations. It is not an open source library! You can only use it in Arduino and only with the chipsets supported. 
# https://github.com/adafruit/Adafruit_CircuitPython_BME680/blob/master/adafruit_bme680.py if (sensor_bme680): print('Setting up bme680 sensor') import adafruit_bme680 bme680 = adafruit_bme680.Adafruit_BME680_I2C(i2c) bme680.pressure_oversample = 8 bme680.temperature_oversample = 8 bme680.humidity_oversample = 8 if (sensor_scd30): print('Setting up scd30 sensor') import adafruit_scd30 while 1: try: print('Connecting to scd30') scd30 = adafruit_scd30.SCD30(i2c) break except: print('Failed, retrying...') time.sleep(1) # scd30.measurement_interval = 4 # scd30.ambient_pressure = 1100 # Units?? scd30.altitude = 207 # In meters, 680 feet # scd30.forced_recalibration_reference = 409 # Use self_calibration instead scd30.self_calibration_enabled = True def setup_display(): global my_disp, draw, font, fontsize, image, rotation, height, width, top, bottom, backlight, buttonA, buttonB # Configuration for CS and DC pins (these are FeatherWing defaults on M0/M4): cs_pin = digitalio.DigitalInOut(board.CE0) dc_pin = digitalio.DigitalInOut(board.D25) reset_pin = None BAUDRATE = 64000000 # Config for display baudrate (default max is 24mhz): spi = board.SPI() # Setup SPI bus using hardware SPI: my_disp = st7789.ST7789( spi, cs=cs_pin, dc=dc_pin, rst=reset_pin, baudrate=BAUDRATE, width=240, height=240, x_offset=0, y_offset=80, ) # Create blank image for drawing. Make sure to create image with mode 'RGB' for full color. height = my_disp.width # we swap height/width to rotate it to landscape! width = my_disp.height image = Image.new('RGB', (width, height)) rotation = 180 padding = -2 top = padding bottom = height - padding draw = ImageDraw.Draw(image) # Get drawing object to draw on image. # Alternatively load a TTF font. Make sure the .ttf font file is in the same directory as the python script! 
# Some other nice fonts to try: http://www.dafont.com/bitmap.php font = ImageFont.truetype('/usr/share/fonts/truetype/dejavu/DejaVuSans.ttf', 24) fontsize = font.getsize('hi')[1] # Turn on the backlight backlight = digitalio.DigitalInOut(board.D22) backlight.switch_to_output() backlight.value = True # Setup up buttons buttonA = digitalio.DigitalInOut(board.D23) buttonB = digitalio.DigitalInOut(board.D24) buttonA.switch_to_input() buttonB.switch_to_input() def get_cpu_data(): # print('1', end='', flush=True) # cmd = "hostname -I | cut -d' ' -f1" # data['ip'] = 'IP: ' + subprocess.check_output(cmd, shell=True).decode('utf-8') # cmd = "free -m | awk 'NR==2{printf \"Mem: %s/%s MB %.2f%%\", $3,$2,$3*100/$2 }'" # data['mem'] = subprocess.check_output(cmd, shell=True).decode("utf-8") # cmd = 'df -h | awk \'$NF=="/"{printf "Disk: %d/%d GB %s", $3,$2,$5}\'' # data['disk'] = subprocess.check_output(cmd, shell=True).decode("utf-8") # cmd = "top -bn1 | grep load | awk '{printf \"%.2f\", $(NF-2)}'" cmd = "cat /proc/loadavg | cut -d' ' -f1" data['cload'] = subprocess.check_output(cmd, shell=True).decode("utf-8").rstrip() cmd = "cat /sys/class/thermal/thermal_zone0/temp | awk '{printf \"%i\", $(NF-0) / 1000}'" # pylint: disable=line-too-long data['ctemp'] = subprocess.check_output(cmd, shell=True).decode("utf-8") cmd = "uptime | cut -d',' -f1 | cut -d' ' -f4,5" data['uptime'] = subprocess.check_output(cmd, shell=True).decode("utf-8") p003_prev = 0 def get_air_data(): global air_data, pm25_data, p003_prev # print('*', end='', flush=True) print('*', flush=True) pm25_data = {} if (sensor_pm25): try: pm25_data = pm25.read() # print(pm25_data) except RuntimeError: print('Unable to read from pm25 sensor') return # pm25_data['co2'] = 0 pm25_data['tvoc'] = 0 pm25_data['gas'] = 0 pm25_data['pressure'] = 0 pm25_data['humidity'] = 0 pm25_data['temperature'] = 0 if (sensor_sgp30): try: eCO2, TVOC = sgp30.iaq_measure() data['eco2'] = 'co2=%d voc=%d' % (eCO2, TVOC) # Do not pass data back to house for the first 20 seconds, it give bogus data before it is warmed up if (time.time() - start_time) > 20: # pm25_data['co2'] = eCO2 # Store in pm25 structure, so we can pass back to mqtt easily pm25_data['eco2'] = eCO2 pm25_data['tvoc'] = TVOC co2_base, voc_base = sgp30.baseline_eCO2, sgp30.baseline_TVOC air_data['sgp30_baseline_co2'], air_data['sgp30_baseline_voc'] = co2_base, voc_base baseline = 'base co2=%d voc=%d' % (co2_base, voc_base) print(data['eco2'] + ' ' + baseline) except RuntimeError: print('Unable to read from sgp30 sensor') return pm010s = pm25_data.get('pm10 standard', 0) pm025s = pm25_data.get('pm25 standard', 0) pm100s = pm25_data.get('pm100 standard', 0) pm010e = pm25_data.get('pm10 env', 0) pm025e = pm25_data.get('pm25 env', 0) pm100e = pm25_data.get('pm100 env', 0) p003 = pm25_data.get('particles 03um', 0) p005 = pm25_data.get('particles 05um', 0) p010 = pm25_data.get('particles 10um', 0) p025 = pm25_data.get('particles 25um', 0) p050 = pm25_data.get('particles 50um', 0) p100 = pm25_data.get('particles 100um', 0) if (sensor_ahtx0): pm25_data['temperature'] = 32 + (9/5)*ahtx0.temperature pm25_data['humidity'] = ahtx0.relative_humidity print("ahtx0 temperature: %0.1f F, humidity=%3i" % (pm25_data['temperature'], pm25_data['humidity'])) if (sensor_bme680): try: pm25_data['gas'] = bme680.gas pm25_data['pressure'] = bme680.pressure / 33.8639 # Convert from kPa to inHg (inches of mercury) pm25_data['humidity'] = bme680.humidity pm25_data['temperature'] = 32 + (9/5)*bme680.temperature print("bme680 temperature: 
%0.1f F, humidity=%3i" % (pm25_data['temperature'], pm25_data['humidity'])) except RuntimeError: print('Unable to read from bme680 sensor') return if (sensor_scd30): try: if scd30.data_available: pm25_data['temperature'] = 32 + (9/5)*scd30.temperature pm25_data['humidity'] = int(scd30.relative_humidity) pm25_data['co2'] = int(scd30.CO2) data['co2'] = 'co2=%d' % (pm25_data['co2']) print("scd30 temperature: %0.1f F, humidity=%3i, co2=%4i" % (pm25_data['temperature'], pm25_data['humidity'], pm25_data['co2'])) except RuntimeError: print('Unable to read from sdc30 sensor') return print('#', end='', flush=True) # Avoid abend with values > 500: File "/home/pi/.local/lib/python3.7/site-packages/aqi/algos/base.py", line 91, in iaqi (aqilo, aqihi) = self.piecewise['aqi'][idx] IndexError: list index out of range if (pm025s < 500): aqi_pm10 = int(aqi.to_iaqi(aqi.POLLUTANT_PM10, pm010s, algo=aqi.ALGO_EPA)) aqi_pm25 = int(aqi.to_iaqi(aqi.POLLUTANT_PM25, pm025s, algo=aqi.ALGO_EPA)) pm25_data['aqi pm10'] = aqi_pm10 pm25_data['aqi pm25'] = aqi_pm25 else: # use previous data aqi_pm10 = pm25_data['aqi pm10'] aqi_pm25 = pm25_data['aqi pm25'] aqi_in = int(air_data_in.get( 'aqi pm25', 0)) aqi_bed = int(air_data_bed.get('aqi pm25', 0)) aqi_out = int(air_data_out.get('aqi pm25', 0)) aqi_out_i = int(data.get('aqi_out', 0)) tvoc = pm25_data['tvoc'] gas = pm25_data['gas'] data['rpms'] = 'S: ' + str(pm010s) + ' ' + str(pm025s) + ' ' + str(pm100s) data['aqi'] = 'A: I:' + str(aqi_in) + ' B:' + str(aqi_bed) + ' O:' + str(aqi_bed) + ' ' + str(aqi_out_i) data['rp1'] = 'P: ' + str(p003) + ' ' + str(p010) + ' ' + str(p100) # data['rp1'] = 'P: ' + str(p003) + ' ' + str(p005) + ' ' + str(p010) data['rp2'] = 'P: ' + str(p025) + ' ' + str(p050) + ' ' + str(p100) p003 = int(p003) # Is always > 10, so need for floating point p003l = int(10 * math.log10(p003)) if p003 > 0 else 0 pm010sl = 220 if pm010s > 220 else pm010s t = int(time.time()) # Look for rapid increases, so we can detect smoke quickly # if 'p003_prev' not in locals(): p003_prev = 0 # This did not work :( Use global instead p003d = p003 - p003_prev p003_prev = p003 print('p003=' + str(p003) + ' p003d=' + str(p003d)) if t % 5 == 0 or p003d > 100 : mqttc.publish('sensor/air ' + sensor_inst, json.dumps(pm25_data)) print('mqtt published to: sensor/air ' + sensor_inst) if (sensor_inst == 'out' and t % 60 == 0) : publish_data(pm025s, pm010s, pm100s) if t % 10 == 0 : store_data('g1', aqi_in, aqi_out, aqi_out_i, tvoc) # Data for a 40 min graph: 240 / 40 = 6 readings per minute, 1 every 10 seconds. 
if t % 60 == 0 : store_data('g2', aqi_in, aqi_out, aqi_out_i, tvoc) # Data for a 4 hour graph: 240 / 4*60 = 1 readings per minute if t % (60*24) == 0 : store_data('g3', aqi_in, aqi_out, aqi_out_i, tvoc) # Data for a 4 day graph: 240 / 96 hours = 2.5 per hour, 1 for every 24 minutes if t % (60*5) == 0 : print('Saving air_data') with open(air_data_file, 'w') as f: json.dump(air_data, f, sort_keys=True, indent=4) f.close() # No longer using this max/min data # if p003 > air_data['g4']['p003'][-1] : air_data['g4']['p003'][-1] = p003 # if pm010s > air_data['g4']['pm010s'][-1] : air_data['g4']['pm010s'][-1] = pm010s # if aqi_pm25 > air_data['g4']['aqi_pm25'][-1] : air_data['g4']['aqi_pm25'][-1] = aqi_pm25 # if aqi_out > air_data['g4']['aqi_out'][-1] : air_data['g4']['aqi_out'][-1] = aqi_out # if p003 < air_data['g4']['p003'][-1] : air_data['g4']['p003'][-1] = p003 # if pm010s < air_data['g4']['pm010s'][-1] : air_data['g4']['pm010s'][-1] = pm010s # if aqi_pm25 < air_data['g4']['aqi_pm25'][-1] : air_data['g4']['aqi_pm25'][-1] = aqi_pm25 # if aqi_out < air_data['g4']['aqi_out'][-1] : air_data['g4']['aqi_out'][-1] = aqi_out def store_data(chart, aqi_in, aqi_out, aqi_out_i, tvoc): air_data[chart]['aqi_in'].append(aqi_in) air_data[chart]['aqi_out'].append(aqi_out) air_data[chart]['aqi_out_i'].append(aqi_out_i) air_data[chart]['tvoc'].append(tvoc) if len(air_data[chart]['aqi_in']) > 240: air_data[chart]['aqi_in'] = air_data[chart]['aqi_in'][1:] air_data[chart]['aqi_out'] = air_data[chart]['aqi_out'][1:] air_data[chart]['aqi_out_i'] = air_data[chart]['aqi_out_i'][1:] air_data[chart]['tvoc'] = air_data[chart]['tvoc'][1:] def store_data_old(chart): air_data[chart]['p003'].append(p003l) air_data[chart]['pm010s'].append(pm010sl) air_data[chart]['aqi_pm25'].append(aqi_pm25) air_data[chart]['aqi_out'].append(aqi_out) air_data[chart]['voc'].append(pm25_data['tvoc']) if len(air_data[chart]['pm010s']) > 240: air_data[chart]['p003'] = air_data[chart]['p003'][1:] air_data[chart]['pm010s'] = air_data[chart]['pm010s'][1:] air_data[chart]['aqi_pm25'] = air_data[chart]['aqi_pm25'][1:] air_data[chart]['aqi_out'] = air_data[chart]['aqi_out'][1:] air_data[chart]['voc'] = air_data[chart]['voc'][1:] def display_data(): draw.rectangle((0, 0, width, height), outline=0, fill=(0, 0, 0)) # Clear screen if 'aqi' not in data: return # Skip if we do not have data yet on startup if not buttonA.value : display_data2() elif not buttonB.value : display_data3() else : display_data1() my_disp.image(image, rotation) # Display image. 
def display_data1(): global air_data plot_data(air_data['g1']['aqi_in'], 1, '#FF0000') plot_data(air_data['g1']['aqi_out'], 1, '#00FF00') # plot_data(air_data['g1']['aqi_out_i'], 1, '#0000FF') plot_data(air_data['g1']['co2'], .5, '#0000FF') plot_data(air_data['g1']['tvoc'], .1, '#888888') to = data.get('tout', 0) ti = data.get('tin', 0) temps = 'T In/Out: ' + str(ti) + '/' + str(to) temps_diff = float(to) - float(ti) if abs(temps_diff) < 1 : temps_color = '#00FF00' elif temps_diff > 0 : temps_color = '#FF0000' else : temps_color = '#0000FF' cpu = 'C: ' + data['cload'] + ' ' + data['ctemp'] + ' ' + data['uptime'] x = 0 y = top draw.text((x, y), data['td'], font=font, fill='#FFFFFF') y += fontsize draw.text((x, y), temps, font=font, fill = temps_color) y += fontsize # draw.text((x, y), data['rpms'], font=font, fill='#FFFFFF') # y += fontsize draw.text((x, y), data['aqi'], font=font, fill='#FFFFFF') y += fontsize draw.text((x, y), data['rp1'], font=font, fill='#FFFFFF') # y += fontsize # draw.text((x, y), data['rp2'], font=font, fill='#FFFFFF') y += fontsize if (sensor_sgp30): draw.text((x, y), data['eco2'], font=font, fill='#FFFFFF') y += fontsize if (sensor_scd30 and 'co2' in data): draw.text((x, y), data['co2'], font=font, fill='#FFFFFF') y += fontsize # draw.text((x, y), cpu, font=font, fill='#0000FF') # y += fontsize def display_data2(): global air_data plot_data(air_data['g2']['aqi_in'], 1, '#FF0000') plot_data(air_data['g2']['aqi_out'], 1, '#00FF00') # plot_data(air_data['g2']['aqi_out_i'], 1, '#0000FF') plot_data(air_data['g2']['co2'], .5, '#0000FF') plot_data(air_data['g2']['tvoc'], .1, '#888888') # plot_data(air_data['g2']['p003'], 1, '#FF0000') # red 4 hour chart # plot_data(air_data['g2']['pm010s'], 1, '#00FF00') # green 4 hour chart # plot_data(air_data['g2']['aqi_pm25'], 1, '#0000FF') # Blue 4 hour chart # plot_data(air_data['g2']['aqi_out'], 1, '#00FFFF') # # plot_data(air_data['g2']['voc'], .1, '#888888') # def display_data3(): global air_data plot_data(air_data['g3']['aqi_in'], 1, '#FF0000') plot_data(air_data['g3']['aqi_out'], 1, '#00FF00') # plot_data(air_data['g3']['aqi_out_i'], 1, '#0000FF') plot_data(air_data['g3']['co2'], .5, '#0000FF') plot_data(air_data['g3']['tvoc'], .1, '#888888') def display_data4(): global air_data plot_data(air_data['g3']['p003'], 0.2, '#FF0000') # 4 day max/min charts plot_data(air_data['g3']['pm010s'], 0.5, '#00FF00') # plot_data(air_data['g4']['p003'], 0.2, '#FF8888') # plot_data(air_data['g4']['pm010s'], 0.5, '#88FF88') # plot_data(air_data['g3']['aqi_pm25'], 1, '#0000FF') # plot_data(air_data['g3']['aqi_out'], 1, '#00FFFF') # plot_data(air_data['g4']['aqi_pm25'], 1, '#FF0000') # plot_data(air_data['g4']['aqi_out'], 1, '#FFFF00') # def plot_data(data, scale, color): x = 0 yp = 0 for i in data: x = x + 1 y = bottom - int(scale * i) if x > 1: draw.line((x-1, yp, x, y), width=1, fill=(color)) yp = y if x % 60 == 0 : # Draw 4 grid lines draw.line((x, bottom, x, 150), width=1, fill=('#FFFFFF')) def plot_data_bar(data, scale, color): x = 0 for i in data: x = x + 1 y = bottom - int(scale * i) draw.line((x, bottom, x, y), width=1, fill=(color)) if x % 60 == 0 : # Draw 4 grid lines draw.line((x, bottom, x, 150), width=1, fill=('#FFFFFF')) # From: https://aqicn.org/data-feed/upload-api/ def publish_data(pm25, pm10, pm100): sensorReadings = [ {'specie':'pm25', 'value': pm25}, {'specie':'pm10', 'value': pm10}, {'specie':'pm100', 'value': pm100} ] station = { 'id': "starcross-01", 'name': "Starcross", 'location': { 'latitude': 33.437475, 'longitude': 
-86.779897 } } userToken = "409c9c0b706ab11a79b1a80b21355d1701bf52d4" params = {'station':station,'readings':sensorReadings,'token':userToken} try: request = requests.post( url = "https://aqicn.org/sensor/upload/", json = params) data = request.json() if data["status"]!="ok": print("Something went wrong: %s" % data) else: print("Data successfully posted: %s"%data) except: print('Unable to publish data to aqicn.org') def myloop(): global watchdog while True: t = time.time() print(time.strftime('%a %d %I:%M:%S', time.localtime()), end=' ', flush=True) print('.', end='', flush=True) get_air_data() print('-', end='', flush=True) get_cpu_data() print('+', end='', flush=True) display_data() print('=', flush=True) # Sleep enough to give 1 loop per second. Skip if this was a slow pass for whatever reason ts = 1 - (time.time() - t) # print(ts) if ts > 0: time.sleep(ts) watchdog += 1 if watchdog > 1: print(watchdog, flush=True) if watchdog > 500: print(sensor_inst + ' air watchdog heartbeat missing, rebooting') os.system('sudo reboot') if __name__ == '__main__': load_data() setup_mqtt() setup_sensors() setup_display() mqttc.loop_start() myloop()
gpl-2.0
2,333,927,129,253,851,000
40.136508
250
0.548143
false
2.860252
false
false
false
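The on_message handler in the record above quotes a Magnus-type approximation for absolute humidity. For reference, the commonly cited form of that approximation also divides by the absolute temperature; a small worked example with illustrative values:

import math

def absolute_humidity(t_celsius, rh_percent):
    # AH (g/m^3) ~= 6.112 * e^(17.67*T/(T+243.5)) * RH * 2.1674 / (273.15 + T)
    # with T in degrees C and RH in percent; the snippet above uses the same
    # numerator without the temperature divisor, so its scale differs.
    exponent = (17.67 * t_celsius) / (t_celsius + 243.5)
    return 6.112 * math.exp(exponent) * rh_percent * 2.1674 / (273.15 + t_celsius)

print(round(absolute_humidity(25.0, 50.0), 1))   # about 11.5 g/m^3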
facebookexperimental/eden
eden/hg-server/edenscm/hgext/amend/restack.py
2
2295
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2.

# restack.py - rebase to make a stack connected again

from __future__ import absolute_import

from edenscm.hgext import rebase
from edenscm.mercurial import commands, revsetlang
from edenscm.mercurial.i18n import _


def restack(ui, repo, **rebaseopts):
    """Repair a situation in which one or more commits in a stack have
    been obsoleted (thereby leaving their descendants in the stack
    orphaned) by finding any such commits and rebasing their descendants
    onto the latest version of each respective commit.
    """
    rebaseopts = rebaseopts.copy()

    with repo.wlock(), repo.lock():
        # Find drafts connected to the current stack via either changelog or
        # obsolete graph. Note: "draft() & ::." is optimized by D441.

        if not rebaseopts["rev"]:
            # 1. Connect drafts via changelog
            revs = list(repo.revs("(draft() & ::.)::"))
            if not revs:
                # "." is probably public. Check its direct children.
                revs = repo.revs("draft() & children(.)")
            if not revs:
                ui.status(_("nothing to restack\n"))
                return 1
            # 2. Connect revs via obsolete graph
            revs = list(repo.revs("successors(%ld)+predecessors(%ld)", revs, revs))
            # 3. Connect revs via changelog again to cover missing revs
            revs = list(repo.revs("draft() & ((draft() & %ld)::)", revs))
            rebaseopts["rev"] = [ctx.hex() for ctx in repo.set("%ld", revs)]

        rebaseopts["dest"] = "_destrestack(SRC)"

        rebase.rebase(ui, repo, **rebaseopts)

        # Ensure that we always end up on the latest version of the
        # current changeset. Usually, this will be taken care of
        # by the rebase operation. However, in some cases (such as
        # if we are on the precursor of the base changeset) the
        # rebase will not update to the latest version, so we need
        # to do this manually.
        successor = repo.revs("successors(.) - .").last()
        if successor is not None:
            commands.update(ui, repo, rev=repo[successor].hex())
gpl-2.0
3,579,521,769,971,183,000
39.982143
83
0.622222
false
4.019264
false
false
false
CroceRossaItaliana/jorvik
attivita/migrations/0005_auto_20160122_1956.py
1
1811
# -*- coding: utf-8 -*-
from __future__ import unicode_literals

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('attivita', '0004_auto_20160118_2059'),
    ]

    operations = [
        migrations.AlterModelOptions(
            name='partecipazione',
            options={'ordering': ['stato', 'persona__nome', 'persona__cognome'], 'verbose_name_plural': 'Richieste di partecipazione', 'verbose_name': 'Richiesta di partecipazione'},
        ),
        migrations.AlterModelOptions(
            name='turno',
            options={'ordering': ['inizio', 'fine', 'id'], 'verbose_name_plural': 'Turni'},
        ),
        migrations.AlterField(
            model_name='partecipazione',
            name='stato',
            field=models.CharField(db_index=True, choices=[('K', 'Part. Richiesta'), ('N', 'Non presentato/a')], default='K', max_length=1),
        ),
        migrations.AlterField(
            model_name='turno',
            name='fine',
            field=models.DateTimeField(db_index=True, blank=True, null=True, default=None, verbose_name='Data e ora di fine'),
        ),
        migrations.AlterField(
            model_name='turno',
            name='inizio',
            field=models.DateTimeField(db_index=True, verbose_name='Data e ora di inizio'),
        ),
        migrations.AlterField(
            model_name='turno',
            name='massimo',
            field=models.SmallIntegerField(db_index=True, blank=True, null=True, default=None, verbose_name='Num. massimo di partecipanti'),
        ),
        migrations.AlterField(
            model_name='turno',
            name='minimo',
            field=models.SmallIntegerField(db_index=True, default=1, verbose_name='Num. minimo di partecipanti'),
        ),
    ]
gpl-3.0
2,532,907,146,130,208,300
37.531915
182
0.583103
false
3.772917
false
false
false
John-Gee/HFRSteam
gamesinfo.py
1
11158
import asyncio import copy from datetime import datetime import os import sys import traceback import uvloop from cache import Cache from game import Category from game import StoreData from mapper import Mapper import namematching import parallelism import progressbar import steam import styledprint import utils import web def get_appid_and_type(name, games, appidstried): if (name in games): for app in games[name]: if (app[0] not in appidstried): return app return None, None def get_clean_matches(name, dic): return namematching.get_clean_matches(name, dic.keys(), 0.92) def get_appid_and_type_from_namematching(origname, name, games, appidstried, matches): while (True): if (matches is None): matches = get_clean_matches(name, games) if (len(matches) == 0): return None, None matchedname = matches[0] if (matchedname): score = namematching.get_match_score(name, matchedname) styledprint.print_info('Matched {0} with {1} at score {2}' .format(origname, matchedname, score)) appid, typ = get_appid_and_type(matchedname, games, appidstried) if (appid): return appid, typ else: matches.pop(0) else: return None, None async def get_game_info(options, game, cachedgames, steamgames, winedb, cleansteamgames, cleanwinedb, name, urlsmapping, webSession): try: if ((not options.all) and (not game.hfr.is_available)): # Ignoring not available games for now # it may be better in the future to ignore them in output # or allow the user to do so in the html page. return cleanname = None #cache = webSession.cache if (name in cachedgames): # Whether the cache is ignored or not, # if a game cached has a gift_date we keep it if (cachedgames[name].hfr.gift_date): game.hfr.gift_date = cachedgames[name].hfr.gift_date if (not options.ignorecache): game.store = cachedgames[name].store game.wine = cachedgames[name].wine # query the store if: # - not cacheonly # - not in cache # - in cache and to be refreshed if ((not options.cacheonly) and ((not game.store.link) or ((options.game) and (options.game.lower() in name.lower())))): # keep the old information if there is no new one if (game.store.link): storeBU = copy.deepcopy(game.store) worked = await steam.get_store_info(game, name, webSession) if (worked): styledprint.print_info('Info for game {0} was retrieved, {1}' .format(name, str(datetime.now().time()))) else: game.store = storeBU else: mapping = urlsmapping.get_mapping(name) if (mapping == None): appidstried = [] matches = None while (True): appid, typ = get_appid_and_type(name, steamgames, appidstried) if (appid): styledprint.print_debug('The game {0} got its appid simply' .format(name)) elif (options.fuzzymatching): # it seems quicker to recompute it than use redis #cleanname = await cache.function(namematching.nameclean, name) cleanname = namematching.nameclean(name) appid, typ = get_appid_and_type(cleanname, cleansteamgames, appidstried) if (appid): styledprint.print_debug('The game {0} got its appid ' 'by namecleaning' .format(name)) else: appid, typ = get_appid_and_type_from_namematching(name, cleanname, cleansteamgames, appidstried, matches) if ((appid in appidstried) or (not appid)): game.store = StoreData() game.store.description = 'The game was not found in the steam db' styledprint.print_error('{0}: {1}' .format(game.store.description, name)) cleanname = None return else: appidstried.append(appid) if (await steam.get_store_info_from_appid(game, name, appid, typ, webSession)): break else: url = mapping[0] if (url == 'ignore'): styledprint.print_debug('{0} cannot be found and is to be ignored' .format(name)) return 
styledprint.print_debug('URL mapping found for game {0}' .format(name)) await steam.get_store_info_from_url(game, name, url, webSession) # overwriting the steam provided category if (len(mapping) == 2): game.store.category = Category[mapping[1].upper()] game.store.override = True styledprint.print_info('Info for game {0} was retrieved, {1}' .format(name, str(datetime.now().time()))) # TODO # compute cleanname once # cache heavy stuff if (name in winedb): game.wine = winedb[name] elif (options.fuzzymatching): if (cleanname is None): # it seems quicker to recompute it than use redis #cleanname = await cache.function(namematching.nameclean, name) cleanname = namematching.nameclean(name) if (cleanname in cleanwinedb): game.wine = cleanwinedb[cleanname] else: cleanmatches = get_clean_matches(cleanname, cleanwinedb) if (len(cleanmatches)): game.wine = cleanwinedb[cleanmatches[0]] except Exception as e: styledprint.print_error('An exception was raised for', name) raise e # TODO # need to cache nameclean # last try was way slower than without cache def clean_names(names): cleannames = utils.DictCaseInsensitive() for name in names: cleanname = namematching.nameclean(name) if (cleanname in cleannames): for t in names[name]: if (t not in cleannames[cleanname]): cleannames[cleanname].append(t) else: cleannames[cleanname] = names[name] return cleannames def start_loop(subGames, options, cachedgames, steamgames, winedb, cleansteamgames, cleanwinedb, urlsmapping, cpuCount): asyncio.set_event_loop_policy(uvloop.EventLoopPolicy()) loop = asyncio.new_event_loop() asyncio.set_event_loop(loop) webSession = web.Session(limit_per_host=((20/cpuCount) + 1)) tasks = [] for name in subGames: game = subGames[name] tasks.append(asyncio.ensure_future(get_game_info(options, game, cachedgames, steamgames, winedb, cleansteamgames, cleanwinedb, name, urlsmapping, webSession), loop=loop)) loop.run_until_complete(asyncio.gather(progressbar.progress_bar(tasks), loop=loop)) loop.run_until_complete(webSession.close()) return subGames def get_games_info(options, games, steamgames, winedb): styledprint.print_info_begin('Pulling Games Information') CACHE_PATH = os.path.join('cache', 'games.p') cache = Cache(CACHE_PATH) cachedgames = cache.load_from_cache() cleansteamgames = utils.DictCaseInsensitive() cleanwinedb = utils.DictCaseInsensitive() if (options.fuzzymatching): parallelism.split_submit_job(steamgames, cleansteamgames, clean_names) parallelism.split_submit_job(winedb, cleanwinedb, clean_names) URLS_MAPPING = os.path.join('mappings', 'urlsmapping.txt') urlsmapping = Mapper(URLS_MAPPING) parallelism.split_submit_job(games, games, start_loop, options, cachedgames, steamgames, winedb, cleansteamgames, cleanwinedb, urlsmapping, parallelism.get_number_of_cores()) if (not options.dryrun): newcachedgames = cache.merge_old_new_cache(cachedgames, games) cache.save_to_cache(newcachedgames) urlsmapping.save_mapping() styledprint.print_info_end('Pulling Games Information Done') if __name__ == '__main__': origname = "My Test1 is the good" name = "MYTEST1ISGOOD" games = {'MYTEST1ISGOOD': [('1', 'app'), ('2', 'app')], 'MYTEST1ISGOODS': [('3', 'app'), ('4', 'app'), ('5', 'app')], 'MYTEST1ISSOOOOOGOODSS': [('6', 'app'), ('7', 'app')]} appidstried = [] matches = [] appid = '' while (appid != '4'): appid, typ = get_appid_and_type_from_namematching(origname, name, games, appidstried, matches) appidstried.append(appid) print(appid) print('Got the appid needed!')
gpl-3.0
3,735,292,828,559,162,000
40.947368
96
0.475085
false
4.656928
false
false
false
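A hedged sketch of the fuzzy lookup gamesinfo.py performs, using the standard library's difflib in place of the project's namematching helper; the 0.92 cutoff mirrors the score used in get_clean_matches above, and the names and appids are illustrative.

import difflib

steamgames = {'PORTAL2': [('620', 'app')], 'HALFLIFE2': [('220', 'app')]}
cleanname = 'PORTAL2'    # a cleaned title, as namematching.nameclean would produce
matches = difflib.get_close_matches(cleanname, steamgames.keys(), n=3, cutoff=0.92)
print(matches)           # ['PORTAL2']: only near-identical cleaned names survive the cutoff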
teknolab/teknolab-wapiti
wapiti/attack/attack.py
1
4042
# Wapiti SVN - A web application vulnerability scanner
# Wapiti Project (http://wapiti.sourceforge.net)
# Copyright (C) 2008 Nicolas Surribas
#
# David del Pozo
# Alberto Pastor
# Informatica Gesfor
# ICT Romulus (http://www.ict-romulus.eu)
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
import os
import socket  # for trapping socket.error
from file.auxtext import AuxText

class Attack:
    """
    This class represents an attack, it must be extended
    for any class which implements a new type of attack
    """

    verbose = 0
    color = 0

    name = "attack"

    reportGen = None
    HTTP = None
    auxText = None

    doGET = True
    doPOST = True

    # List of modules (strs) that must be launched before the current module
    # Must be defined in the code of the module
    require = []
    # List of modules (objects) that must be launched before the current module
    # Must be left empty in the code
    deps = []

    # List of attack's url already launched in the current module
    attackedGET = []
    attackedPOST = []

    vulnerableGET = []
    vulnerablePOST = []

    CONFIG_DIR = ""
    if os.path.isdir("/usr/local/share/doc/packages/wapiti"):
        CONFIG_DIR = "/usr/local/share/doc/packages/wapiti/config/attacks"
    else:
        BASE_DIR = os.path.normpath(os.path.join(os.path.abspath(__file__),'../..'))
        CONFIG_DIR = BASE_DIR + "/" + "config/attacks"

    # Color codes
    STD = "\033[0;0m"
    RED = "\033[1;31m"
    YELLOW = "\033[1;33m"
    CYAN = "\033[1;36m"
    GB = "\033[0;30m\033[47m"

    allowed = ['php', 'html', 'htm', 'xml', 'xhtml', 'xht', 'xhtm', 'asp', 'aspx', 'php3', 'php4', 'php5', 'txt', 'shtm', 'shtml', 'phtm', 'phtml', 'jhtml', 'pl', 'jsp', 'cfm', 'cfml', 'py']

    # The priority of the module, from 0 (first) to 10 (last). Default is 5
    PRIORITY = 5

    def __init__(self,HTTP,reportGen):
        self.HTTP = HTTP
        self.reportGen = reportGen
        self.auxText = AuxText()

    def setVerbose(self,verbose):
        self.verbose = verbose

    def setColor(self):
        self.color = 1

    def loadPayloads(self,fileName):
        """This method loads the payloads for an attack from the specified file"""
        return self.auxText.readLines(fileName)

    def attackGET(self, page, dict, headers = {}):
        return

    def attackPOST(self, form):
        return

    def loadRequire(self, obj = []):
        self.deps = obj

    def attack(self, urls, forms):
        if self.doGET == True:
            for url, headers in urls.items():
                dictio = {}
                params = []
                page = url
                if url.find("?") >= 0:
                    page = url.split('?')[0]
                    query = url.split('?')[1]
                    params = query.split('&')
                    if query.find("=") >= 0:
                        for param in params:
                            dictio[param.split('=')[0]] = param.split('=')[1]
                if self.verbose == 1:
                    print "+ " + _("attackGET") + " "+url
                    if params != []:
                        print " ", params
                try:
                    self.attackGET(page, dictio, headers)
                except socket.error, se:
                    print 'error: %s while attacking %s' % (repr(str(se[1])), url)
        if self.doPOST == True:
            for form in forms:
                if form[1] != {}:
                    self.attackPOST(form)
gpl-2.0
-5,216,976,222,578,111,000
29.390977
82
0.591291
false
3.648014
false
false
false
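A hedged sketch, in the same Python 2 style as the module above, of how a concrete attack module typically extends the Attack base class; the module name and payload file are made up for illustration.

class mod_example(Attack):
    name = "example"

    def __init__(self, HTTP, reportGen):
        Attack.__init__(self, HTTP, reportGen)
        # loadPayloads() reads one payload per line from the attack's config file
        self.payloads = self.loadPayloads(self.CONFIG_DIR + "/exampleVulns.txt")

    def attackGET(self, page, dict, headers={}):
        # mutate each query parameter with each payload and send it via self.HTTP,
        # recording confirmed hits in self.vulnerableGET
        for param in dict:
            for payload in self.payloads:
                pass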
stack-of-tasks/rbdlpy
tutorial/lib/python2.7/site-packages/OpenGLContext/browser/visual.py
2
10047
"""VPython as a target: Documentation on which we're basing our implementation: http://www.vpython.org/webdoc/visual/index.html What are the key determinants in the visual python API's success: geometry is simply created system has interactive operation (shell) flat hierarchies used throughout multiple mechanisms mapped to the same features use of "vector"-based orientation? simplified scripting APIs assumptions about application operation intuitive """ from math import pi from vrml import node, field import threading, time from OpenGLContext.arrays import * import colorsys as _colorsys _application=None _GUIThread=None from OpenGLContext.browser.proxy import proxyField from OpenGLContext.browser import geometry from OpenGLContext.browser.vector import * from OpenGLContext.browser.crayola import color def _newNode( cls, named ): """Construct new instance of cls, set proper color, and add to objects""" if not scene.visible: scene.visible = 1 if not [k for k in ('color','red','green','blue') if k in named]: named['color'] = scene.foreground if 'display' in named: target = named['display'] del named['display'] # XXX fix when have backref added else: target = scene if not target.visible: target.visible = 1 node = cls(**named) objs = target.objects objs.append( node ) target.objects = objs return node def rgb_to_hsv(T): """Convert RGB tuple to HSV value""" return _colorsys.rgb_to_hsv( *T ) def hsv_to_rgb(T): """Convert HSV tuple to RGB value""" return _colorsys.hsv_to_rgb( *T ) def sphere( **named ): """Create a sphere, adding to current scene""" return _newNode( geometry.VPSphere, named ) def cone( **named ): """Create a cone, adding to current scene""" return _newNode( geometry.VPCone, named ) def cylinder( **named ): """Create a cylinder, adding to current scene""" return _newNode( geometry.VPCylinder, named ) def box( **named ): """Create a box, adding to current scene""" return _newNode( geometry.VPBox, named ) def curve( **named ): """Create a new curve, adding to the current scene""" return _newNode( geometry.VPCurve, named ) class Display( node.Node ): """Proxy object for the "display" (context) Visual puts a lot of functionality into this object, with most of the fields being "trigger something on set" fields, rather than "normal" fields that just store values. """ _frame = None _context = None ### GUI/frame configuration/control stuff cursor = proxyField("cursor", "SFNode", 1, None) x = proxyField( 'x', "SFInt32", 1, 0) y = proxyField( 'y', "SFInt32", 1, 0) width = proxyField( 'width', "SFInt32", 1, 400) height = proxyField( 'height', "SFInt32", 1, 300) title = proxyField( "title", "SFString", 1, "OpenGLContext" ) visible = proxyField( "visible", "SFBool", 1, 0 ) # exit-on-close exit = proxyField( "exit", "SFBool", 1, 1 ) # background represents a SimpleBackground-style node... 
background = proxyField( 'background', 'SFColor', 1, (0,0,0)) # ambient lighting ambient = proxyField("ambient", "SFFloat", 1, 0.2) # will have to be manually extracted from scenegraph lights = proxyField("lights", "MFNode", 1, list) # will have to be manually extracted from scenegraph objects = proxyField("objects", "MFNode", 1, list) ### viewplatform stuff center = proxyField( "center", "SFVec3f", 1, (0,0,0) ) autocenter = proxyField( "autocenter", "SFBool", 1, 1 ) forward = proxyField( "forward", "SFVec3f", 1, (0,0,-1)) fov = proxyField( "fov", "SFFloat", 1, pi/3.0) up = proxyField( "up", "SFVec3f", 1, (0,1,0)) # bounding box for the entire scene range = proxyField( "range", "SFVec3f", 1, (10,10,10)) # inverse of range scale = proxyField( "scale", "SFVec3f", 1, (.1,.1,.1)) autoscale = proxyField( "autoscale", "SFBool", 1, 1 ) # what is this trying to do? uniform = proxyField( "uniform", "SFBool", 1, 0) # interactivity binding... userzoom = proxyField( "userzoom", "SFBool", 1, 0) userspin = proxyField( "userspin", "SFBool", 1, 0) # default object-creation colour foreground = proxyField( 'foreground', 'SFColor', 1, (1,0,0)) def select( self ): """Makes this the currently-rendering context""" def create( self, event ): """Create rendering context for this display Create the application if necessary, then the frame, and return only after the frame has been created. """ global _application if not _application: import wx class Application (wx.PySimpleApp): def OnInit(self, display=self, event=event): frame = display.createFrame() event.set() self.SetTopWindow(frame) return 1 _application = app = Application() app.MainLoop() else: self.createFrame() event.set() def createFrame( self ): """Create a new rendering frame""" import wx from OpenGLContext.browser import nodes self._frame = wx.Frame( None, -1, self.title, (self.x,self.y), (self.width,self.height), ) self._context = nodes.VisualContext( self._frame, ) return self._frame def set_visible( self, value, fieldObject, *arguments, **named): """Visibility has changed, either hide or show""" if value and not self._frame: e = threading.Event() global _GUIThread if not _GUIThread: _GUIThread = threading.Thread( name="GUIThread", target=self.create, args=(e,) ) _GUIThread.start() else: callInGUIThread( self.create, e ) # if we haven't done anything in 10 seconds, we'll # have an exception raised... 
e.wait(10.0) import wx if value: callInGUIThread( self._frame.Show, True ) elif self._frame: if self.exit: callInGUIThread( self._frame.Close ) while self in scenes: scenes.remove( self ) else: callInGUIThread( self._frame.Show, 0 ) def set_pos( self, value, field, *arguments, **named ): if self._frame: callInGUIThread( self._frame.Move, (self.x, self.y)) setattr(self._context, field.name, value ) set_x = set_pos set_y = set_pos def set_size( self, value, field, *arguments, **named ): if self._frame: callInGUIThread( self._frame.SetSize, (self.width, self.height)) setattr(self._context, field.name, value ) set_width = set_size set_height = set_size def set_title( self, value, *arguments, **named ): if self._frame: callInGUIThread( self._frame.SetTitle, value ) def _buildPlatformForward( name ): """Utility to build forwarder for the context.platform object""" def f( self, value, *arguments, **named ): if self._frame: if self._context and self._context.platform: callInGUIThread( setattr, self._context.platform, name, value) return f set_center = _buildPlatformForward( 'center' ) set_autocenter = _buildPlatformForward( 'autocenter' ) set_autoscale = _buildPlatformForward( 'autoscale' ) set_range = _buildPlatformForward( 'range' ) set_uniform = _buildPlatformForward( 'uniform' ) set_fov = _buildPlatformForward( 'fov' ) set_up = _buildPlatformForward( 'up' ) del _buildPlatformForward def set_scale( self, value, field, *arguments, **named ): """Scale is just a 1/range thing, so update range instead""" if isinstance( value, (int,long,float)): value = (value,value,value) if 0.0 in value: r = [] for i in value: if i == 0: r.append( 1.0e32) else: r.append( 1.0/i ) self.range = r else: self.range = array((1.0,1.0,1.0),'d')/asarray( value, 'd') def set_background( self, value, field, *arguments, **named ): """Set the scenegraph background attribute""" if self._context: callInGUIThread( setattr, self._context.getSceneGraph(), field.name, value ) set_objects = set_background def callInGUIThread( callable, *arguments, **named ): """Call the callable object in the GUI thread This adds a record to the eventCascadeQueue, which will executed during the standard DoEventCascade method. Note: the callable will be called in the event Cascade of the first valid scene context. """ alreadyPut = 0 for scene in scenes: context = getattr(scene,'_context') if context and not alreadyPut: context.eventCascadeQueue.put( (callable,arguments,named) ) alreadyPut = 1 if context: context.triggerRedraw( 1 ) scenes = [] def display( **named ): """Create a new display for the system""" item = Display( **named ) scenes.append( item ) return item def rate( framesPerSecond=30 ): """Allow animation to continue at given rate""" ## XXX This is totally wrong, should be tracking last-frame-start ## and deciding how to delay based on that :( time.sleep( 1.0/framesPerSecond ) if scene and scene._context: scene._context.triggerRedraw( 1 ) def select( display ): """Make the given display the "current" display (global scene)""" global scene scene = display return display scene = None select(display(DEF="Primary Window", title="Visual OpenGLContext"))
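The module above reproduces the VPython scripting style described in its docstring: geometry constructors add themselves to the current scene and rate() throttles an animation loop. A minimal sketch of driving it, assuming the module is importable as OpenGLContext.browser.visual (its path is not shown here) and that the geometry nodes accept VPython-style pos/radius/size keywords, a .pos attribute, and crayola colour names -- all assumptions, since only the constructors are visible above:

# Hypothetical bouncing-ball script in the VPython idiom emulated above.
# Module path, pos/radius/size keywords, .pos attribute and colour names are assumptions.
from OpenGLContext.browser.visual import sphere, box, rate, color, scene

scene.title = "Bouncing ball"
ball = sphere(pos=(0, 4, 0), radius=0.5, color=color.red)
floor = box(pos=(0, -0.1, 0), size=(4, 0.2, 4), color=color.green)

dy = -0.05
while 1:
    rate(30)                      # sleep so the loop runs at roughly 30 frames/second
    x, y, z = ball.pos
    if y <= 0.5:
        dy = abs(dy)              # bounce when the ball reaches the floor
    ball.pos = (x, y + dy, z)
    dy -= 0.002                   # crude gravity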
lgpl-3.0
6,886,235,157,161,103,000
33.057627
96
0.606052
false
3.820152
false
false
false
dmpiergiacomo/scion
python/lib/packet/cert_mgmt.py
1
3775
# Copyright 2015 ETH Zurich # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """ :mod:`cert_mgmt` --- SCION cert/trc managment packets ===================================================== """ # External import capnp # noqa # SCION import proto.cert_mgmt_capnp as P from lib.crypto.certificate_chain import CertificateChain from lib.crypto.trc import TRC from lib.errors import SCIONParseError from lib.packet.packet_base import SCIONPayloadBaseProto from lib.packet.scion_addr import ISD_AS from lib.types import CertMgmtType, PayloadClass class CertMgmtBase(SCIONPayloadBaseProto): # pragma: no cover PAYLOAD_CLASS = PayloadClass.CERT def _pack_full(self, p): wrapper = P.CertMgmt.new_message(**{self.PAYLOAD_TYPE: p}) return super()._pack_full(wrapper) class CertMgmtRequest(CertMgmtBase): # pragma: no cover def isd_as(self): return ISD_AS(self.p.isdas) @classmethod def from_values(cls, isd_as, version, cache_only=False): return cls(cls.P_CLS.new_message(isdas=int(isd_as), version=version, cacheOnly=cache_only)) class CertChainRequest(CertMgmtRequest): NAME = "CertChainRequest" PAYLOAD_TYPE = CertMgmtType.CERT_CHAIN_REQ P_CLS = P.CertChainReq def short_desc(self): return "%sv%s (Cache only? %s)" % (self.isd_as(), self.p.version, self.p.cacheOnly) class CertChainReply(CertMgmtBase): # pragma: no cover NAME = "CertChainReply" PAYLOAD_TYPE = CertMgmtType.CERT_CHAIN_REPLY P_CLS = P.CertChainRep def __init__(self, p): super().__init__(p) self.chain = CertificateChain.from_raw(p.chain, lz4_=True) @classmethod def from_values(cls, chain): return cls(cls.P_CLS.new_message(chain=chain.pack(lz4_=True))) def short_desc(self): return "%sv%s" % self.chain.get_leaf_isd_as_ver() def __str__(self): isd_as, ver = self.chain.get_leaf_isd_as_ver() return "%s: ISD-AS: %s Version: %s" % (self.NAME, isd_as, ver) class TRCRequest(CertMgmtRequest): NAME = "TRCRequest" PAYLOAD_TYPE = CertMgmtType.TRC_REQ P_CLS = P.TRCReq def short_desc(self): return "%sv%s (Cache only? %s)" % (self.isd_as()[0], self.p.version, self.p.cacheOnly) class TRCReply(CertMgmtBase): # pragma: no cover NAME = "TRCReply" PAYLOAD_TYPE = CertMgmtType.TRC_REPLY P_CLS = P.TRCRep def __init__(self, p): super().__init__(p) self.trc = TRC.from_raw(p.trc, lz4_=True) @classmethod def from_values(cls, trc): return cls(cls.P_CLS.new_message(trc=trc.pack(lz4_=True))) def short_desc(self): return "%sv%s" % self.trc.get_isd_ver() def __str__(self): isd, ver = self.trc.get_isd_ver() return "%s: ISD: %s version: %s TRC: %s" % ( self.NAME, isd, ver, self.trc) def parse_certmgmt_payload(wrapper): # pragma: no cover type_ = wrapper.which() for cls_ in CertChainRequest, CertChainReply, TRCRequest, TRCReply: if cls_.PAYLOAD_TYPE == type_: return cls_(getattr(wrapper, type_)) raise SCIONParseError("Unsupported cert management type: %s" % type_)
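The request/reply classes above are thin wrappers over the Cap'n Proto CertMgmt union; a hedged sketch of building a certificate-chain request and dispatching a received wrapper (the '1-11' ISD-AS string form is an assumption about ISD_AS parsing, not something shown in this file):

# Hypothetical usage; ISD_AS('1-11') string form is assumed, not shown above.
from lib.packet.scion_addr import ISD_AS

req = CertChainRequest.from_values(ISD_AS('1-11'), version=3, cache_only=True)
print(req.short_desc())        # roughly "1-11v3 (Cache only? True)"

# On the receiving side, the CertMgmt wrapper is dispatched on its union tag:
# payload = parse_certmgmt_payload(wrapper)   # CertChainReply, TRCReply, ...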
apache-2.0
-836,124,035,748,544,900
30.991525
76
0.636556
false
3.226496
false
false
false
henrysher/opslib
opslib/icsutils/jsondiff.py
1
9312
""" JsonDiff: Library for JSON DIFF ------------------------------- +------------------------+-------------+ | This is the JsonDiff common library. | +------------------------+-------------+ """ try: import json except ImportError: import simplejson as json from opslib.icsexception import IcsException import logging log = logging.getLogger(__name__) def is_scalar(value): """ Primitive version, relying on the fact that JSON cannot contain any more complicated data structures. """ return not isinstance(value, (list, tuple, dict)) class Comparator(object): """ Main workhorse for JSON Comparator """ def __init__(self, fp1=None, fp2=None, include=[], exclude=[], ignore_add=False): """ :type fp1: object :param fp1: file object (opened with read permission) :type fp2: object :param fp2: file object (opened with read permission) :type include: list :param include: a list of attributes to include in the comparison :type exclude: list :param exclude: a list of attributes to exclude in the comparison :type ignore_add: bool :param ignore_add: whether to ignore the added items in the comparison **Example:** >>> from opslib.icsutils.jsondiff import Comparator >>> import json >>> old_json = { ... "name": "opslib", ... "version": "1.2.0", ... "members": { ... "role": "ops", ... "group": [ "ops", "devops" ] ... } ... } >>> new_json = { ... "name": "opslib", ... "version": "1.3.0", ... "members": { ... "role": "devops", ... "group": [ "devops" ] ... } ... } >>> json.dump(old_json, open("old.json", "w")) >>> json.dump(new_json, open("new.json", "w")) >>> fp_old = open("old.json", "r") >>> fp_new = open("new.json", "r") >>> engine = Comparator(fp_old, fp_new) >>> res = engine.compare_dicts() >>> print json.dumps(res, sort_keys=True, indent=4) { "members": { "group": { "0": { "+++": "devops", "---": "ops" }, "1": { "---": "devops" } }, "role": { "+++": "devops", "---": "ops" } }, "version": { "+++": "1.3.0", "---": "1.2.0" } } """ self.obj1 = None self.obj2 = None if fp1: try: self.obj1 = json.load(fp1) except (TypeError, OverflowError, ValueError), exc: raise IcsException("Cannot decode object from JSON.\n%s" % unicode(exc)) if fp2: try: self.obj2 = json.load(fp2) except (TypeError, OverflowError, ValueError), exc: raise IcsException("Cannot decode object from JSON\n%s" % unicode(exc)) self.excluded_attributes = [] self.included_attributes = [] self.ignore_added = False if include: self.included_attributes = include or [] if exclude: self.excluded_attributes = exclude or [] if ignore_add: self.ignore_added = ignore_add or False def _is_incex_key(self, key, value): """Is this key excluded or not among included ones? If yes, it should be ignored.""" key_out = ((self.included_attributes and (key not in self.included_attributes)) or (key in self.excluded_attributes)) value_out = True if isinstance(value, dict): for change_key in value: if isinstance(value[change_key], dict): for key in value[change_key]: if ((self.included_attributes and (key in self.included_attributes)) or (key not in self.excluded_attributes)): value_out = False return key_out and value_out def _filter_results(self, result): """Whole -i or -x functionality. Rather than complicate logic while going through the object's tree we filter the result of plain comparison. 
Also clear out unused keys in result""" out_result = {} for change_type in result: temp_dict = {} for key in result[change_type]: log.debug("change_type = %s", change_type) if self.ignore_added and (change_type == "+++"): continue log.debug("result[change_type] = %s, key = %s", unicode(result[change_type]), key) log.debug("self._is_incex_key = %s", self._is_incex_key( key, result[change_type][key])) if not self._is_incex_key(key, result[change_type][key]): temp_dict[key] = result[change_type][key] if len(temp_dict) > 0: out_result[change_type] = temp_dict return out_result def _compare_elements(self, old, new): """Unify decision making on the leaf node level.""" res = None # We want to go through the tree post-order if isinstance(old, dict): res_dict = self.compare_dicts(old, new) if (len(res_dict) > 0): res = res_dict # Now we are on the same level # different types, new value is new elif (type(old) != type(new)): res = {'---': old, '+++': new} # recursive arrays # we can be sure now, that both new and old are # of the same type elif (isinstance(old, list)): res_arr = self._compare_arrays(old, new) if (len(res_arr) > 0): res = res_arr # the only thing remaining are scalars else: scalar_diff = self._compare_scalars(old, new) if scalar_diff is not None: res = scalar_diff return res def _compare_scalars(self, old, new, name=None): """ Be careful with the result of this function. Negative answer from this function is really None, not False, so deciding based on the return value like in if self._compare_scalars(...): leads to wrong answer (it should be if self._compare_scalars(...) is not None:) """ # Explicitly excluded arguments if old != new: return {'---': old, '+++': new} else: return None def _compare_arrays(self, old_arr, new_arr): """ simpler version of compare_dicts; just an internal method, because it could never be called from outside. We have it guaranteed that both new_arr and old_arr are of type list. """ inters = min(len(old_arr), len(new_arr)) # this is the smaller length result = { u"+++": {}, u"---": {}, } for idx in range(inters): res = self._compare_elements(old_arr[idx], new_arr[idx]) if res is not None: result[idx] = res # the rest of the larger array if (inters == len(old_arr)): for idx in range(inters, len(new_arr)): result[idx] = {u'+++': new_arr[idx]} else: for idx in range(inters, len(old_arr)): result[idx] = {u'---': old_arr[idx]} # Clear out unused keys in result out_result = {} for key in result: if len(result[key]) > 0: out_result[key] = result[key] return self._filter_results(result) def compare_dicts(self, old_obj=None, new_obj=None): """ The real workhorse """ if not old_obj and hasattr(self, "obj1"): old_obj = self.obj1 if not new_obj and hasattr(self, "obj2"): new_obj = self.obj2 old_keys = set() new_keys = set() if old_obj and len(old_obj) > 0: old_keys = set(old_obj.keys()) if new_obj and len(new_obj) > 0: new_keys = set(new_obj.keys()) keys = old_keys | new_keys result = { u"+++": {}, u"---": {}, } for name in keys: # old_obj is missing if name not in old_obj: result[u'+++'][name] = new_obj[name] # new_obj is missing elif name not in new_obj: result[u'---'][name] = old_obj[name] else: res = self._compare_elements(old_obj[name], new_obj[name]) if res is not None: result[name] = res return self._filter_results(result) # vim: tabstop=4 shiftwidth=4 softtabstop=4
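The class docstring above already shows a plain comparison; the include/exclude/ignore_add switches documented in __init__ can be sketched the same way (Python 2, matching the module):

# Drop the 'version' key from the diff and ignore attributes that only appear
# in the new document.
fp_old = open("old.json", "r")
fp_new = open("new.json", "r")
engine = Comparator(fp_old, fp_new, exclude=['version'], ignore_add=True)
diff = engine.compare_dicts()
print json.dumps(diff, sort_keys=True, indent=4)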
apache-2.0
-801,774,102,115,567,100
31.559441
78
0.474871
false
4.175785
false
false
false
erral/plugin.video.eitb
utils.py
1
1638
import requests

API_URL = 'https://still-castle-99749.herokuapp.com'

VIDEO_PLAYLIST_URL = '{0}/playlist'.format(API_URL)
LAST_BROADCAST_VIDEO_PLAYLIST_URL = '{0}/last-broadcast-list'.format(API_URL)
RADIO_PLAYLIST_URL = '{0}/radio'.format(API_URL)
PROGRAM_TYPES_URL = '{0}/program-type-list'.format(API_URL)
PROGRAM_TYPES_PLAYLIST_URL = '{0}/type-playlist'.format(API_URL)


def get_programs():
    data = requests.get(VIDEO_PLAYLIST_URL)
    return data.json().get('member')


def get_programs_types():
    data = requests.get(PROGRAM_TYPES_URL)
    return data.json().get('member')


def get_programs_types_playlist(url):
    data = requests.get(url)
    return data.json().get('member')


def get_last_broadcast(items):
    data = requests.get('{0}?items={1}'.format(LAST_BROADCAST_VIDEO_PLAYLIST_URL, items))
    return data.json().get('member')


def get_episodes(url):
    data = requests.get(url)
    return data.json().get('member')


def get_videos(url):
    data = requests.get(url)
    return data.json()


def get_radio_programs():
    data = requests.get(RADIO_PLAYLIST_URL)
    return data.json().get('member')


def get_program_audios(url):
    data = requests.get(url)
    return data.json().get('member')


import xbmcaddon
import xbmc


def get_local_string(string):
    ''' Get add-on string. Returns in unicode. '''
    # if type(string) != int:
    #     string = STRINGS[string]
    result = xbmcaddon.Addon('plugin.video.eitb').getLocalizedString(string)
    # If the plugin does not provide this translation, fall back to the Kodi system string
    if not result:
        result = xbmc.getLocalizedString(string)
    return result
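Most helpers above unwrap the 'member' collection of the corresponding API endpoint (get_videos returns the raw JSON); a minimal sketch of listing the available programs -- the shape of the individual items is not documented here, so they are just printed:

# Print every video program and every radio program returned by the API.
for program in get_programs():
    print(program)

for radio_program in get_radio_programs():
    print(radio_program)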
gpl-3.0
-5,499,499,826,162,889,000
23.833333
89
0.675824
false
3.15
false
false
false
wzhang1984/Noncoding-tumor-mutation-paper
annotate_mutations/annotate_mut_GeneHancer.py
1
2878
# Annotate noncoding mutations using GeneHancer # https://www.ncbi.nlm.nih.gov/pmc/articles/PMC5467550/ import sys fn_prefix = sys.argv[1] merge_size = sys.argv[2] occurrence_cutoff = int(sys.argv[3]) binsize = 10000 print 'Reading GeneHancer_hg19' bin2GeneHancer = {} fn = '/cellar/data/users/wzhang1984/GeneHancer/GeneHancer_hg19.bed' for line in open(fn).read().rstrip().splitlines(): row = line.split('\t') Chr = row[0] Start = int(row[1]) End = int(row[2]) Pos = '\t'.join(row[1:3]) genes = [] for entry in row[3].split(';'): entry_split = entry.split('connected_gene=') if len(entry_split) == 2: genes.append(entry_split[1]) binstart = Start / binsize binend = End / binsize for binpos in range(binstart, binend+1): Bin = '{}:{}'.format(Chr, binpos) if Bin not in bin2GeneHancer: bin2GeneHancer[Bin] = {} if Pos not in bin2GeneHancer[Bin]: bin2GeneHancer[Bin][Pos] = genes print 'Reading Homer TSS annotation' locus2promoter = {} header = True for line in open("./" + fn_prefix + "_merged_" + merge_size + "_homerAnno.txt"): if header: header = False continue row = line.split("\t") if row[9] == 'NA': continue dist2tss = int(row[9]) if dist2tss>=-1000 and dist2tss<=1000: locus2promoter["\t".join([row[1], str(int(row[2])-1), row[3]])] = row[15] print 'Reading noncoding mutation loci' nline = 0 line_out = "" for line in open("./" + fn_prefix + "_merged_" + merge_size + ".txt").read().rstrip().splitlines(): row = line.split("\t") nline += 1 if nline%1000 == 0: print nline if int(row[3]) < occurrence_cutoff: continue row = line.split("\t") Chr = row[0] Start = int(row[1]) - 100 End = int(row[2]) + 100 mid = (Start+End) / 2.0 locus = "\t".join(row[:3]) genes = set() if locus in locus2promoter: genes.add(locus2promoter[locus] + '|TSS') binstart = Start / binsize binend = End / binsize for binpos in range(binstart, binend+1): Bin = '{}:{}'.format(Chr, binpos) if Bin in bin2GeneHancer: for Pos in bin2GeneHancer[Bin]: [genehancerstart,genehancerend] = [int(i) for i in Pos.split("\t")] genehancermid = (genehancerstart+genehancerend) / 2.0 if (genehancermid>=Start-0.5 and genehancermid<=End+0.5) or (mid>=genehancerstart-0.5 and mid<=genehancerend+0.5): for gene in bin2GeneHancer[Bin][Pos]: genes.add(gene + '|enhancer') if genes != set(): line_out += line + '\t' + ','.join(genes) + '\n' open(fn_prefix+"_merged_"+merge_size+"_anno_promoter_genehancer.txt","w").write(line_out)
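The script above avoids an all-against-all scan by indexing GeneHancer elements into 10 kb bins and only testing mutation loci against elements that share a bin; a worked example of that arithmetic with hypothetical coordinates (Python 2 integer division, as in the script):

# Hypothetical coordinates illustrating the binning and the midpoint overlap test.
binsize = 10000
Start, End = 1234500, 1240200                   # a GeneHancer element on chr1
binstart = Start / binsize                      # 123 (integer division)
binend = End / binsize                          # 124 -> stored under chr1:123 and chr1:124

# A mutation locus is padded by 100 bp on each side before testing.
locus_start, locus_end = 1236900 - 100, 1237000 + 100
mid = (locus_start + locus_end) / 2.0           # 1236950.0
genehancermid = (Start + End) / 2.0             # 1237350.0

# Overlap when either midpoint falls inside the other interval.
overlaps = ((genehancermid >= locus_start - 0.5 and genehancermid <= locus_end + 0.5)
            or (mid >= Start - 0.5 and mid <= End + 0.5))
print overlaps                                  # True: the locus midpoint is inside the element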
gpl-3.0
-6,248,541,469,730,508,000
31.08046
130
0.567755
false
2.970072
false
false
false
pawaranand/phr_frappe
frappe/website/statics.py
17
5584
# Copyright (c) 2013, Web Notes Technologies Pvt. Ltd. and Contributors # MIT License. See license.txt from __future__ import unicode_literals import frappe, os, time def sync_statics(rebuild=False): s = sync() s.verbose = True # s.start(rebuild) # frappe.db.commit() while True: s.start(rebuild) frappe.db.commit() time.sleep(2) rebuild = False class sync(object): def __init__(self, verbose=False): self.verbose = verbose def start(self, rebuild=False): self.synced = [] self.synced_paths = [] self.updated = 0 if rebuild: frappe.db.sql("delete from `tabWeb Page` where ifnull(template_path, '')!=''") for app in frappe.get_installed_apps(): self.sync_for_app(app) self.cleanup() def sync_for_app(self, app): self.statics_path = frappe.get_app_path(app, "templates", "statics") if os.path.exists(self.statics_path): for basepath, folders, files in os.walk(self.statics_path): self.sync_folder(basepath, folders, files) def sync_folder(self, basepath, folders, files): self.get_index_txt(basepath, files) index_found = self.sync_index_page(basepath, files) if not index_found and basepath!=self.statics_path: # not synced either by generator or by index.html return if self.index: self.sync_using_given_index(basepath, folders, files) else: self.sync_alphabetically(basepath, folders, [filename for filename in files if filename.endswith('html') or filename.endswith('md')]) def get_index_txt(self, basepath, files): self.index = [] if "index.txt" in files: with open(os.path.join(basepath, "index.txt"), "r") as indexfile: self.index = indexfile.read().splitlines() def sync_index_page(self, basepath, files): for extn in ("md", "html"): fname = "index." + extn if fname in files: self.sync_file(fname, os.path.join(basepath, fname), None) return True def sync_using_given_index(self, basepath, folders, files): for i, page_name in enumerate(self.index): if page_name in folders: # for folder, sync inner index first (so that idx is set) for extn in ("md", "html"): path = os.path.join(basepath, page_name, "index." + extn) if os.path.exists(path): self.sync_file("index." 
+ extn, path, i) break # other files if page_name + ".md" in files: self.sync_file(page_name + ".md", os.path.join(basepath, page_name + ".md"), i) elif page_name + ".html" in files: self.sync_file(page_name + ".html", os.path.join(basepath, page_name + ".html"), i) else: if page_name not in folders: print page_name + " not found in " + basepath def sync_alphabetically(self, basepath, folders, files): files.sort() for fname in files: page_name = fname.rsplit(".", 1)[0] if not (page_name=="index" and basepath!=self.statics_path): self.sync_file(fname, os.path.join(basepath, fname), None) def sync_file(self, fname, template_path, priority): route = os.path.relpath(template_path, self.statics_path).rsplit(".", 1)[0] if fname.rsplit(".", 1)[0]=="index" and \ os.path.dirname(template_path) != self.statics_path: route = os.path.dirname(route) parent_web_page = frappe.db.sql("""select name from `tabWeb Page` where page_name=%s and ifnull(parent_website_route, '')=ifnull(%s, '')""", (os.path.basename(os.path.dirname(route)), os.path.dirname(os.path.dirname(route)))) parent_web_page = parent_web_page and parent_web_page[0][0] or "" page_name = os.path.basename(route) published = 1 idx = priority if (parent_web_page, page_name) in self.synced: return title = self.get_title(template_path) if not frappe.db.get_value("Web Page", {"template_path":template_path}): web_page = frappe.new_doc("Web Page") web_page.page_name = page_name web_page.parent_web_page = parent_web_page web_page.template_path = template_path web_page.title = title web_page.published = published web_page.idx = idx web_page.from_website_sync = True web_page.insert() if self.verbose: print "Inserted: " + web_page.name else: web_page = frappe.get_doc("Web Page", {"template_path":template_path}) dirty = False for key in ("parent_web_page", "title", "template_path", "published", "idx"): if web_page.get(key) != locals().get(key): web_page.set(key, locals().get(key)) dirty = True if dirty: web_page.from_website_sync = True web_page.save() if self.verbose: print "Updated: " + web_page.name self.synced.append((parent_web_page, page_name)) def get_title(self, fpath): title = os.path.basename(fpath).rsplit(".", 1)[0] if title =="index": title = os.path.basename(os.path.dirname(fpath)) title = title.replace("-", " ").replace("_", " ").title() with open(fpath, "r") as f: content = unicode(f.read().strip(), "utf-8") if content.startswith("# "): title = content.splitlines()[0][2:] if "<!-- title:" in content: title = content.split("<!-- title:", 1)[1].split("-->", 1)[0].strip() return title def cleanup(self): if self.synced: # delete static web pages that are not in immediate list for static_page in frappe.db.sql("""select name, page_name, parent_web_page from `tabWeb Page` where ifnull(template_path,'')!=''""", as_dict=1): if (static_page.parent_web_page, static_page.page_name) not in self.synced: frappe.delete_doc("Web Page", static_page.name, force=1) else: # delete all static web pages frappe.delete_doc("Web Page", frappe.db.sql_list("""select name from `tabWeb Page` where ifnull(template_path,'')!=''"""), force=1)
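The sync walks each installed app's templates/statics folder: a sub-folder is only synced if it has an index.md or index.html, an optional index.txt fixes the ordering (idx) of its entries, and every other .md/.html file becomes a child Web Page whose title comes from its first "# " heading or an "<!-- title: -->" comment. A hypothetical layout this code would pick up:

myapp/templates/statics/
    index.txt            # contains "getting-started" then "guide", fixing their order
    getting-started.md   # child page; title taken from its first "# ..." line
    guide/
        index.md         # required for the guide folder itself to be synced
        install.md
        usage.md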
mit
-7,153,857,291,017,994,000
31.277457
136
0.6601
false
2.945148
false
false
false
pjryan126/solid-start-careers
store/api/zillow/venv/lib/python2.7/site-packages/pandas/core/window.py
1
49027
""" provide a generic structure to support window functions, similar to how we have a Groupby object """ from __future__ import division import warnings import numpy as np from collections import defaultdict import pandas as pd from pandas.lib import isscalar from pandas.core.base import PandasObject, SelectionMixin import pandas.core.common as com import pandas.algos as algos from pandas import compat from pandas.util.decorators import Substitution, Appender from textwrap import dedent _shared_docs = dict() _doc_template = """ Returns ------- same type as input See also -------- pandas.Series.%(name)s pandas.DataFrame.%(name)s """ class _Window(PandasObject, SelectionMixin): _attributes = ['window', 'min_periods', 'freq', 'center', 'win_type', 'axis'] exclusions = set() def __init__(self, obj, window=None, min_periods=None, freq=None, center=False, win_type=None, axis=0): if freq is not None: warnings.warn("The freq kw is deprecated and will be removed in a " "future version. You can resample prior to passing " "to a window function", FutureWarning, stacklevel=3) self.blocks = [] self.obj = obj self.window = window self.min_periods = min_periods self.freq = freq self.center = center self.win_type = win_type self.axis = axis self._setup() @property def _constructor(self): return Window def _setup(self): pass def _convert_freq(self, how=None): """ resample according to the how, return a new object """ obj = self._selected_obj if (self.freq is not None and isinstance(obj, (com.ABCSeries, com.ABCDataFrame))): if how is not None: warnings.warn("The how kw argument is deprecated and removed " "in a future version. You can resample prior " "to passing to a window function", FutureWarning, stacklevel=6) obj = obj.resample(self.freq).aggregate(how or 'asfreq') return obj def _create_blocks(self, how): """ split data into blocks & return conformed data """ obj = self._convert_freq(how) return obj.as_blocks(copy=False).values(), obj def _gotitem(self, key, ndim, subset=None): """ sub-classes to define return a sliced object Parameters ---------- key : string / list of selections ndim : 1,2 requested ndim of result subset : object, default None subset to act on """ # create a new object to prevent aliasing if subset is None: subset = self.obj self = self._shallow_copy(subset) self._reset_cache() if subset.ndim == 2: if isscalar(key) and key in subset or com.is_list_like(key): self._selection = key return self def __getattr__(self, attr): if attr in self._internal_names_set: return object.__getattribute__(self, attr) if attr in self.obj: return self[attr] raise AttributeError("%r object has no attribute %r" % (type(self).__name__, attr)) def _dir_additions(self): return self.obj._dir_additions() def _get_window(self, other=None): return self.window @property def _window_type(self): return self.__class__.__name__ def __unicode__(self): """ provide a nice str repr of our rolling object """ attrs = ["{k}={v}".format(k=k, v=getattr(self, k)) for k in self._attributes if getattr(self, k, None) is not None] return "{klass} [{attrs}]".format(klass=self._window_type, attrs=','.join(attrs)) def _shallow_copy(self, obj=None, **kwargs): """ return a new object with the replacement attributes """ if obj is None: obj = self._selected_obj.copy() if isinstance(obj, self.__class__): obj = obj.obj for attr in self._attributes: if attr not in kwargs: kwargs[attr] = getattr(self, attr) return self._constructor(obj, **kwargs) def _prep_values(self, values=None, kill_inf=True, how=None): if values is None: values = 
getattr(self._selected_obj, 'values', self._selected_obj) # GH #12373 : rolling functions error on float32 data # make sure the data is coerced to float64 if com.is_float_dtype(values.dtype): values = com._ensure_float64(values) elif com.is_integer_dtype(values.dtype): values = com._ensure_float64(values) elif com.needs_i8_conversion(values.dtype): raise NotImplementedError("ops for {action} for this " "dtype {dtype} are not " "implemented".format( action=self._window_type, dtype=values.dtype)) else: try: values = com._ensure_float64(values) except (ValueError, TypeError): raise TypeError("cannot handle this type -> {0}" "".format(values.dtype)) if kill_inf: values = values.copy() values[np.isinf(values)] = np.NaN return values def _wrap_result(self, result, block=None, obj=None): """ wrap a single result """ if obj is None: obj = self._selected_obj if isinstance(result, np.ndarray): # coerce if necessary if block is not None: if com.is_timedelta64_dtype(block.values.dtype): result = pd.to_timedelta( result.ravel(), unit='ns').values.reshape(result.shape) if result.ndim == 1: from pandas import Series return Series(result, obj.index, name=obj.name) return type(obj)(result, index=obj.index, columns=block.columns) return result def _wrap_results(self, results, blocks, obj): """ wrap the results Paramters --------- results : list of ndarrays blocks : list of blocks obj : conformed data (may be resampled) """ final = [] for result, block in zip(results, blocks): result = self._wrap_result(result, block=block, obj=obj) if result.ndim == 1: return result final.append(result) if not len(final): return obj.astype('float64') return pd.concat(final, axis=1).reindex(columns=obj.columns) def _center_window(self, result, window): """ center the result in the window """ if self.axis > result.ndim - 1: raise ValueError("Requested axis is larger then no. of argument " "dimensions") from pandas import Series, DataFrame offset = _offset(window, True) if offset > 0: if isinstance(result, (Series, DataFrame)): result = result.slice_shift(-offset, axis=self.axis) else: lead_indexer = [slice(None)] * result.ndim lead_indexer[self.axis] = slice(offset, None) result = np.copy(result[tuple(lead_indexer)]) return result def aggregate(self, arg, *args, **kwargs): result, how = self._aggregate(arg, *args, **kwargs) if result is None: return self.apply(arg, args=args, kwargs=kwargs) return result agg = aggregate _shared_docs['sum'] = dedent(""" %(name)s sum Parameters ---------- how : string, default None (DEPRECATED) Method for down- or re-sampling""") _shared_docs['mean'] = dedent(""" %(name)s mean Parameters ---------- how : string, default None (DEPRECATED) Method for down- or re-sampling""") class Window(_Window): """ Provides rolling transformations. .. versionadded:: 0.18.0 Parameters ---------- window : int Size of the moving window. This is the number of observations used for calculating the statistic. min_periods : int, default None Minimum number of observations in window required to have a value (otherwise result is NA). freq : string or DateOffset object, optional (default None) (DEPRECATED) Frequency to conform the data to before computing the statistic. Specified as a frequency string or DateOffset object. center : boolean, default False Set the labels at the center of the window. 
win_type : string, default None prove a window type, see the notes below axis : int, default 0 Returns ------- a Window sub-classed for the particular operation Notes ----- By default, the result is set to the right edge of the window. This can be changed to the center of the window by setting ``center=True``. The `freq` keyword is used to conform time series data to a specified frequency by resampling the data. This is done with the default parameters of :meth:`~pandas.Series.resample` (i.e. using the `mean`). The recognized window types are: * ``boxcar`` * ``triang`` * ``blackman`` * ``hamming`` * ``bartlett`` * ``parzen`` * ``bohman`` * ``blackmanharris`` * ``nuttall`` * ``barthann`` * ``kaiser`` (needs beta) * ``gaussian`` (needs std) * ``general_gaussian`` (needs power, width) * ``slepian`` (needs width). """ def _prep_window(self, **kwargs): """ provide validation for our window type, return the window """ window = self._get_window() if isinstance(window, (list, tuple, np.ndarray)): return com._asarray_tuplesafe(window).astype(float) elif com.is_integer(window): try: import scipy.signal as sig except ImportError: raise ImportError('Please install scipy to generate window ' 'weight') # the below may pop from kwargs win_type = _validate_win_type(self.win_type, kwargs) return sig.get_window(win_type, window).astype(float) raise ValueError('Invalid window %s' % str(window)) def _apply_window(self, mean=True, how=None, **kwargs): """ Applies a moving window of type ``window_type`` on the data. Parameters ---------- mean : boolean, default True If True computes weighted mean, else weighted sum how : string, default to None (DEPRECATED) how to resample Returns ------- y : type of input argument """ window = self._prep_window(**kwargs) center = self.center blocks, obj = self._create_blocks(how=how) results = [] for b in blocks: try: values = self._prep_values(b.values) except TypeError: results.append(b.values.copy()) continue if values.size == 0: results.append(values.copy()) continue offset = _offset(window, center) additional_nans = np.array([np.NaN] * offset) def f(arg, *args, **kwargs): minp = _use_window(self.min_periods, len(window)) return algos.roll_window(np.concatenate((arg, additional_nans)) if center else arg, window, minp, avg=mean) result = np.apply_along_axis(f, self.axis, values) if center: result = self._center_window(result, window) results.append(result) return self._wrap_results(results, blocks, obj) @Substitution(name='rolling') @Appender(SelectionMixin._see_also_template) @Appender(SelectionMixin._agg_doc) def aggregate(self, arg, *args, **kwargs): result, how = self._aggregate(arg, *args, **kwargs) if result is None: # these must apply directly result = arg(self) return result agg = aggregate @Substitution(name='window') @Appender(_doc_template) @Appender(_shared_docs['sum']) def sum(self, **kwargs): return self._apply_window(mean=False, **kwargs) @Substitution(name='window') @Appender(_doc_template) @Appender(_shared_docs['mean']) def mean(self, **kwargs): return self._apply_window(mean=True, **kwargs) class _Rolling(_Window): @property def _constructor(self): return Rolling def _apply(self, func, window=None, center=None, check_minp=None, how=None, **kwargs): """ Rolling statistical measure using supplied function. Designed to be used with passed-in Cython array-based functions. 
Parameters ---------- func : string/callable to apply window : int/array, default to _get_window() center : boolean, default to self.center check_minp : function, default to _use_window how : string, default to None (DEPRECATED) how to resample Returns ------- y : type of input """ if center is None: center = self.center if window is None: window = self._get_window() if check_minp is None: check_minp = _use_window blocks, obj = self._create_blocks(how=how) results = [] for b in blocks: try: values = self._prep_values(b.values) except TypeError: results.append(b.values.copy()) continue if values.size == 0: results.append(values.copy()) continue # if we have a string function name, wrap it if isinstance(func, compat.string_types): if not hasattr(algos, func): raise ValueError("we do not support this function " "algos.{0}".format(func)) cfunc = getattr(algos, func) def func(arg, window, min_periods=None): minp = check_minp(min_periods, window) # GH #12373: rolling functions error on float32 data return cfunc(com._ensure_float64(arg), window, minp, **kwargs) # calculation function if center: offset = _offset(window, center) additional_nans = np.array([np.NaN] * offset) def calc(x): return func(np.concatenate((x, additional_nans)), window, min_periods=self.min_periods) else: def calc(x): return func(x, window, min_periods=self.min_periods) if values.ndim > 1: result = np.apply_along_axis(calc, self.axis, values) else: result = calc(values) if center: result = self._center_window(result, window) results.append(result) return self._wrap_results(results, blocks, obj) class _Rolling_and_Expanding(_Rolling): _shared_docs['count'] = """%(name)s count of number of non-NaN observations inside provided window.""" def count(self): obj = self._convert_freq() window = self._get_window() window = min(window, len(obj)) if not self.center else window blocks, obj = self._create_blocks(how=None) results = [] for b in blocks: if com.needs_i8_conversion(b.values): result = b.notnull().astype(int) else: try: result = np.isfinite(b).astype(float) except TypeError: result = np.isfinite(b.astype(float)).astype(float) result[pd.isnull(result)] = 0 result = self._constructor(result, window=window, min_periods=0, center=self.center).sum() results.append(result) return self._wrap_results(results, blocks, obj) _shared_docs['apply'] = dedent(""" %(name)s function apply Parameters ---------- func : function Must produce a single value from an ndarray input \*args and \*\*kwargs are passed to the function""") def apply(self, func, args=(), kwargs={}): # TODO: _level is unused? 
_level = kwargs.pop('_level', None) # noqa window = self._get_window() offset = _offset(window, self.center) def f(arg, window, min_periods): minp = _use_window(min_periods, window) return algos.roll_generic(arg, window, minp, offset, func, args, kwargs) return self._apply(f, center=False) def sum(self, **kwargs): return self._apply('roll_sum', **kwargs) _shared_docs['max'] = dedent(""" %(name)s maximum Parameters ---------- how : string, default 'max' (DEPRECATED) Method for down- or re-sampling""") def max(self, how=None, **kwargs): if self.freq is not None and how is None: how = 'max' return self._apply('roll_max', how=how, **kwargs) _shared_docs['min'] = dedent(""" %(name)s minimum Parameters ---------- how : string, default 'min' (DEPRECATED) Method for down- or re-sampling""") def min(self, how=None, **kwargs): if self.freq is not None and how is None: how = 'min' return self._apply('roll_min', how=how, **kwargs) def mean(self, **kwargs): return self._apply('roll_mean', **kwargs) _shared_docs['median'] = dedent(""" %(name)s median Parameters ---------- how : string, default 'median' (DEPRECATED) Method for down- or re-sampling""") def median(self, how=None, **kwargs): if self.freq is not None and how is None: how = 'median' return self._apply('roll_median_c', how=how, **kwargs) _shared_docs['std'] = dedent(""" %(name)s standard deviation Parameters ---------- ddof : int, default 1 Delta Degrees of Freedom. The divisor used in calculations is ``N - ddof``, where ``N`` represents the number of elements.""") def std(self, ddof=1, **kwargs): window = self._get_window() def f(arg, *args, **kwargs): minp = _require_min_periods(1)(self.min_periods, window) return _zsqrt(algos.roll_var(arg, window, minp, ddof)) return self._apply(f, check_minp=_require_min_periods(1), **kwargs) _shared_docs['var'] = dedent(""" %(name)s variance Parameters ---------- ddof : int, default 1 Delta Degrees of Freedom. The divisor used in calculations is ``N - ddof``, where ``N`` represents the number of elements.""") def var(self, ddof=1, **kwargs): return self._apply('roll_var', check_minp=_require_min_periods(1), ddof=ddof, **kwargs) _shared_docs['skew'] = """Unbiased %(name)s skewness""" def skew(self, **kwargs): return self._apply('roll_skew', check_minp=_require_min_periods(3), **kwargs) _shared_docs['kurt'] = """Unbiased %(name)s kurtosis""" def kurt(self, **kwargs): return self._apply('roll_kurt', check_minp=_require_min_periods(4), **kwargs) _shared_docs['quantile'] = dedent(""" %(name)s quantile Parameters ---------- quantile : float 0 <= quantile <= 1""") def quantile(self, quantile, **kwargs): window = self._get_window() def f(arg, *args, **kwargs): minp = _use_window(self.min_periods, window) return algos.roll_quantile(arg, window, minp, quantile) return self._apply(f, **kwargs) _shared_docs['cov'] = dedent(""" %(name)s sample covariance Parameters ---------- other : Series, DataFrame, or ndarray, optional if not supplied then will default to self and produce pairwise output pairwise : bool, default None If False then only matching columns between self and other will be used and the output will be a DataFrame. If True then all pairwise combinations will be calculated and the output will be a Panel in the case of DataFrame inputs. In the case of missing elements, only complete pairwise observations will be used. ddof : int, default 1 Delta Degrees of Freedom. 
The divisor used in calculations is ``N - ddof``, where ``N`` represents the number of elements.""") def cov(self, other=None, pairwise=None, ddof=1, **kwargs): if other is None: other = self._selected_obj # only default unset pairwise = True if pairwise is None else pairwise other = self._shallow_copy(other) window = self._get_window(other) def _get_cov(X, Y): # GH #12373 : rolling functions error on float32 data # to avoid potential overflow, cast the data to float64 X = X.astype('float64') Y = Y.astype('float64') mean = lambda x: x.rolling(window, self.min_periods, center=self.center).mean(**kwargs) count = (X + Y).rolling(window=window, center=self.center).count(**kwargs) bias_adj = count / (count - ddof) return (mean(X * Y) - mean(X) * mean(Y)) * bias_adj return _flex_binary_moment(self._selected_obj, other._selected_obj, _get_cov, pairwise=bool(pairwise)) _shared_docs['corr'] = dedent(""" %(name)s sample correlation Parameters ---------- other : Series, DataFrame, or ndarray, optional if not supplied then will default to self and produce pairwise output pairwise : bool, default None If False then only matching columns between self and other will be used and the output will be a DataFrame. If True then all pairwise combinations will be calculated and the output will be a Panel in the case of DataFrame inputs. In the case of missing elements, only complete pairwise observations will be used.""") def corr(self, other=None, pairwise=None, **kwargs): if other is None: other = self._selected_obj # only default unset pairwise = True if pairwise is None else pairwise other = self._shallow_copy(other) window = self._get_window(other) def _get_corr(a, b): a = a.rolling(window=window, min_periods=self.min_periods, freq=self.freq, center=self.center) b = b.rolling(window=window, min_periods=self.min_periods, freq=self.freq, center=self.center) return a.cov(b, **kwargs) / (a.std(**kwargs) * b.std(**kwargs)) return _flex_binary_moment(self._selected_obj, other._selected_obj, _get_corr, pairwise=bool(pairwise)) class Rolling(_Rolling_and_Expanding): """ Provides rolling window calculcations. .. versionadded:: 0.18.0 Parameters ---------- window : int Size of the moving window. This is the number of observations used for calculating the statistic. min_periods : int, default None Minimum number of observations in window required to have a value (otherwise result is NA). freq : string or DateOffset object, optional (default None) (DEPRECATED) Frequency to conform the data to before computing the statistic. Specified as a frequency string or DateOffset object. center : boolean, default False Set the labels at the center of the window. axis : int, default 0 Returns ------- a Window sub-classed for the particular operation Notes ----- By default, the result is set to the right edge of the window. This can be changed to the center of the window by setting ``center=True``. The `freq` keyword is used to conform time series data to a specified frequency by resampling the data. This is done with the default parameters of :meth:`~pandas.Series.resample` (i.e. using the `mean`). 
""" @Substitution(name='rolling') @Appender(SelectionMixin._see_also_template) @Appender(SelectionMixin._agg_doc) def aggregate(self, arg, *args, **kwargs): return super(Rolling, self).aggregate(arg, *args, **kwargs) agg = aggregate @Substitution(name='rolling') @Appender(_doc_template) @Appender(_shared_docs['count']) def count(self): return super(Rolling, self).count() @Substitution(name='rolling') @Appender(_doc_template) @Appender(_shared_docs['apply']) def apply(self, func, args=(), kwargs={}): return super(Rolling, self).apply(func, args=args, kwargs=kwargs) @Substitution(name='rolling') @Appender(_doc_template) @Appender(_shared_docs['sum']) def sum(self, **kwargs): return super(Rolling, self).sum(**kwargs) @Substitution(name='rolling') @Appender(_doc_template) @Appender(_shared_docs['max']) def max(self, **kwargs): return super(Rolling, self).max(**kwargs) @Substitution(name='rolling') @Appender(_doc_template) @Appender(_shared_docs['min']) def min(self, **kwargs): return super(Rolling, self).min(**kwargs) @Substitution(name='rolling') @Appender(_doc_template) @Appender(_shared_docs['mean']) def mean(self, **kwargs): return super(Rolling, self).mean(**kwargs) @Substitution(name='rolling') @Appender(_doc_template) @Appender(_shared_docs['median']) def median(self, **kwargs): return super(Rolling, self).median(**kwargs) @Substitution(name='rolling') @Appender(_doc_template) @Appender(_shared_docs['std']) def std(self, ddof=1, **kwargs): return super(Rolling, self).std(ddof=ddof, **kwargs) @Substitution(name='rolling') @Appender(_doc_template) @Appender(_shared_docs['var']) def var(self, ddof=1, **kwargs): return super(Rolling, self).var(ddof=ddof, **kwargs) @Substitution(name='rolling') @Appender(_doc_template) @Appender(_shared_docs['skew']) def skew(self, **kwargs): return super(Rolling, self).skew(**kwargs) @Substitution(name='rolling') @Appender(_doc_template) @Appender(_shared_docs['kurt']) def kurt(self, **kwargs): return super(Rolling, self).kurt(**kwargs) @Substitution(name='rolling') @Appender(_doc_template) @Appender(_shared_docs['quantile']) def quantile(self, quantile, **kwargs): return super(Rolling, self).quantile(quantile=quantile, **kwargs) @Substitution(name='rolling') @Appender(_doc_template) @Appender(_shared_docs['cov']) def cov(self, other=None, pairwise=None, ddof=1, **kwargs): return super(Rolling, self).cov(other=other, pairwise=pairwise, ddof=ddof, **kwargs) @Substitution(name='rolling') @Appender(_doc_template) @Appender(_shared_docs['corr']) def corr(self, other=None, pairwise=None, **kwargs): return super(Rolling, self).corr(other=other, pairwise=pairwise, **kwargs) class Expanding(_Rolling_and_Expanding): """ Provides expanding transformations. .. versionadded:: 0.18.0 Parameters ---------- min_periods : int, default None Minimum number of observations in window required to have a value (otherwise result is NA). freq : string or DateOffset object, optional (default None) (DEPRECATED) Frequency to conform the data to before computing the statistic. Specified as a frequency string or DateOffset object. center : boolean, default False Set the labels at the center of the window. axis : int, default 0 Returns ------- a Window sub-classed for the particular operation Notes ----- By default, the result is set to the right edge of the window. This can be changed to the center of the window by setting ``center=True``. The `freq` keyword is used to conform time series data to a specified frequency by resampling the data. 
This is done with the default parameters of :meth:`~pandas.Series.resample` (i.e. using the `mean`). """ _attributes = ['min_periods', 'freq', 'center', 'axis'] def __init__(self, obj, min_periods=1, freq=None, center=False, axis=0, **kwargs): return super(Expanding, self).__init__(obj=obj, min_periods=min_periods, freq=freq, center=center, axis=axis) @property def _constructor(self): return Expanding def _get_window(self, other=None): obj = self._selected_obj if other is None: return (max(len(obj), self.min_periods) if self.min_periods else len(obj)) return (max((len(obj) + len(obj)), self.min_periods) if self.min_periods else (len(obj) + len(obj))) @Substitution(name='expanding') @Appender(SelectionMixin._see_also_template) @Appender(SelectionMixin._agg_doc) def aggregate(self, arg, *args, **kwargs): return super(Expanding, self).aggregate(arg, *args, **kwargs) agg = aggregate @Substitution(name='expanding') @Appender(_doc_template) @Appender(_shared_docs['count']) def count(self, **kwargs): return super(Expanding, self).count(**kwargs) @Substitution(name='expanding') @Appender(_doc_template) @Appender(_shared_docs['apply']) def apply(self, func, args=(), kwargs={}): return super(Expanding, self).apply(func, args=args, kwargs=kwargs) @Substitution(name='expanding') @Appender(_doc_template) @Appender(_shared_docs['sum']) def sum(self, **kwargs): return super(Expanding, self).sum(**kwargs) @Substitution(name='expanding') @Appender(_doc_template) @Appender(_shared_docs['max']) def max(self, **kwargs): return super(Expanding, self).max(**kwargs) @Substitution(name='expanding') @Appender(_doc_template) @Appender(_shared_docs['min']) def min(self, **kwargs): return super(Expanding, self).min(**kwargs) @Substitution(name='expanding') @Appender(_doc_template) @Appender(_shared_docs['mean']) def mean(self, **kwargs): return super(Expanding, self).mean(**kwargs) @Substitution(name='expanding') @Appender(_doc_template) @Appender(_shared_docs['median']) def median(self, **kwargs): return super(Expanding, self).median(**kwargs) @Substitution(name='expanding') @Appender(_doc_template) @Appender(_shared_docs['std']) def std(self, ddof=1, **kwargs): return super(Expanding, self).std(ddof=ddof, **kwargs) @Substitution(name='expanding') @Appender(_doc_template) @Appender(_shared_docs['var']) def var(self, ddof=1, **kwargs): return super(Expanding, self).var(ddof=ddof, **kwargs) @Substitution(name='expanding') @Appender(_doc_template) @Appender(_shared_docs['skew']) def skew(self, **kwargs): return super(Expanding, self).skew(**kwargs) @Substitution(name='expanding') @Appender(_doc_template) @Appender(_shared_docs['kurt']) def kurt(self, **kwargs): return super(Expanding, self).kurt(**kwargs) @Substitution(name='expanding') @Appender(_doc_template) @Appender(_shared_docs['quantile']) def quantile(self, quantile, **kwargs): return super(Expanding, self).quantile(quantile=quantile, **kwargs) @Substitution(name='expanding') @Appender(_doc_template) @Appender(_shared_docs['cov']) def cov(self, other=None, pairwise=None, ddof=1, **kwargs): return super(Expanding, self).cov(other=other, pairwise=pairwise, ddof=ddof, **kwargs) @Substitution(name='expanding') @Appender(_doc_template) @Appender(_shared_docs['corr']) def corr(self, other=None, pairwise=None, **kwargs): return super(Expanding, self).corr(other=other, pairwise=pairwise, **kwargs) _bias_template = """ Parameters ---------- bias : boolean, default False Use a standard estimation bias correction """ _pairwise_template = """ Parameters ---------- other : 
Series, DataFrame, or ndarray, optional if not supplied then will default to self and produce pairwise output pairwise : bool, default None If False then only matching columns between self and other will be used and the output will be a DataFrame. If True then all pairwise combinations will be calculated and the output will be a Panel in the case of DataFrame inputs. In the case of missing elements, only complete pairwise observations will be used. bias : boolean, default False Use a standard estimation bias correction """ class EWM(_Rolling): r""" Provides exponential weighted functions .. versionadded:: 0.18.0 Parameters ---------- com : float, optional Specify decay in terms of center of mass, :math:`\alpha = 1 / (1 + com),\text{ for } com \geq 0` span : float, optional Specify decay in terms of span, :math:`\alpha = 2 / (span + 1),\text{ for } span \geq 1` halflife : float, optional Specify decay in terms of half-life, :math:`\alpha = 1 - exp(log(0.5) / halflife),\text{ for } halflife > 0` alpha : float, optional Specify smoothing factor :math:`\alpha` directly, :math:`0 < \alpha \leq 1` .. versionadded:: 0.18.0 min_periods : int, default 0 Minimum number of observations in window required to have a value (otherwise result is NA). freq : None or string alias / date offset object, default=None (DEPRECATED) Frequency to conform to before computing statistic adjust : boolean, default True Divide by decaying adjustment factor in beginning periods to account for imbalance in relative weightings (viewing EWMA as a moving average) ignore_na : boolean, default False Ignore missing values when calculating weights; specify True to reproduce pre-0.15.0 behavior Returns ------- a Window sub-classed for the particular operation Notes ----- Exactly one of center of mass, span, half-life, and alpha must be provided. Allowed values and relationship between the parameters are specified in the parameter descriptions above; see the link at the end of this section for a detailed explanation. The `freq` keyword is used to conform time series data to a specified frequency by resampling the data. This is done with the default parameters of :meth:`~pandas.Series.resample` (i.e. using the `mean`). When adjust is True (default), weighted averages are calculated using weights (1-alpha)**(n-1), (1-alpha)**(n-2), ..., 1-alpha, 1. When adjust is False, weighted averages are calculated recursively as: weighted_average[0] = arg[0]; weighted_average[i] = (1-alpha)*weighted_average[i-1] + alpha*arg[i]. When ignore_na is False (default), weights are based on absolute positions. For example, the weights of x and y used in calculating the final weighted average of [x, None, y] are (1-alpha)**2 and 1 (if adjust is True), and (1-alpha)**2 and alpha (if adjust is False). When ignore_na is True (reproducing pre-0.15.0 behavior), weights are based on relative positions. For example, the weights of x and y used in calculating the final weighted average of [x, None, y] are 1-alpha and 1 (if adjust is True), and 1-alpha and alpha (if adjust is False). 
More details can be found at http://pandas.pydata.org/pandas-docs/stable/computation.html#exponentially-weighted-windows """ _attributes = ['com', 'min_periods', 'freq', 'adjust', 'ignore_na', 'axis'] def __init__(self, obj, com=None, span=None, halflife=None, alpha=None, min_periods=0, freq=None, adjust=True, ignore_na=False, axis=0): self.obj = obj self.com = _get_center_of_mass(com, span, halflife, alpha) self.min_periods = min_periods self.freq = freq self.adjust = adjust self.ignore_na = ignore_na self.axis = axis @property def _constructor(self): return EWM @Substitution(name='ewm') @Appender(SelectionMixin._see_also_template) @Appender(SelectionMixin._agg_doc) def aggregate(self, arg, *args, **kwargs): return super(EWM, self).aggregate(arg, *args, **kwargs) agg = aggregate def _apply(self, func, how=None, **kwargs): """Rolling statistical measure using supplied function. Designed to be used with passed-in Cython array-based functions. Parameters ---------- func : string/callable to apply how : string, default to None (DEPRECATED) how to resample Returns ------- y : type of input argument """ blocks, obj = self._create_blocks(how=how) results = [] for b in blocks: try: values = self._prep_values(b.values) except TypeError: results.append(b.values.copy()) continue if values.size == 0: results.append(values.copy()) continue # if we have a string function name, wrap it if isinstance(func, compat.string_types): if not hasattr(algos, func): raise ValueError("we do not support this function " "algos.{0}".format(func)) cfunc = getattr(algos, func) def func(arg): return cfunc(arg, self.com, int(self.adjust), int(self.ignore_na), int(self.min_periods)) results.append(np.apply_along_axis(func, self.axis, values)) return self._wrap_results(results, blocks, obj) @Substitution(name='ewm') @Appender(_doc_template) def mean(self, **kwargs): """exponential weighted moving average""" return self._apply('ewma', **kwargs) @Substitution(name='ewm') @Appender(_doc_template) @Appender(_bias_template) def std(self, bias=False, **kwargs): """exponential weighted moving stddev""" return _zsqrt(self.var(bias=bias, **kwargs)) vol = std @Substitution(name='ewm') @Appender(_doc_template) @Appender(_bias_template) def var(self, bias=False, **kwargs): """exponential weighted moving variance""" def f(arg): return algos.ewmcov(arg, arg, self.com, int(self.adjust), int(self.ignore_na), int(self.min_periods), int(bias)) return self._apply(f, **kwargs) @Substitution(name='ewm') @Appender(_doc_template) @Appender(_pairwise_template) def cov(self, other=None, pairwise=None, bias=False, **kwargs): """exponential weighted sample covariance""" if other is None: other = self._selected_obj # only default unset pairwise = True if pairwise is None else pairwise other = self._shallow_copy(other) def _get_cov(X, Y): X = self._shallow_copy(X) Y = self._shallow_copy(Y) cov = algos.ewmcov(X._prep_values(), Y._prep_values(), self.com, int(self.adjust), int(self.ignore_na), int(self.min_periods), int(bias)) return X._wrap_result(cov) return _flex_binary_moment(self._selected_obj, other._selected_obj, _get_cov, pairwise=bool(pairwise)) @Substitution(name='ewm') @Appender(_doc_template) @Appender(_pairwise_template) def corr(self, other=None, pairwise=None, **kwargs): """exponential weighted sample correlation""" if other is None: other = self._selected_obj # only default unset pairwise = True if pairwise is None else pairwise other = self._shallow_copy(other) def _get_corr(X, Y): X = self._shallow_copy(X) Y = self._shallow_copy(Y) def 
_cov(x, y): return algos.ewmcov(x, y, self.com, int(self.adjust), int(self.ignore_na), int(self.min_periods), 1) x_values = X._prep_values() y_values = Y._prep_values() cov = _cov(x_values, y_values) x_var = _cov(x_values, x_values) y_var = _cov(y_values, y_values) corr = cov / _zsqrt(x_var * y_var) return X._wrap_result(corr) return _flex_binary_moment(self._selected_obj, other._selected_obj, _get_corr, pairwise=bool(pairwise)) # Helper Funcs def _flex_binary_moment(arg1, arg2, f, pairwise=False): from pandas import Series, DataFrame, Panel if not (isinstance(arg1, (np.ndarray, Series, DataFrame)) and isinstance(arg2, (np.ndarray, Series, DataFrame))): raise TypeError("arguments to moment function must be of type " "np.ndarray/Series/DataFrame") if (isinstance(arg1, (np.ndarray, Series)) and isinstance(arg2, (np.ndarray, Series))): X, Y = _prep_binary(arg1, arg2) return f(X, Y) elif isinstance(arg1, DataFrame): def dataframe_from_int_dict(data, frame_template): result = DataFrame(data, index=frame_template.index) if len(result.columns) > 0: result.columns = frame_template.columns[result.columns] return result results = {} if isinstance(arg2, DataFrame): if pairwise is False: if arg1 is arg2: # special case in order to handle duplicate column names for i, col in enumerate(arg1.columns): results[i] = f(arg1.iloc[:, i], arg2.iloc[:, i]) return dataframe_from_int_dict(results, arg1) else: if not arg1.columns.is_unique: raise ValueError("'arg1' columns are not unique") if not arg2.columns.is_unique: raise ValueError("'arg2' columns are not unique") X, Y = arg1.align(arg2, join='outer') X = X + 0 * Y Y = Y + 0 * X res_columns = arg1.columns.union(arg2.columns) for col in res_columns: if col in X and col in Y: results[col] = f(X[col], Y[col]) return DataFrame(results, index=X.index, columns=res_columns) elif pairwise is True: results = defaultdict(dict) for i, k1 in enumerate(arg1.columns): for j, k2 in enumerate(arg2.columns): if j < i and arg2 is arg1: # Symmetric case results[i][j] = results[j][i] else: results[i][j] = f(*_prep_binary(arg1.iloc[:, i], arg2.iloc[:, j])) p = Panel.from_dict(results).swapaxes('items', 'major') if len(p.major_axis) > 0: p.major_axis = arg1.columns[p.major_axis] if len(p.minor_axis) > 0: p.minor_axis = arg2.columns[p.minor_axis] return p else: raise ValueError("'pairwise' is not True/False") else: results = {} for i, col in enumerate(arg1.columns): results[i] = f(*_prep_binary(arg1.iloc[:, i], arg2)) return dataframe_from_int_dict(results, arg1) else: return _flex_binary_moment(arg2, arg1, f) def _get_center_of_mass(com, span, halflife, alpha): valid_count = len([x for x in [com, span, halflife, alpha] if x is not None]) if valid_count > 1: raise ValueError("com, span, halflife, and alpha " "are mutually exclusive") # Convert to center of mass; domain checks ensure 0 < alpha <= 1 if com is not None: if com < 0: raise ValueError("com must satisfy: com >= 0") elif span is not None: if span < 1: raise ValueError("span must satisfy: span >= 1") com = (span - 1) / 2. elif halflife is not None: if halflife <= 0: raise ValueError("halflife must satisfy: halflife > 0") decay = 1 - np.exp(np.log(0.5) / halflife) com = 1 / decay - 1 elif alpha is not None: if alpha <= 0 or alpha > 1: raise ValueError("alpha must satisfy: 0 < alpha <= 1") com = (1.0 - alpha) / alpha else: raise ValueError("Must pass one of com, span, halflife, or alpha") return float(com) def _offset(window, center): if not com.is_integer(window): window = len(window) offset = (window - 1) / 2. 
if center else 0 try: return int(offset) except: return offset.astype(int) def _require_min_periods(p): def _check_func(minp, window): if minp is None: return window else: return max(p, minp) return _check_func def _use_window(minp, window): if minp is None: return window else: return minp def _zsqrt(x): result = np.sqrt(x) mask = x < 0 from pandas import DataFrame if isinstance(x, DataFrame): if mask.values.any(): result[mask] = 0 else: if mask.any(): result[mask] = 0 return result def _prep_binary(arg1, arg2): if not isinstance(arg2, type(arg1)): raise Exception('Input arrays must be of the same type!') # mask out values, this also makes a common index... X = arg1 + 0 * arg2 Y = arg2 + 0 * arg1 return X, Y def _validate_win_type(win_type, kwargs): # may pop from kwargs arg_map = {'kaiser': ['beta'], 'gaussian': ['std'], 'general_gaussian': ['power', 'width'], 'slepian': ['width']} if win_type in arg_map: return tuple([win_type] + _pop_args(win_type, arg_map[win_type], kwargs)) return win_type def _pop_args(win_type, arg_names, kwargs): msg = '%s window requires %%s' % win_type all_args = [] for n in arg_names: if n not in kwargs: raise ValueError(msg % n) all_args.append(kwargs.pop(n)) return all_args # Top-level exports def rolling(obj, win_type=None, **kwds): from pandas import Series, DataFrame if not isinstance(obj, (Series, DataFrame)): raise TypeError('invalid type: %s' % type(obj)) if win_type is not None: return Window(obj, win_type=win_type, **kwds) return Rolling(obj, **kwds) rolling.__doc__ = Window.__doc__ def expanding(obj, **kwds): from pandas import Series, DataFrame if not isinstance(obj, (Series, DataFrame)): raise TypeError('invalid type: %s' % type(obj)) return Expanding(obj, **kwds) expanding.__doc__ = Expanding.__doc__ def ewm(obj, **kwds): from pandas import Series, DataFrame if not isinstance(obj, (Series, DataFrame)): raise TypeError('invalid type: %s' % type(obj)) return EWM(obj, **kwds) ewm.__doc__ = EWM.__doc__
gpl-2.0
-124,270,254,565,814,900
32.419905
95
0.571155
false
4.108867
false
false
false
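A quick illustration of the decay-parameter conversions performed by _get_center_of_mass in the pandas excerpt above; the span, halflife and alpha values below are arbitrary examples, not taken from the source.
# Re-application of the span/halflife/alpha -> center-of-mass formulas
# from _get_center_of_mass above (values are examples only).
import numpy as np

span = 20
com_from_span = (span - 1) / 2.              # 9.5

halflife = 10
decay = 1 - np.exp(np.log(0.5) / halflife)
com_from_halflife = 1 / decay - 1            # ~13.93

alpha = 0.1
com_from_alpha = (1.0 - alpha) / alpha       # 9.0

print(com_from_span, com_from_halflife, com_from_alpha)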
ternaris/marv-robotics
code/marv/marv_node/stream.py
1
5996
# Copyright 2016 - 2018 Ternaris. # SPDX-License-Identifier: AGPL-3.0-only # pylint: disable=import-outside-toplevel from collections import deque from itertools import count from numbers import Integral from capnp.lib.capnp import KjException from marv_api.setid import SetID from marv_api.utils import NOTSET, err from marv_pycapnp import Wrapper from .mixins import Keyed, LoggerMixin, Request, Task class RequestedMessageTooOld(Exception): """Indicate a message requested from a stream is not in memory anymore.""" class Handle(Keyed): @property def key(self): return (self.setid, self.node, self.name) @property def key_abbrev(self): return '.'.join([self.setid.abbrev, self.node.abbrev, self.name]) def __init__(self, setid, node, name, group=None, header=None): # pylint: disable=too-many-arguments from .node import Node assert isinstance(setid, SetID), setid assert isinstance(node, Node), node self.setid = setid self.node = node self.name = name self.header = header or {} self.group = group if group is not None else self.node.group self._counter = count(-1) # -1 will be the stream's handle def msg(self, __msg=None, _schema=NOTSET, **kw): from .io import TheEnd assert (__msg is not None) ^ bool(kw), (__msg, kw) data = kw if __msg is None else __msg if self.group: assert isinstance(data, (Handle, TheEnd)), (self, data) elif not isinstance(data, (Wrapper, Handle, TheEnd)): if _schema is NOTSET: from marv_api.ioctrl import NODE_SCHEMA # pylint: disable=import-outside-toplevel schema = NODE_SCHEMA.get() else: schema = _schema if schema is not None: try: data = Wrapper.from_dict(schema, data) except KjException: from pprint import pformat # pylint: disable=import-outside-toplevel _node = schema.schema.node err(f'Schema violation for {_node.displayName} with data:\n' f'{pformat(data)}\nschema: {_node.displayName}') raise return Msg(next(self._counter), self, data) def finish(self): from .io import THEEND return self.msg(THEEND) def create_stream(self, name, group=False, **header): assert self.group from .io import CreateStream return CreateStream(self, name, group, header) def make_file(self, name): from .io import MakeFile return MakeFile(self, name) def __getattr__(self, name): try: return self.header[name] except KeyError: raise AttributeError(name) def __repr__(self): return f'<{type(self).__name__} {self.key_abbrev}>' class Msg(Keyed): @property def key(self): return (self._idx, self._handle) @property def idx(self): return self._idx @property def handle(self): return self._handle @property def data(self): return self._data def __init__(self, idx=None, handle=None, data=None): assert idx is None or isinstance(idx, Integral), idx assert isinstance(handle, Handle), handle self._idx = idx self._handle = handle self._data = data def __iter__(self): return iter([self._idx, self._handle, self._data]) def __repr__(self): from .io import THEEND flags = ' HANDLE' if isinstance(self.data, Handle) else \ ' THEEND' if self.data is THEEND else '' return f'Msg({self._idx}, {self.handle!r}{flags})' Task.register(Msg) # TODO: maybe not Request.register(Msg) class Stream(Keyed, LoggerMixin): CACHESIZE = 50 cache = None ended = None handle = None @property def key(self): return self.handle.key @property def key_abbrev(self): return self.handle.key_abbrev @property def setid(self): return self.handle.setid @property def node(self): return self.handle.node @property def name(self): return self.handle.name @property def group(self): return self.handle.group def info(self): return [repr(msg) for msg in self.cache] # 
pylint: disable=not-an-iterable def __repr__(self): return f'<{type(self).__name__} {self.key_abbrev}>' Task.register(Stream) class VolatileStream(Stream): def __init__(self, handle, parent=None): self.handle = handle self.parent = parent self.cache = deque((), self.CACHESIZE) def add_msg(self, msg): from .io import THEEND assert msg.handle == self.handle assert msg.idx is not None if self.group: assert isinstance(msg.data, Handle) or msg.data is THEEND, (self, msg) else: assert not isinstance(msg.data, Handle) or msg.idx == -1, (self, msg) expected_idx = self.cache[0].idx + 1 if self.cache else -1 assert msg.idx == expected_idx, (msg.idx, expected_idx) self.cache.appendleft(msg) if msg.data is THEEND: self.ended = True self.logdebug('added %r', msg) def get_msg(self, req): assert req.handle == self.handle offset = self.cache[0].idx - req.idx if self.cache else -1 if offset < 0: return None try: msg = self.cache[offset] except IndexError: raise RequestedMessageTooOld(req, offset) assert msg.data is not None self.logdebug('return %r', msg) return msg def create_stream(self, name, group, header=None): # TODO: check name collision? handle = Handle(self.setid, self.node, name, group=group, header=header) return type(self)(handle, parent=self)
agpl-3.0
6,142,566,103,589,658,000
27.688995
98
0.594563
false
3.85843
false
false
false
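A minimal, pure-Python sketch of the message-index convention used by Handle in the marv stream module above, independent of the marv classes themselves.
# itertools.count(-1) yields -1 for the stream's own handle message,
# then 0, 1, 2, ... for the data messages that follow.
from itertools import count

counter = count(-1)
print([next(counter) for _ in range(4)])   # [-1, 0, 1, 2]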
satheeshgopalan/python
CurrencyConverter.py
1
1588
""" GUI APP to convert currency 1 to currency 2's value given exchange rate #Author : Satheesh Gopalan """ import Tkinter from Tkinter import * import tkMessageBox def OnClick(): """ Handle the Button click's Gets input Verifies that input is valid """ currency1 = 0 exchange_rate = 0 state = 0 try: currency1 = float(E1.get()) exchange_rate = float(E2.get()) except ValueError: print "Bad Input!" state = 1 E1.delete(0, END) E2.delete(0,END) result(currency1,exchange_rate,state) def result(x,y,z): """ Print the Result in message box Variable z is to verify that input are valid (in GOOD STATE) Variable x and y are inputs """ if z ==1 : tkMessageBox.showinfo("RESULTS", "Invalid Input !! \n Please Try Again !! \n\n Press \"OK\" to quit.") root.quit() else : r = x * y tkMessageBox.showinfo("RESULTS", "Value in Currency 2 is : " + str(r) + "\n Rounded off Value :" + str(round(r,2))) # GUI root = Tk() root.title("Currency Converter") #Labels & Enteries L1 = Label(root, text="Currency 1 Amount ") L1.pack() L1.grid_location(0,0) E1 = Entry(root , bd =5 ) E1.pack() L1.grid_location(20,0) L2 = Label(root, text="Exchange Rate ") L2.pack() L2.grid_location(0,20) E2 = Entry(root , bd =5 ) E2.pack() L2.grid_location(20,30) #BUTTON B = Tkinter.Button(root, text ="Convert!",command = OnClick ) B.pack() B.grid_location(30,30) #LOOP IT TILL YOU MAKE IT root.mainloop()
mit
-4,386,792,213,332,889,600
19.358974
123
0.600126
false
3.125984
false
false
false
Gab0/gekkoJaponicus
evaluation/gekko/statistics.py
1
2549
#!/bin/python from deap import tools import numpy as np epochStatisticsNames = { 'avg': 'Average profit', 'std': 'Profit variation', 'min': 'Minimum profit', 'max': 'Maximum profit', 'size': 'Population size', 'maxsize': 'Max population size', 'avgTrades': 'Avg trade number', 'sharpe': 'Avg sharpe ratio', 'avgExposure': "Avg exposure time", 'nbElderDies': 'Elder dies count' } periodicStatisticsNames = { 'evaluationScore': "Evaluation Score", 'evaluationScoreOnSecondary': "Score on Secondary Dataset" } def compileStats(locale): # --get proper evolution statistics; Stats = locale.stats.compile(locale.population) Stats['dateRange'] = ' '.join([DR.textDaterange() for DR in locale.Dataset])\ if not locale.EPOCH else None Stats['maxsize'] = locale.POP_SIZE Stats['size'] = len(locale.population) Stats['avgTrades'] = locale.extraStats['avgTrades'] Stats['avgExposure'] = locale.extraStats['avgExposure'] #Stats['nbElderDies'] = locale.extraStats['nbElderDies'] Stats['sharpe'] = np.mean([x.fitness.values[1] for x in locale.population]) Stats['evaluationScoreOnSecondary'] = locale.lastEvaluationOnSecondary Stats['evaluationScore'] = locale.lastEvaluation locale.lastEvaluationOnSecondary = None locale.lastEvaluation = None Stats['id'] = locale.EPOCH locale.EvolutionStatistics.append(Stats) locale.World.logger.write_evolution_logs( locale.EPOCH, locale.EvolutionStatistics, locale.name ) def showStatistics(locale): # show information; Stats = locale.EvolutionStatistics[locale.EPOCH] print("EPOCH %i\t&%i" % (locale.EPOCH, locale.extraStats['nb_evaluated'])) statnames = ['max', 'avg', 'min', 'std', 'size', 'maxsize', 'avgTrades', 'sharpe', 'avgExposure', # 'nbElderDies' ] statisticsText = [] for s in range(len(statnames)): SNAME = statnames[s] SVAL = Stats[SNAME] currentStatisticsText = "%s" % epochStatisticsNames[SNAME] if not SVAL % 1: currentStatisticsText += " %i" % SVAL else: currentStatisticsText += " %.3f" % SVAL statisticsText.append(currentStatisticsText) columnWidth = max([len(STXT) for STXT in statisticsText]) + 3 for j in range(0, len(statisticsText), 2): print(''.join(word.ljust(columnWidth) for word in statisticsText[j:j+2])) print()
mit
-2,450,411,621,466,724,000
33.917808
81
0.633582
false
3.667626
false
false
false
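The two-column console layout in showStatistics above reduces to str.ljust padding; a standalone sketch with made-up statistic strings.
# Standalone sketch of the two-column padding used by showStatistics above.
statisticsText = ["Average profit 1.234", "Profit variation 0.567",
                  "Minimum profit -0.100", "Maximum profit 3.210"]
columnWidth = max(len(s) for s in statisticsText) + 3

for j in range(0, len(statisticsText), 2):
    print(''.join(word.ljust(columnWidth) for word in statisticsText[j:j + 2]))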
tclose/FouTS
sysconf/linux.py
1
1244
from sysconf.common import *

obj_suffix = '.o'
exe_suffix = ''
lib_prefix = 'lib'
lib_suffix = '.so'

cpp = [ 'g++', '-c', '$flags$', '$path$', '$src$', '-o', '$obj$' ]
cpp_flags = [ '-Wall', '-march=x86-64', '-fPIC', '-Wno-deprecated']
cflags_thread = []

ld = [ 'g++', '$flags$', '$path$', '$obj$', '$mrtrix$', '-o', '$bin$' ]
# note the comma after '-L/usr/local/lib/': without it the two adjacent string
# literals would be concatenated into a single, meaningless linker flag
ld_flags = [ '-lm', '-L/usr/lib/sse2', '-lcblas', '-latlas', '-llapack_atlas', '-lgfortran', '-L/usr/local/lib/', '-lgslcblas', '-lgsl' ]
ld_flags_lib_prefix = '-l'
libs_thread = [ '-lpthread' ]

ld_lib = [ 'g++', '-shared', '$flags$', '$obj$', '-o', '$lib$' ]
ld_lib_flags = []

cpp_flags_debug = cpp_flags + [ '-O0', '-g3', '-D_GLIBCXX_DEBUG' ]
ld_flags_debug = ld_flags + [ '-g3' ]
ld_lib_flags_debug = ld_lib_flags + [ '-g3' ]

cpp_flags += [ '-DNDEBUG', '-DOPTIMISED']

cpp_flags_profile = cpp_flags + [ '-g', '-pg' ]
ld_flags_profile = ld_flags + [ '-g', '-pg' ]
ld_lib_flags_profile = ld_lib_flags + [ '-g', '-pg' ]

cpp_flags += [ '-O3' ]

ld_flags_gl = []

pkgconfig = [ 'pkg-config' ]
pkgconfig_env = None

# Qt4 settings:
qt4_path = '/usr/include/qt4'
qt4_include_path = [ qt4_path ] + [ qt4_path + '/Qt' + entry for entry in qt4_modules ]
qt4_lib_flags += [ '-lQt' + entry for entry in qt4_modules ]
gpl-3.0
4,048,223,657,805,592,600
28.619048
136
0.545016
false
2.424951
false
false
false
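The '$flags$', '$src$', '$obj$' tokens in the command templates above are placeholders that the repository's build scripts substitute elsewhere; those scripts are not shown here, so the helper below is only a hypothetical sketch of that substitution, not FouTS's actual implementation.
# Hypothetical expansion of a command template such as `cpp` above.
cpp = ['g++', '-c', '$flags$', '$path$', '$src$', '-o', '$obj$']

def expand(template, **values):
    # Replace each '$name$' token with the (possibly multi-element) value.
    cmd = []
    for token in template:
        if token.startswith('$') and token.endswith('$') and len(token) > 2:
            value = values.get(token.strip('$'), [])
            cmd.extend(value if isinstance(value, list) else [value])
        else:
            cmd.append(token)
    return cmd

print(expand(cpp, flags=['-Wall', '-O3'], path=['-Isrc'],
             src='main.cpp', obj='main.o'))
# ['g++', '-c', '-Wall', '-O3', '-Isrc', 'main.cpp', '-o', 'main.o']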
stephane-martin/salt-debian-packaging
salt-2016.3.2/salt/modules/influx.py
2
16445
# -*- coding: utf-8 -*- ''' InfluxDB - A distributed time series database Module to provide InfluxDB compatibility to Salt (compatible with InfluxDB version 0.5+) .. versionadded:: 2014.7.0 :depends: - influxdb Python module :configuration: This module accepts connection configuration details either as parameters or as configuration settings in /etc/salt/minion on the relevant minions:: influxdb.host: 'localhost' influxdb.port: 8086 influxdb.user: 'root' influxdb.password: 'root' This data can also be passed into pillar. Options passed into opts will overwrite options passed into pillar. ''' # Import Python libs from __future__ import absolute_import try: import influxdb HAS_INFLUXDB = True except ImportError: HAS_INFLUXDB = False import logging log = logging.getLogger(__name__) # Define the module's virtual name __virtualname__ = 'influxdb' def __virtual__(): ''' Only load if influxdb lib is present ''' if HAS_INFLUXDB: return __virtualname__ return (False, 'The influx execution module cannot be loaded: influxdb library not available.') def _client(user=None, password=None, host=None, port=None): if not user: user = __salt__['config.option']('influxdb.user', 'root') if not password: password = __salt__['config.option']('influxdb.password', 'root') if not host: host = __salt__['config.option']('influxdb.host', 'localhost') if not port: port = __salt__['config.option']('influxdb.port', 8086) return influxdb.InfluxDBClient( host=host, port=port, username=user, password=password) def db_list(user=None, password=None, host=None, port=None): ''' List all InfluxDB databases user The user to connect as password The password of the user host The host to connect to port The port to connect to CLI Example: .. code-block:: bash salt '*' influxdb.db_list salt '*' influxdb.db_list <user> <password> <host> <port> ''' client = _client(user=user, password=password, host=host, port=port) return client.get_list_database() def db_exists(name, user=None, password=None, host=None, port=None): ''' Checks if a database exists in InfluxDB name Database name to create user The user to connect as password The password of the user host The host to connect to port The port to connect to CLI Example: .. code-block:: bash salt '*' influxdb.db_exists <name> salt '*' influxdb.db_exists <name> <user> <password> <host> <port> ''' dbs = db_list(user, password, host, port) if not isinstance(dbs, list): return False return name in [db['name'] for db in dbs] def db_create(name, user=None, password=None, host=None, port=None): ''' Create a database name Database name to create user The user to connect as password The password of the user host The host to connect to port The port to connect to CLI Example: .. code-block:: bash salt '*' influxdb.db_create <name> salt '*' influxdb.db_create <name> <user> <password> <host> <port> ''' if db_exists(name, user, password, host, port): log.info('DB \'{0}\' already exists'.format(name)) return False client = _client(user=user, password=password, host=host, port=port) client.create_database(name) return True def db_remove(name, user=None, password=None, host=None, port=None): ''' Remove a database name Database name to remove user The user to connect as password The password of the user host The host to connect to port The port to connect to CLI Example: .. 
code-block:: bash salt '*' influxdb.db_remove <name> salt '*' influxdb.db_remove <name> <user> <password> <host> <port> ''' if not db_exists(name, user, password, host, port): log.info('DB \'{0}\' does not exist'.format(name)) return False client = _client(user=user, password=password, host=host, port=port) return client.delete_database(name) def user_list(database=None, user=None, password=None, host=None, port=None): ''' List cluster admins or database users. If a database is specified: it will return database users list. If a database is not specified: it will return cluster admins list. database The database to list the users from user The user to connect as password The password of the user host The host to connect to port The port to connect to CLI Example: .. code-block:: bash salt '*' influxdb.user_list salt '*' influxdb.user_list <database> salt '*' influxdb.user_list <database> <user> <password> <host> <port> ''' client = _client(user=user, password=password, host=host, port=port) if database: client.switch_database(database) if hasattr(client, 'get_list_cluster_admins') and not database: return client.get_list_cluster_admins() return client.get_list_users() def user_exists( name, database=None, user=None, password=None, host=None, port=None): ''' Checks if a cluster admin or database user exists. If a database is specified: it will check for database user existence. If a database is not specified: it will check for cluster admin existence. name User name database The database to check for the user to exist user The user to connect as password The password of the user host The host to connect to port The port to connect to CLI Example: .. code-block:: bash salt '*' influxdb.user_exists <name> salt '*' influxdb.user_exists <name> <database> salt '*' influxdb.user_exists <name> <database> <user> <password> <host> <port> ''' users = user_list(database, user, password, host, port) if not isinstance(users, list): return False for user in users: # the dict key could be different depending on influxdb version username = user.get('user', user.get('name')) if username: if username == name: return True else: log.warning('Could not find username in user: %s', user) return False def user_create(name, passwd, database=None, user=None, password=None, host=None, port=None): ''' Create a cluster admin or a database user. If a database is specified: it will create database user. If a database is not specified: it will create a cluster admin. name User name for the new user to create passwd Password for the new user to create database The database to create the user in user The user to connect as password The password of the user host The host to connect to port The port to connect to CLI Example: .. 
code-block:: bash salt '*' influxdb.user_create <name> <passwd> salt '*' influxdb.user_create <name> <passwd> <database> salt '*' influxdb.user_create <name> <passwd> <database> <user> <password> <host> <port> ''' if user_exists(name, database, user, password, host, port): if database: log.info('User \'{0}\' already exists for DB \'{1}\''.format( name, database)) else: log.info('Cluster admin \'{0}\' already exists'.format(name)) return False client = _client(user=user, password=password, host=host, port=port) if database: client.switch_database(database) # influxdb 0.9+ if hasattr(client, 'create_user'): client.create_user(name, passwd) return True # influxdb 0.8 and older if database: return client.add_database_user(name, passwd) return client.add_cluster_admin(name, passwd) def user_chpass(name, passwd, database=None, user=None, password=None, host=None, port=None): ''' Change password for a cluster admin or a database user. If a database is specified: it will update database user password. If a database is not specified: it will update cluster admin password. name User name for whom to change the password passwd New password database The database on which to operate user The user to connect as password The password of the user host The host to connect to port The port to connect to CLI Example: .. code-block:: bash salt '*' influxdb.user_chpass <name> <passwd> salt '*' influxdb.user_chpass <name> <passwd> <database> salt '*' influxdb.user_chpass <name> <passwd> <database> <user> <password> <host> <port> ''' if not user_exists(name, database, user, password, host, port): if database: log.info( 'User \'{0}\' does not exist for DB \'{1}\''.format( name, database ) ) else: log.info('Cluster admin \'{0}\' does not exist'.format(name)) return False client = _client(user=user, password=password, host=host, port=port) if database: client.switch_database(database) return client.update_database_user_password(name, passwd) return client.update_cluster_admin_password(name, passwd) def user_remove(name, database=None, user=None, password=None, host=None, port=None): ''' Remove a cluster admin or a database user. If a database is specified: it will remove the database user. If a database is not specified: it will remove the cluster admin. name User name to remove database The database to remove the user from user User name for the new user to delete user The user to connect as password The password of the user host The host to connect to port The port to connect to CLI Example: .. code-block:: bash salt '*' influxdb.user_remove <name> salt '*' influxdb.user_remove <name> <database> salt '*' influxdb.user_remove <name> <database> <user> <password> <host> <port> ''' if not user_exists(name, database, user, password, host, port): if database: log.info( 'User \'{0}\' does not exist for DB \'{1}\''.format( name, database ) ) else: log.info('Cluster admin \'{0}\' does not exist'.format(name)) return False client = _client(user=user, password=password, host=host, port=port) if database: client.switch_database(database) return client.delete_database_user(name) return client.delete_cluster_admin(name) def retention_policy_get(database, name, user=None, password=None, host=None, port=None): ''' Get an existing retention policy. database The database to operate on. name Name of the policy to modify. CLI Example: .. 
code-block:: bash salt '*' influxdb.retention_policy_get metrics default ''' client = _client(user=user, password=password, host=host, port=port) for policy in client.get_list_retention_policies(database): if policy['name'] == name: return policy return None def retention_policy_exists(database, name, user=None, password=None, host=None, port=None): ''' Check if a retention policy exists. database The database to operate on. name Name of the policy to modify. CLI Example: .. code-block:: bash salt '*' influxdb.retention_policy_exists metrics default ''' policy = retention_policy_get(database, name, user, password, host, port) return policy is not None def retention_policy_add(database, name, duration, replication, default=False, user=None, password=None, host=None, port=None): ''' Add a retention policy. database The database to operate on. name Name of the policy to modify. duration How long InfluxDB keeps the data. replication How many copies of the data are stored in the cluster. default Whether this policy should be the default or not. Default is False. CLI Example: .. code-block:: bash salt '*' influxdb.retention_policy_add metrics default 1d 1 ''' client = _client(user=user, password=password, host=host, port=port) client.create_retention_policy(name, duration, replication, database, default) return True def retention_policy_alter(database, name, duration, replication, default=False, user=None, password=None, host=None, port=None): ''' Modify an existing retention policy. database The database to operate on. name Name of the policy to modify. duration How long InfluxDB keeps the data. replication How many copies of the data are stored in the cluster. default Whether this policy should be the default or not. Default is False. CLI Example: .. code-block:: bash salt '*' influxdb.retention_policy_modify metrics default 1d 1 ''' client = _client(user=user, password=password, host=host, port=port) client.alter_retention_policy(name, database, duration, replication, default) return True def query(database, query, time_precision='s', chunked=False, user=None, password=None, host=None, port=None): ''' Querying data database The database to query query Query to be executed time_precision Time precision to use ('s', 'm', or 'u') chunked Whether is chunked or not user The user to connect as password The password of the user host The host to connect to port The port to connect to CLI Example: .. code-block:: bash salt '*' influxdb.query <database> <query> salt '*' influxdb.query <database> <query> <time_precision> <chunked> <user> <password> <host> <port> ''' client = _client(user=user, password=password, host=host, port=port) client.switch_database(database) return client.query(query, time_precision=time_precision, chunked=chunked) def login_test(name, password, database=None, host=None, port=None): ''' Checks if a credential pair can log in at all. If a database is specified: it will check for database user existence. If a database is not specified: it will check for cluster admin existence. name The user to connect as password The password of the user database The database to try to log in to host The host to connect to port The port to connect to CLI Example: .. 
code-block:: bash salt '*' influxdb.login_test <name> salt '*' influxdb.login_test <name> <database> salt '*' influxdb.login_test <name> <database> <user> <password> <host> <port> ''' try: client = _client(user=name, password=password, host=host, port=port) client.get_list_database() return True except influxdb.client.InfluxDBClientError as e: if e.code == 401: return False else: raise
apache-2.0
-6,111,658,236,898,173,000
23.954476
109
0.593676
false
4.193014
false
false
false
splanger/dbmake
dbmake/helper.py
1
1625
import pyclbr
import re


def find_string_between(s, start, end):
    """
    Returns a substring between two other substrings
    :param s: Text
    :param start: A substring to start extracting the desired substring from
    :param end: A substring to stop extracting the desired substring on
    :return:
    """
    return re.search('%s(.*)%s' % (start, end), s).group(1)


def underscore_to_camelcase(s):
    l = s.split("_")
    return "".join(map(str.capitalize, l[:]))


def camelcase(s, delimeter="_"):
    l = s.split(delimeter)
    return "".join(map(str.capitalize, l[:]))


def get_module_classes(module_name):
    return pyclbr.readmodule(module_name).keys()


def get_class(fully_qualified_path):
    # Source: http://stackoverflow.com/questions/452969/does-python-have-an-equivalent-to-java-class-forname
    parts = fully_qualified_path.split('.')
    module = ".".join(parts[:-1])
    m = __import__(module)
    for comp in parts[1:]:
        m = getattr(m, comp)
    return m


def get_class_instance(fully_qualified_path, module_name, class_name, *instantiation):
    """
    Returns an instantiated class for the given string descriptors
    :param fully_qualified_path: The path to the module eg("Utilities.Printer")
    :param module_name: The module name eg("Printer")
    :param class_name: The class name eg("ScreenPrinter")
    :param instantiation: Any fields required to instantiate the class
    :return: An instance of the class
    """
    p = __import__(fully_qualified_path)
    m = getattr(p, module_name)
    c = getattr(m, class_name)
    instance = c(*instantiation)
    return instance
apache-2.0
-2,724,419,628,980,167,700
29.660377
108
0.669538
false
3.611111
false
false
false
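Small usage examples for the string helpers above; the import path assumes the module is importable as dbmake.helper, and the expected outputs are shown in comments.
# Example calls against the helpers defined in helper.py above.
from dbmake.helper import find_string_between, underscore_to_camelcase, camelcase

print(find_string_between("id=42;", "id=", ";"))   # 42
print(underscore_to_camelcase("my_table_name"))     # MyTableName
print(camelcase("my-table-name", delimeter="-"))    # MyTableName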
sobomax/virtualbox_64bit_edd
src/VBox/ValidationKit/testmanager/webui/wuireport.py
2
6010
# -*- coding: utf-8 -*- # $Id: wuireport.py $ """ Test Manager WUI - Reports. """ __copyright__ = \ """ Copyright (C) 2012-2015 Oracle Corporation This file is part of VirtualBox Open Source Edition (OSE), as available from http://www.virtualbox.org. This file is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License (GPL) as published by the Free Software Foundation, in version 2 as it comes in the "COPYING" file of the VirtualBox OSE distribution. VirtualBox OSE is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY of any kind. The contents of this file may alternatively be used under the terms of the Common Development and Distribution License Version 1.0 (CDDL) only, as it comes in the "COPYING.CDDL" file of the VirtualBox OSE distribution, in which case the provisions of the CDDL are applicable instead of those of the GPL. You may elect to license modified versions of this file under the terms and conditions of either the GPL or the CDDL or both. """ __version__ = "$Revision: 100880 $" # Validation Kit imports. from testmanager.webui.wuicontentbase import WuiContentBase; from testmanager.webui.wuihlpgraph import WuiHlpGraphDataTable, WuiHlpBarGraph; from testmanager.core.report import ReportModelBase; class WuiReportBase(WuiContentBase): """ Base class for the reports. """ def __init__(self, oModel, dParams, fSubReport = False, fnDPrint = None, oDisp = None): WuiContentBase.__init__(self, fnDPrint = fnDPrint, oDisp = oDisp); self._oModel = oModel; self._dParams = dParams; self._fSubReport = fSubReport; self._sTitle = None; def generateNavigator(self, sWhere): """ Generates the navigator (manipulate _dParams). Returns HTML. """ assert sWhere == 'top' or sWhere == 'bottom'; return ''; def generateReportBody(self): """ This is overridden by the child class to generate the report. Returns HTML. """ return '<h3>Must override generateReportBody!</h3>'; def show(self): """ Generate the report. Returns (sTitle, HTML). """ sTitle = self._sTitle if self._sTitle is not None else type(self).__name__; sReport = self.generateReportBody(); if not self._fSubReport: sReport = self.generateNavigator('top') + sReport + self.generateNavigator('bottom'); sTitle = self._oModel.sSubject + ' - ' + sTitle; ## @todo add subject to title in a proper way! return (sTitle, sReport); class WuiReportSuccessRate(WuiReportBase): """ Generates a report displaying the success rate over time. 
""" def generateReportBody(self): self._sTitle = 'Success rate'; adPeriods = self._oModel.getSuccessRates(); sReport = ''; oTable = WuiHlpGraphDataTable('Period', [ 'Succeeded', 'Skipped', 'Failed' ]); #for i in range(len(adPeriods) - 1, -1, -1): for i in range(len(adPeriods)): dStatuses = adPeriods[i]; cSuccess = dStatuses[ReportModelBase.ksTestStatus_Success] + dStatuses[ReportModelBase.ksTestStatus_Skipped]; cTotal = cSuccess + dStatuses[ReportModelBase.ksTestStatus_Failure]; sPeriod = self._oModel.getPeriodDesc(i); if cTotal > 0: iPctSuccess = dStatuses[ReportModelBase.ksTestStatus_Success] * 100 / cTotal; iPctSkipped = dStatuses[ReportModelBase.ksTestStatus_Skipped] * 100 / cTotal; iPctFailure = dStatuses[ReportModelBase.ksTestStatus_Failure] * 100 / cTotal; oTable.addRow(sPeriod, [ iPctSuccess, iPctSkipped, iPctFailure ], [ '%s%% (%d)' % (iPctSuccess, dStatuses[ReportModelBase.ksTestStatus_Success]), '%s%% (%d)' % (iPctSkipped, dStatuses[ReportModelBase.ksTestStatus_Skipped]), '%s%% (%d)' % (iPctFailure, dStatuses[ReportModelBase.ksTestStatus_Failure]), ]); else: oTable.addRow(sPeriod, [ 0, 0, 0 ], [ '0%', '0%', '0%' ]); cTotalNow = adPeriods[0][ReportModelBase.ksTestStatus_Success]; cTotalNow += adPeriods[0][ReportModelBase.ksTestStatus_Skipped]; cSuccessNow = cTotalNow; cTotalNow += adPeriods[0][ReportModelBase.ksTestStatus_Failure]; sReport += '<p>Current success rate: '; if cTotalNow > 0: sReport += '%s%% (thereof %s%% skipped)</p>\n' \ % ( cSuccessNow * 100 / cTotalNow, adPeriods[0][ReportModelBase.ksTestStatus_Skipped] * 100 / cTotalNow); else: sReport += 'N/A</p>\n' oGraph = WuiHlpBarGraph('success-rate', oTable, self._oDisp); oGraph.setRangeMax(100); sReport += oGraph.renderGraph(); return sReport; class WuiReportFailureReasons(WuiReportBase): """ Generates a report displaying the failure reasons over time. """ def generateReportBody(self): # Mockup. self._sTitle = 'Success rate'; return '<p>Graph showing COUNT(idFailureReason) grouped by time period.</p>' \ '<p>New reasons per period, tracked down to build revision.</p>' \ '<p>Show graph content in table form.</p>'; class WuiReportSummary(WuiReportBase): """ Summary report. """ def generateReportBody(self): self._sTitle = 'Summary'; sHtml = '<p>This will display several reports and listings useful to get an overview of %s (id=%s).</p>' \ % (self._oModel.sSubject, self._oModel.aidSubjects,); oSuccessRate = WuiReportSuccessRate(self._oModel, self._dParams, fSubReport = True, fnDPrint = self._fnDPrint, oDisp = self._oDisp); sHtml += oSuccessRate.show()[1]; return sHtml;
gpl-2.0
401,514,084,464,273,340
37.037975
126
0.624958
false
3.703019
true
false
false
darongliu/Lstm_Turing_LM_tf
all_model_prototype/model.py
1
7416
import tensorflow as tf def inference(input_x, embedding_dim, lstm_hidden_dim_1, lstm_hidden_dim_2=None, vocab_size, dropout=None) : """ Args: input_x: 2D tensor batch_size X time_step embedding_dim: embedding dimension hidden_unit_list: list of the hidden unit size lstm_hidden_dim_1: the dimension of the hidden unit of the bottom lstm lstm_hidden_dim_2(optional): the dimension of the hidden unit of the top lstm Returns: """ with tf.name_scope('embedding'): init_width = 0.5 / embedding_dim emb = tf.Variable( tf.random_uniform( [vocab_size, embedding_dim], -init_width, init_width), name="emb") input_emb = tf.nn.embedding_lookup(emb, input_x) with tf.name_scope('recurrent_layer1'): cell = tf.nn.rnn_cell.LSTMCell(lstm_hidden_dim_1, state_is_tuple=True) if dropout: cell = tf.nn.rnn_cell.DropoutWrapper(cell, output_keep_prob=dropout) initial_state_vector = tf.get_variable('initial_state_vector', [1, lstm_hidden_dim_1]) initial_state = tf.tile(initial_state_vector, [tf.shape(input_x)[0], 1]) lstm1_outputs, final_state = tf.nn.dynamic_rnn(cell, input_emb, initial_state=initial_state) #lstm1_outputs: [batch_size, num_steps, state_size] with tf.name_scope('attention_layer'): #using scan #TODO memory = tf.concat([initial_state,lstm1_outputs],1) #memory: [batch_size, num_steps, state_size] W_key = tf.get_variable('W_key', [lstm_hidden_dim_1, lstm_hidden_dim_1]) b_key = tf.get_variable('b_key', [lstm_hidden_dim_1], initializer=tf.constant_initializer(0.0)) W_memory_selection_1 = tf.get_variable('W_memory_selection_1', [lstm_hidden_dim_1, lstm_hidden_dim_1]) b_memory_selection_1 = tf.get_variable('b_memory_selection_1', [lstm_hidden_dim_1], initializer=tf.constant_initializer(0.0)) W_memory_selection_2 = tf.get_variable('W_memory_selection_2', [lstm_hidden_dim_1, lstm_hidden_dim_1]) b_memory_selection_2 = tf.get_variable('b_memory_selection_2', [lstm_hidden_dim_1], initializer=tf.constant_initializer(0.0)) def step(_,time_step): """ args: timestep:from 0 to the last time step """ current_hidden = memory[:,time_step+1,:] previous_hidden = memory[:,:time_step+1,:] #previous_hidden: [batch_size, num_steps, state_size] memory_selection_1 = tf.sigmoid(tf.matmul(current_hidden,W_memory_selection_1) + b_memory_selection_1) memory_selection_2 = tf.sigmoid(tf.matmul(current_hidden,W_memory_selection_2) + b_memory_selection_2) #memory selection: [batch_size, state_size] key = tf.matmul(current_hidden,W_key) + b_key #key: [batch_size, state_size] memory_selection_1_as_matrices = tf.expand_dims(memory_selection_1,1) similarity_temp = tf.multiply(previous_hidden,memory_selection_1_as_matrices) key_as_matrices = tf.expand_dims(key,2) similarity = tf.batch_matmul(similarity_temp, key_as_matrices) #similarity = [batch_size, num_steps] weight = tf.nn.softmax(similarity) entropy_temp = -tf.multiply(weight,tf.log(weight)) entropy = tf.reduce_sum(entropy_temp,1) weight_pad_length = tf.shape(input_x)[1] - time_step - 1 weight_pad = tf.concat([weight,tf.zeros([tf.shape(input_x)[0],weight_pad_length])],1) #for weight visualization memory_selection_2_as_matrices = tf.expand_dims(memory_selection_2,1) attention_temp = tf.multiply(previous_hidden,memory_selection_2_as_matrices) weight_as_matrics = tf.expand_dims(weight,1) attention = tf.batch_matmul(weight_as_matrics, previous_hidden) attention = tf.squeeze(attention,[1]) return attention, weight_pad, entropy time_step_sequence = tf.range(0,tf.shape(input_x)[2]) initializer = [tf.zeros([tf.shape(input_x)[0], lstm_hidden_dim_1]), tf.zeros_like(input_x), 
tf.zeros([tf.shape(input_x)[0],])] att_outputs, weight_outputs, entropy_outputs = \ tf.scan(step,time_step_sequence,initializer=initializer) with th.name_scope('merge_layer'): if merge_mode == 'concat': att_lstm_outputs = tf.concat([lstm1_outputs,att_outputs],2) else if merge_mode == 'matrix': merge_matrix = tf.Variable( tf.random_uniform( [lstm_hidden_dim_1, lstm_hidden_dim_1], -1, 1), name="merge_matrix") att_lstm_outputs = tf.matmul(lstm1_outputs,att_outputs) + lstm1_outputs else if merge_mode == 'alpha': merge_alpha = tf.Variable( 1, name="merge_alpha") att_lstm_outputs = merge_alpha*lstm1_outputs + (1-merge_alpha)*att_outputs else: print("unknown merge mode, using concat") att_lstm_outputs = tf.concat([lstm1_outputs,att_outputs],2) with tf.name_scope('recurrent_layer2'): cell = tf.nn.rnn_cell.LSTMCell(lstm_hidden_dim_2, state_is_tuple=True) if dropout: cell = tf.nn.rnn_cell.DropoutWrapper(cell, output_keep_prob=dropout) initial_state_vector = tf.get_variable('initial_state_vector', [1, lstm_hidden_dim_2]) initial_state = tf.tile(initial_state_vector, [tf.shape(input_x)[0], 1]) lstm2_outputs, final_state = tf.nn.dynamic_rnn(cell, att_lstm_outputs, initial_state=initial_state) att_lstm_outputs = lstm2_outputs with tf.name_scope('output_lstm_linear'): W = tf.get_variable('W', [lstm_hidden_dim_2, vocab_size],name="w") b = tf.get_variable('b', [vocab_size], initializer=tf.constant_initializer(0.0),name="b") logits = tf.matmul(att_lstm_outputs, W) + b #with tf.name_scope('entropy_loss') : #dropout, pretrain #add pretrain_param, output_linear_param return logits def loss(logits, labels, entropy=None, entropy_reg=0) : """ args: logits: [batch_size, num_steps, vocab_size] labels: [batch_size, num_steps] return : label_loss: for calculating perplexity loss: for training evaluate perplexity: exp(average of the entropy per word) """ cross_entropy_result = tf.nn.sparse_softmax_cross_entropy_with_logits(labels=labels, logits=logits) label_loss = tf.reduce_sum(cross_entropy_result) #devide vocab size loss = tf.reduce_mean(cross_entropy_result) if entropy : loss = label_loss + entropy_reg*entropy return label_loss, loss def training(loss, learning_rate) : # Add a scalar summary for the snapshot loss. tf.summary.scalar('loss', loss) # Create the gradient descent optimizer with the given learning rate. optimizer = tf.train.GradientDescentOptimizer(learning_rate) # Create a variable to track the global step. global_step = tf.Variable(0, name='global_step', trainable=False) # Use the optimizer to apply the gradients that minimize the loss # (and also increment the global step counter) as a single training step. train_op = optimizer.minimize(loss, global_step=global_step) return train_op
mit
-7,664,722,447,724,385,000
43.674699
133
0.628506
false
3.447699
false
false
false
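The loss() docstring above defines perplexity as exp(average cross-entropy per word); a small numpy illustration of that relationship using made-up per-word values, not taken from the model.
# perplexity = exp(total cross-entropy / number of words); values are examples.
import numpy as np

per_word_cross_entropy = np.array([2.1, 3.0, 1.7, 2.4])
label_loss = per_word_cross_entropy.sum()
perplexity = np.exp(label_loss / len(per_word_cross_entropy))
print(label_loss, perplexity)   # 9.2  ~9.97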
mice-software/maus
bin/online/reconstruct_daq_emr_reducer.py
1
2746
#!/usr/bin/env python ################################################################# ###!!! YOU ARE NOT ALLOWED TO MODIFY THIS FILE DIRECTLY !!!### ###!!! PLEASE MAKE A COPY OF THIS FILE WITH THE CP COMMAND !!!### ################################################################# """ Online analysis to produce reconstructed events from the MICE Experiment. """ import MAUS import io def run(): """ Analyze data from the MICE experiment This reads in and processes data taken from the MICE experiment. """ # Set up data cards. data_cards_list = [] # batch mode = runs ROOT in batch mode so that canvases are not displayed # 1 = True, Batch Mode # 0 = False, Interactive Mode # setting it to false/0 will cause canvases to pop up on screen and # will get refreshed every N spills set by the refresh_rate data # card. data_cards_list.append("root_batch_mode=%d\n" % 1) # refresh_rate = once in how many spills should canvases be updated data_cards_list.append("refresh_rate=%d\n" % 5) # Add auto-numbering to the image tags. If False then each # histogram output for successive spills will have the same tag # so there are no spill-specific histograms. This is the # recommended use for online reconstruction. data_cards_list.append("histogram_auto_number=%s\n" % False) # Default image type is eps. For online use, use PNG. data_cards_list.append("histogram_image_type=\"png\"\n") # Directory for images. Default: $MAUS_WEB_MEDIA_RAW if set # else the current directory is used. # Uncomment and change the following if you want to hard # code a different default path. #data_cards_list.append("image_directory='%s'\n" % os.getcwd()) # Convert data_cards to string. data_cards = io.StringIO(unicode("".join(data_cards_list))) # Set up the input that reads from DAQ # my_input = MAUS.InputCppDAQData() my_input = MAUS.InputCppDAQOfflineData() # my_input = MAUS.InputCppDAQOnlineData() # pylint: disable = E1101 # Create an empty array of mappers, then populate it # with the functionality you want to use. my_map = MAUS.MapPyGroup() my_map.append(MAUS.MapCppReconSetup()) my_map.append(MAUS.MapCppEMRPlaneHits()) my_map.append(MAUS.MapCppEMRSpacePoints()) my_map.append(MAUS.MapCppEMRRecon()) # Histogram reducer. #reducer = MAUS.ReducePyDoNothing() reducer = MAUS.ReduceCppEMRPlot() # Save images as eps and meta-data as JSON. #output_worker = MAUS.OutputPyDoNothing() output_worker = MAUS.OutputPyRootImage() # Run the workflow. MAUS.Go(my_input, my_map, reducer, output_worker, data_cards) if __name__ == '__main__': run()
gpl-3.0
-276,372,884,407,744,030
36.616438
77
0.645302
false
3.584856
false
false
false
teamtuga4/teamtuga4ever.repository
script.module.urlresolver/lib/urlresolver/plugins/idowatch.py
3
1963
""" grifthost urlresolver plugin Copyright (C) 2015 tknorris This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program. If not, see <http://www.gnu.org/licenses/>. """ import re from urlresolver import common from urlresolver.resolver import UrlResolver, ResolverError class IDoWatchResolver(UrlResolver): name = "idowatch" domains = ["idowatch.net"] pattern = '(?://|\.)(idowatch\.net)/([0-9a-zA-Z]+)\.html' def __init__(self): self.net = common.Net() def get_media_url(self, host, media_id): web_url = self.get_url(host, media_id) html = self.net.http_GET(web_url).content match = re.search('''["']?sources['"]?\s*:\s*\[(.*?)\]''', html, re.DOTALL) if match: for match in re.finditer('''['"]?file['"]?\s*:\s*['"]?([^'"]+)''', match.group(1), re.DOTALL): stream_url = match.group(1) if not stream_url.endswith('smil'): return match.group(1) + '|User-Agent=%s' % (common.FF_USER_AGENT) raise ResolverError('Unable to resolve idowatch link. Filelink not found.') def get_url(self, host, media_id): return 'http://idowatch.net/%s' % (media_id) def get_host_and_id(self, url): r = re.search(self.pattern, url) if r: return r.groups() else: return False def valid_url(self, url, host): return re.search(self.pattern, url) or self.name in host
gpl-2.0
5,450,925,819,580,143,000
35.351852
106
0.640856
false
3.608456
false
false
false
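The host and media-id extraction in the resolver above comes down to its class-level pattern; a standalone check of what it captures, using a hypothetical URL.
# Standalone check of the URL pattern used by IDoWatchResolver above.
import re

pattern = r'(?://|\.)(idowatch\.net)/([0-9a-zA-Z]+)\.html'
match = re.search(pattern, 'http://idowatch.net/abc123xyz.html')
print(match.groups())   # ('idowatch.net', 'abc123xyz')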
cabalamat/parrot
util.py
1
2858
# util.py
#
# (c)1999 Philip Hunt
#
# Various python utility functions
# Last altered: 11-May-1999
# History:
# 28-Apr-1999 PhilHunt: created
# 11-May-1999 PH: added savePickle(), loadPickle() functions

import string, os, stat, pprint
from cPickle import *


#-----------------------------------------------------------
# return 1st index of character (c) in string (s)
# or -1 if no match
#
# Note: works for strings, lists and tuples
def strIndex(s, c):
    for i in xrange(0, len(s)):
        if s[i] == c: return i
    # no match:
    return -1


#-----------------------------------------------------------
# return 1st index of character (c) in string (s), starting
# at character position (p).
# Returns -1 if no match
#
# Note: works for strings, lists and tuples
def strIndexp(s, c, p):
    for i in xrange(p, len(s)):
        if s[i] == c: return i
    # no match:
    return -1


#-----------------------------------------------------------
# return 1st index of substring (ss) in string (s)
# or -1 if no match
#
# Note: works for strings, lists and tuples
def strIndexStr(s, ss):
    return strIndexStrp(s, ss, 0)


#-----------------------------------------------------------
# return 1st index of substring (ss) in string (s), starting
# at position (p) in (s).
# Returns -1 if no match
#
# Note: works for strings, lists and tuples
def strIndexStrp(s, ss, p):
    ls = len(s)
    lss = len(ss)
    if ls == 0 or lss == 0 or lss > ls: return 0
    for i in xrange(p, ls - lss + 1):
        for j in xrange(0, lss):
            if s[i + j] != ss[j]: break
        else:
            # got match so return it
            return i

    # no match:
    return -1


#-----------------------------------------------------------
# is string (s) a directory?
def isDir(s):
    mode = os.stat(s)[stat.ST_MODE]
    return stat.S_ISDIR(mode)


#-----------------------------------------------------------
# read a file into a string
def readFile(filename):
    f = open(filename, 'r')
    s = f.read()
    f.close()
    return s


#-----------------------------------------------------------
# write a string into a file
def writeFile(filename, newValue):
    # at a later date, add code here to create directories
    # if they don't exist
    f = open(filename, 'w')
    f.write(newValue)
    f.close()


#-----------------------------------------------------------
# save a pickled file
def savePickle(filename, object):
    f = open(filename, 'w')
    p = Pickler(f)
    p.dump(object)
    f.close()


#-----------------------------------------------------------
# load a pickled file into an object
def loadPickle(filename):
    try:
        f = open(filename, 'r')
        u = Unpickler(f)
        object = u.load()
        f.close()
    except:
        object = None
    return object

#-----------------------------------------------------------
#-----------------------------------------------------------
#end
gpl-2.0
-4,633,167,410,953,049,000
22.235772
60
0.475507
false
3.673522
false
false
false
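Small usage examples for the utility functions above, assuming util.py is importable from the working directory (Python 2, as the module itself is); the pickle file name is illustrative only.
# Example usage of the parrot util helpers defined above.
from util import *

print(strIndex("hello", "l"))              # 2
print(strIndexStr("hello world", "wor"))   # 6

savePickle("/tmp/example.pickle", {"a": 1, "b": [2, 3]})
print(loadPickle("/tmp/example.pickle"))   # {'a': 1, 'b': [2, 3]}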
GeosoftInc/gxapi
spec/core/VULCAN.py
1
5218
from .. import Availability, Class, Constant, Define, Method, Parameter, Type gx_class = Class('VULCAN', doc="The :class:`VULCAN` class is used for importing Maptek® Vulcan block and triangulation files.") gx_defines = [ Define('BLOCK_MODEL_VARIABLE_TYPE', doc="Which variables to return from sReadBlockModelVariableInfo", constants=[ Constant('BLOCK_MODEL_NUMERIC_VARIABLE', value='1', type=Type.INT32_T, doc="Return numeric variable names"), Constant('BLOCK_MODEL_STRING_VARIABLE', value='2', type=Type.INT32_T, doc="Return string variable names") ])] gx_methods = { 'Miscellaneous': [ Method('IsValidTriangulationFile_VULCAN', module='geoengine.interoperability', version='8.4.0', availability=Availability.LICENSED, doc="Check if the given file can be opened as a Vulcan triangulation file.", return_type=Type.INT32_T, return_doc=""" 0 - No 1 - Yes """, parameters = [ Parameter('triangulation_file', type=Type.STRING, doc="Triangulation file") ]), Method('IsValidBlockModelFile_VULCAN', module='geoengine.interoperability', version='8.4.0', availability=Availability.LICENSED, doc="Check if the given file can be opened as a Vulcan block model file.", return_type=Type.INT32_T, return_doc=""" 0 - No 1 - Yes """, parameters = [ Parameter('block_model_file', type=Type.STRING, doc="Block model file") ]), Method('TriangulationToView_VULCAN', module='geoengine.interoperability', version='8.4.0', availability=Availability.LICENSED, doc="Draw triangle edges in a Vulcan triangulation file to a 3D view in a map.", return_type=Type.VOID, parameters = [ Parameter('triangulation_file', type=Type.STRING, doc="Triangulation file"), Parameter('ipj', type="IPJ", doc="Triangulation projection"), Parameter('mview', type="MVIEW", doc="Destination :class:`MVIEW`"), Parameter('new_group_name', type=Type.STRING, doc="New group name") ]), Method('GetBlockModelVariableInfo_VULCAN', module='geoengine.interoperability', version='8.4.0', availability=Availability.LICENSED, doc="Query a block model for the variable names and descriptions.", return_type=Type.VOID, parameters = [ Parameter('block_model_file', type=Type.STRING, doc="Block model file"), Parameter('query', type=Type.INT32_T, doc=":def:`BLOCK_MODEL_VARIABLE_TYPE` Which variables to return."), Parameter('lst', type="LST", doc="List used to return variable names/descriptions.") ]), Method('GetBlockModelStringVariableValues_VULCAN', module='geoengine.interoperability', version='8.4.0', availability=Availability.LICENSED, doc="Query a block model for the values a string variable can assume.", return_type=Type.VOID, parameters = [ Parameter('block_model_file', type=Type.STRING, doc="Block model file"), Parameter('variable_name', type=Type.STRING, doc="Variable name"), Parameter('lst', type="LST", doc="List used to return variable names") ]), Method('BlockModelToVoxel_VULCAN', module='geoengine.interoperability', version='8.4.0', availability=Availability.LICENSED, doc="Create a Geosoft voxel file from a Vulcan block model file.", return_type=Type.VOID, parameters = [ Parameter('block_model_file', type=Type.STRING, doc="Block model file"), Parameter('ipj', type="IPJ", doc="Block model projection"), Parameter('variable_to_export', type=Type.STRING, doc="Variable to import"), Parameter('output_voxel_filename', type=Type.STRING, doc="Ouput voxel filename"), Parameter('remove_default_values', type=Type.BOOL, doc="Remove default values from input?"), Parameter('rock_code_filename', type=Type.STRING, doc="Rock code file for string variable imports. 
Optional, unused for numeric variable imports.") ]) ] }
bsd-2-clause
3,033,769,105,831,783,000
47.305556
126
0.51773
false
4.88941
false
false
false
ryfeus/lambda-packs
Tensorflow/source/tensorboard/main.py
1
2214
# Copyright 2015 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """TensorBoard main module. This module ties together `tensorboard.program` and `tensorboard.default_plugins` to provide standard TensorBoard. It's meant to be tiny and act as little other than a config file. Those wishing to customize the set of plugins or static assets that TensorBoard uses can swap out this file with their own. """ from __future__ import absolute_import from __future__ import division from __future__ import print_function import tensorflow as tf from tensorboard import default from tensorboard import program def main(unused_argv=None): """Standard TensorBoard program CLI. See `tensorboard.program.main` for further documentation. """ return program.main(default.get_plugins(), default.get_assets_zip_provider()) def create_tb_app(*args, **kwargs): tf.logging.warning('DEPRECATED API: create_tb_app() should now be accessed ' 'via the `tensorboard.program` module') return program.create_tb_app(*args, **kwargs) def make_simple_server(*args, **kwargs): tf.logging.warning('DEPRECATED API: make_simple_server() should now be ' 'accessed via the `tensorboard.program` module') return program.make_simple_server(*args, **kwargs) def run_simple_server(*args, **kwargs): tf.logging.warning('DEPRECATED API: run_simple_server() should now be ' 'accessed via the `tensorboard.program` module') return program.run_simple_server(*args, **kwargs) if __name__ == '__main__': tf.app.run()
mit
-1,357,330,156,759,209,000
34.709677
80
0.69467
false
4.122905
false
false
false
visitor83/fighting_stm32
bsp/fighting/rtconfig.py
1
3487
import SCons.cpp # component options # make all component false RT_USING_FINSH = False RT_USING_DFS = False RT_USING_DFS_ELMFAT = False RT_USING_DFS_YAFFS2 = False RT_USING_LWIP = False RT_USING_WEBSERVER = False RT_USING_RTGUI = False # parse rtconfig.h to get used component PreProcessor = SCons.cpp.PreProcessor() f = file('rtconfig.h', 'r') contents = f.read() f.close() PreProcessor.process_contents(contents) rtconfig_ns = PreProcessor.cpp_namespace # finsh shell options if rtconfig_ns.has_key('RT_USING_FINSH'): RT_USING_FINSH = True # device virtual filesystem options if rtconfig_ns.has_key('RT_USING_DFS'): RT_USING_DFS = True if rtconfig_ns.has_key('RT_USING_DFS_ELMFAT'): RT_USING_DFS_ELMFAT = True if rtconfig_ns.has_key('RT_USING_DFS_YAFFS2'): RT_USING_DFS_YAFFS2 = True # lwip options if rtconfig_ns.has_key('RT_USING_LWIP'): RT_USING_LWIP = True if rtconfig_ns.has_key('RT_USING_WEBSERVER'): RT_USING_WEBSERVER = True # rtgui options if rtconfig_ns.has_key('RT_USING_RTGUI'): RT_USING_RTGUI = True # toolchains options ARCH='arm' CPU='stm32' CROSS_TOOL='keil' if CROSS_TOOL == 'gcc': PLATFORM = 'gcc' EXEC_PATH = 'E:/Program Files/CodeSourcery/Sourcery G++ Lite/bin' elif CROSS_TOOL == 'keil': PLATFORM = 'armcc' EXEC_PATH = 'C:/Keil' BUILD = 'debug' STM32_TYPE = 'STM32F10X_HD' if PLATFORM == 'gcc': # toolchains PREFIX = 'arm-none-eabi-' CC = PREFIX + 'gcc' AS = PREFIX + 'gcc' AR = PREFIX + 'ar' LINK = PREFIX + 'gcc' TARGET_EXT = 'axf' SIZE = PREFIX + 'size' OBJDUMP = PREFIX + 'objdump' OBJCPY = PREFIX + 'objcopy' DEVICE = ' -mcpu=cortex-m3 -mthumb' CFLAGS = DEVICE + ' -DRT_USING_MINILIBC' AFLAGS = ' -c' + DEVICE + ' -x assembler-with-cpp' LFLAGS = DEVICE + ' -Wl,--gc-sections,-Map=rtthread-stm32.map,-cref,-u,Reset_Handler -T stm32_rom.ld' CPATH = '' LPATH = '' if BUILD == 'debug': CFLAGS += ' -O0 -gdwarf-2' AFLAGS += ' -gdwarf-2' else: CFLAGS += ' -O2' RT_USING_MINILIBC = True POST_ACTION = OBJCPY + ' -O binary $TARGET rtthread.bin\n' + SIZE + ' $TARGET \n' elif PLATFORM == 'armcc': # toolchains CC = 'armcc' AS = 'armasm' AR = 'armar' LINK = 'armlink' TARGET_EXT = 'axf' DEVICE = ' --device DARMSTM' CFLAGS = DEVICE + ' --apcs=interwork --diag_suppress=870 ' AFLAGS = DEVICE LFLAGS = DEVICE + ' --info sizes --info totals --info unused --info veneers --list rtthread-stm32.map --scatter stm32_rom.sct' CFLAGS += ' -I' + EXEC_PATH + '/ARM/RV31/INC' LFLAGS += ' --libpath ' + EXEC_PATH + '/ARM/RV31/LIB' EXEC_PATH += '/arm/bin40/' if BUILD == 'debug': CFLAGS += ' -g -O0' AFLAGS += ' -g' else: CFLAGS += ' -O2' RT_USING_MINILIBC = False if RT_USING_FINSH: LFLAGS += ' --keep __fsym_* --keep __vsym_*' POST_ACTION = 'fromelf --bin $TARGET --output rtthread.bin \nfromelf -z $TARGET' elif PLATFORM == 'iar': # toolchains CC = 'iccarm' AS = 'iasmarm' AR = 'iarchive' LINK = 'ilinkarm' TARGET_EXT = 'out' DEVICE = ' --cpu DARMSTM --thumb' CFLAGS = '' AFLAGS = '' LFLAGS = ' --config stm32f10x_flash.icf' EXEC_PATH += '/arm/bin/' RT_USING_MINILIBC = False POST_ACTION = ''
gpl-2.0
1,810,656,682,017,844,500
24.022388
130
0.580728
false
2.713619
true
true
false
aksheus/Detect-Depression
generate_test_data.py
1
1925
""" Generates test data choose a chunk and it distributes it randomly so that Feature Space Tree can build the required vectors USAGE : generate-test_data.py path_to_test_data path_to_target_folders which_chunk_to_distribute """ # <imports> import os import sys from random import shuffle from shutil import copy #</imports> #< helper functions> join = lambda x,y: os.path.join(x,y) def get_source_files(data_path,required_chunk): chunk_dir = join(data_path,required_chunk) files = [ join(chunk_dir,file) for file in os.listdir(chunk_dir) if os.path.isfile( join(chunk_dir,file) ) ] return files def get_destination_folders(target_path): folders = [ join(target_path,folder) for folder in os.listdir(target_path) if os.path.isdir( join(target_path,folder) ) ] return folders #</helper functions> # <main> if __name__ == '__main__': if os.path.isdir(sys.argv[1]): data_path = sys.argv[1] else: print 'test data directory is invalid' exit(1) if os.path.isdir(sys.argv[2]): target_path = sys.argv[2] else: print 'target directory is invalid' exit(2) if sys.argv[3] is not None: required_chunk = sys.argv[3] source_files = get_source_files(data_path,required_chunk) destination_folders = get_destination_folders(target_path) file_count = len(source_files) folder_count = len(destination_folders) folder_capacity = file_count // folder_count shuffle(source_files) shuffle(destination_folders) start = 0 end = folder_capacity for folder in destination_folders: for file in source_files[start:end+1]: copy(file,folder) start +=folder_capacity end+=folder_capacity # write the random residual files to the last randomly chosen folder last_folder = destination_folders[-1] for file in source_files[start:]: copy(file,last_folder) #</main>
mit
-3,005,639,251,030,700,500
21.137931
103
0.676883
false
3.181818
false
false
false
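Editor's note: generate_test_data.py above shuffles the source files and copies equal-sized slices into the destination folders, sending leftovers to the last folder; its slices use end+1, so adjacent folders receive one overlapping file. A minimal Python 3 sketch of the intended non-overlapping split follows; the paths in the usage comment are placeholders, not files from the record.

import shutil
from random import shuffle

def distribute(files, folders):
    """Copy files into folders in roughly equal, non-overlapping slices."""
    shuffle(files)
    shuffle(folders)
    per_folder = len(files) // len(folders)
    for index, folder in enumerate(folders):
        start = index * per_folder
        for path in files[start:start + per_folder]:
            shutil.copy(path, folder)
    # Any remainder goes to the last randomly chosen folder, as in the record.
    for path in files[per_folder * len(folders):]:
        shutil.copy(path, folders[-1])

# Usage (paths must exist):
#   distribute(["chunk3/a.txt", "chunk3/b.txt", "chunk3/c.txt"],
#              ["subject_01", "subject_02"])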
crscardellino/dnnwsd
preprocess/semeval_vec.py
1
2689
#!/usr/bin/env python # -*- coding: utf-8 -*- # Add dnnwsd to the path import sys from os import path sys.path.append(path.abspath(path.dirname('../'))) import cPickle as pickle import uuid import numpy as np import gensim # Change the log config file to relative path from dnnwsd.utils import setup_logging setup_logging.CONFIG_FILE = u"../config/logging.yaml" from dnnwsd.corpus import semeval, unannotated from dnnwsd.processor import vecprocessor annotated_corpus_directory = "../resources/semeval/lexelts" unannotated_corpus_directory = "../../wikicorpus/en/wikicorpus_lemmas_sample_7k/" corpus_datasets_dir = "../resources/corpus_datasets/en/7k/vec" annotated_corpus_directory_iterator = semeval.SemevalCorpusDirectoryIterator(annotated_corpus_directory) unannotated_corpus_directory_iterator = unannotated.UnannotatedCorpusDirectoryIterator(unannotated_corpus_directory) word_vectors_path = "../resources/wordvectors/GoogleNews-vectors-negative300.bin.gz" word2vec_model = gensim.models.Word2Vec.load_word2vec_format(word_vectors_path, binary=True) for corpus_index, annotated_corpus in enumerate(annotated_corpus_directory_iterator): if not annotated_corpus.has_multiple_senses(): print u"Skipping preprocess for corpus of lemma {}".format(annotated_corpus.lemma) continue unannotated_corpus = unannotated_corpus_directory_iterator[annotated_corpus.lemma] vec_processor = vecprocessor.SemiSupervisedWordVectorsProcessor( annotated_corpus, unannotated_corpus, word2vec_model) vec_processor.instances() annotated_dataset = dict(data=vec_processor.dataset, target=vec_processor.target, labels=vec_processor.labels) sentences_ids = [] unannotated_sentences = {} for sentence in unannotated_corpus: sentence_id = str(uuid.uuid4()) raw_sentence = [] for word in sentence: word_token = u"_{}_".format(word.token) if word.is_main_lemma else word.token raw_sentence.append(word_token) raw_sentence = " ".join(raw_sentence) sentences_ids.append(sentence_id) unannotated_sentences[sentence_id] = raw_sentence unannotated_dataset = dict(data=vec_processor.unannotated_dataset, sentences=np.array(sentences_ids)) lemma_dataset = dict( lemma=annotated_corpus.lemma, index="{:03d}".format(corpus_index), annotated_dataset=annotated_dataset, unannotated_dataset=unannotated_dataset, unannotated_sentences=unannotated_sentences ) corpus_dataset = path.join(corpus_datasets_dir, "{:03d}.p".format(corpus_index)) with open(corpus_dataset, "wb") as f: pickle.dump(lemma_dataset, f)
bsd-3-clause
-4,322,636,156,738,971,000
34.381579
116
0.731871
false
3.487678
false
false
false
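Editor's note: semeval_vec.py above serializes one dictionary per lemma (annotated dataset, unannotated dataset, raw sentences) with cPickle. The sketch below shows the same save/load round trip in Python 3; the field values are made-up stand-ins for the processor output, not data from the record.

import pickle

import numpy as np

# Illustrative stand-ins for the processor's dataset/target/labels.
lemma_dataset = {
    "lemma": "bank",
    "index": "000",
    "annotated_dataset": {
        "data": np.zeros((2, 300)),        # one 300-dim word vector per instance
        "target": np.array([0, 1]),
        "labels": ["bank%1", "bank%2"],
    },
    "unannotated_dataset": {
        "data": np.zeros((1, 300)),
        "sentences": np.array(["uuid-1"]),
    },
    "unannotated_sentences": {"uuid-1": "the _bank_ raised rates"},
}

with open("000.p", "wb") as f:
    pickle.dump(lemma_dataset, f)

with open("000.p", "rb") as f:
    restored = pickle.load(f)
assert restored["lemma"] == "bank"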
alevy/Dunbarify
models/account.py
1
2191
# Copyright (c) 2011 Amit Levy <amit@amitlevy.com> # # Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation # files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, # modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the # Software is furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in all copies or substantial portions of the # Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE # WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR # COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, # ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. import hashlib import hmac from models.person import Person from google.appengine.ext import db from google.appengine.api import users class Account(db.Model): user = db.UserProperty(required=True) nickname = db.StringProperty(required=True) display_name = db.StringProperty(required=True) secret_token = db.StringProperty(required=True) restricted = db.BooleanProperty(required=True, default=True) about = db.TextProperty() email = db.EmailProperty() people = db.ListProperty(db.Key) def verify(self, nonce, signature): return hmac.new(str(self.secret_token), str(nonce), hashlib.sha1).hexdigest() == signature def people_models(self): return Person.get(self.people) def circles(self): """docstring for circles""" cs = dict() peeps = Person.get(self.people) for person in peeps: for circle in person.circles: cs.setdefault(circle, []).append(person) return cs def people_in(self, circle): return self.circles()[circle] @classmethod def get_by_user(cls, user): return Account.all().filter("user =", user).get()
mit
-512,721,832,861,470,140
45.638298
125
0.748973
false
4.027574
false
false
false
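Editor's note: Account.verify in the record above checks an HMAC-SHA1 hex signature of a nonce against the account's secret token. A small sketch of the equivalent sign/verify pair outside App Engine follows; the token and nonce values are illustrative, and it uses hmac.compare_digest rather than == to avoid timing differences.

import hashlib
import hmac

def sign_nonce(secret_token, nonce):
    """Produce the hex HMAC-SHA1 signature the model expects."""
    return hmac.new(secret_token.encode(), nonce.encode(), hashlib.sha1).hexdigest()

def verify(secret_token, nonce, signature):
    """Compare the presented signature against the expected one in constant time."""
    expected = sign_nonce(secret_token, nonce)
    return hmac.compare_digest(expected, signature)

if __name__ == "__main__":
    token, nonce = "s3cret-token", "20240101T000000Z"   # illustrative values
    sig = sign_nonce(token, nonce)
    assert verify(token, nonce, sig)
    assert not verify(token, nonce, "0" * 40)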
tikael1011/aiflee_python
Localization and Evaluation/count_result_on_org.py
1
2964
#UTF-8 Jie ''' This is about the basic footprint method to evaluate accuray, results includes several parameters. Will have details below ''' import os import statistics import math import numpy as np import statsmodels.api as sm import matplotlib.pyplot as plt import tkinter as tk from collections import Counter from time import time from glob import glob ''' This function is used to output a sorted path array, since I want to measure point by point ''' def sortlistdir(dirnamepath): files = glob(dirnamepath) a = [] for item in range(1,len(files)+1): a.append(dirnamepath[:-5] + str(item) + ".txt") return a ''' This function is used to find the nth occurance of needle in the haystack, mainly use this to find hallwayname or point name ''' def find_nth(haystack, needle, n): start = haystack.find(needle) while start >= 0 and n > 1: start = haystack.find(needle, start+len(needle)) n -= 1 return start ''' This function is used to list all the txt files under on directory and its subdirectory. Kinda like traverse or walk ''' def list_files(dir): r = [] for root, dirs, files in os.walk(dir): for name in files: if(not name.startswith('.')): r.append(os.path.join(root, name)) return r t = time() rootdir = '/Users/Qian/Desktop/wifidata' # This is about the margin of hallwaypoint # say the ground truth is 5 and threshold is 2 # then as long as the result is 3~7, it will be regarded # as correct. threshold = input("Input threshold: ") file_open = open('data_filtered.txt','r') all_data = [x.strip() for x in file_open.readlines()] path = list_files(rootdir) rt = 0 total = 0 for txtfile in path: input_open = open(txtfile,'r') all_input = [x.strip() for x in input_open.readlines()] cad = [] i = 0 for element in all_input: if(element == ""): continue if(element == '****####****'): break if (i >= 20): break idata = element.split() #print(idata) iname = idata[0] ivalue = int(idata[3]) # was 1 for the training data and float cast if (ivalue <= -85.0): continue i = i + 1 for base in all_data: jdata = base.split(',') jname = jdata[0] jvalue = float(jdata[1]) #Here 3 is the inner threshold for signal strength, same story as hallway point. if (iname == jname and (jvalue - 3 <= ivalue and ivalue - 3 <= jvalue )): cad.append(jdata[2]) # utlize counter function to give confident score result = [loc for loc, count in Counter(cad).most_common(1)] startindex = find_nth(txtfile, '/', 6) point = int((' '.join(result)).split('.')[1]) print("Cal" + " " + txtfile + " " + ' '.join(result)) if(point >= int(txtfile[startindex+1:-4]) - int(threshold) and point <= int(txtfile[startindex+1:-4]) + int(threshold)): rt = rt + 1 total = total + 1 input_open.close() print(total) print(rt) print(rt/total) print(time()-t) # For the whole testing result please check threshold_res.xlsx
gpl-3.0
-7,874,098,802,753,997,000
22.712
121
0.659919
false
3.093946
false
false
false
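Editor's note: count_result_on_org.py above matches scanned RSSI readings against a fingerprint database and then picks the location with the most matches via Counter.most_common(1). A tiny sketch of that voting step; the location labels are invented examples.

from collections import Counter

def best_location(candidates):
    """Return the location label that received the most fingerprint matches (or None)."""
    if not candidates:
        return None
    (location, _), = Counter(candidates).most_common(1)
    return location

# Each matched fingerprint row contributes one vote for its location label.
votes = ["hallA.5", "hallA.5", "hallA.6", "hallA.5", "hallB.2"]
assert best_location(votes) == "hallA.5"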
wangjun/pyload
module/plugins/hoster/DlFreeFr.py
2
7894
#!/usr/bin/env python # -*- coding: utf-8 -*- import re import pycurl from module.plugins.internal.SimpleHoster import SimpleHoster, create_getInfo, replace_patterns from module.common.json_layer import json_loads from module.network.Browser import Browser from module.network.CookieJar import CookieJar class CustomBrowser(Browser): def __init__(self, bucket=None, options={}): Browser.__init__(self, bucket, options) def load(self, *args, **kwargs): post = kwargs.get("post") if post is None: if len(args) > 2: post = args[2] if post: self.http.c.setopt(pycurl.FOLLOWLOCATION, 0) self.http.c.setopt(pycurl.POST, 1) self.http.c.setopt(pycurl.CUSTOMREQUEST, "POST") else: self.http.c.setopt(pycurl.FOLLOWLOCATION, 1) self.http.c.setopt(pycurl.POST, 0) self.http.c.setopt(pycurl.CUSTOMREQUEST, "GET") return Browser.load(self, *args, **kwargs) class AdYouLike(): """ Class to support adyoulike captcha service """ ADYOULIKE_INPUT_PATTERN = r'Adyoulike.create\((.*?)\);' ADYOULIKE_CALLBACK = r'Adyoulike.g._jsonp_5579316662423138' ADYOULIKE_CHALLENGE_PATTERN = ADYOULIKE_CALLBACK + r'\((.*?)\)' def __init__(self, plugin, engine="adyoulike"): self.plugin = plugin self.engine = engine def challenge(self, html): adyoulike_data_string = None found = re.search(self.ADYOULIKE_INPUT_PATTERN, html) if found: adyoulike_data_string = found.group(1) else: self.plugin.fail("Can't read AdYouLike input data") # {"adyoulike":{"key":"P~zQ~O0zV0WTiAzC-iw0navWQpCLoYEP"}, # "all":{"element_id":"ayl_private_cap_92300","lang":"fr","env":"prod"}} ayl_data = json_loads(adyoulike_data_string) res = self.plugin.load( r'http://api-ayl.appspot.com/challenge?key=%(ayl_key)s&env=%(ayl_env)s&callback=%(callback)s' % { "ayl_key": ayl_data[self.engine]["key"], "ayl_env": ayl_data["all"]["env"], "callback": self.ADYOULIKE_CALLBACK}) found = re.search(self.ADYOULIKE_CHALLENGE_PATTERN, res) challenge_string = None if found: challenge_string = found.group(1) else: self.plugin.fail("Invalid AdYouLike challenge") challenge_data = json_loads(challenge_string) return ayl_data, challenge_data def result(self, ayl, challenge): """ Adyoulike.g._jsonp_5579316662423138 ({"translations":{"fr":{"instructions_visual":"Recopiez « Soonnight » ci-dessous :"}}, "site_under":true,"clickable":true,"pixels":{"VIDEO_050":[],"DISPLAY":[],"VIDEO_000":[],"VIDEO_100":[], "VIDEO_025":[],"VIDEO_075":[]},"medium_type":"image/adyoulike", "iframes":{"big":"<iframe src=\"http://www.soonnight.com/campagn.html\" scrolling=\"no\" height=\"250\" width=\"300\" frameborder=\"0\"></iframe>"},"shares":{},"id":256, "token":"e6QuI4aRSnbIZJg02IsV6cp4JQ9~MjA1","formats":{"small":{"y":300,"x":0,"w":300,"h":60}, "big":{"y":0,"x":0,"w":300,"h":250},"hover":{"y":440,"x":0,"w":300,"h":60}}, "tid":"SqwuAdxT1EZoi4B5q0T63LN2AkiCJBg5"}) """ response = None try: instructions_visual = challenge["translations"][ayl["all"]["lang"]]["instructions_visual"] found = re.search(u".*«(.*)».*", instructions_visual) if found: response = found.group(1).strip() else: self.plugin.fail("Can't parse instructions visual") except KeyError: self.plugin.fail("No instructions visual") #TODO: Supports captcha if not response: self.plugin.fail("AdYouLike result failed") return {"_ayl_captcha_engine": self.engine, "_ayl_env": ayl["all"]["env"], "_ayl_tid": challenge["tid"], "_ayl_token_challenge": challenge["token"], "_ayl_response": response} class DlFreeFr(SimpleHoster): __name__ = "DlFreeFr" __type__ = "hoster" __pattern__ = r"http://dl\.free\.fr/([a-zA-Z0-9]+|getfile\.pl\?file=/[a-zA-Z0-9]+)" __version__ = "0.25" 
__description__ = """dl.free.fr download hoster""" __author_name__ = ("the-razer", "zoidberg", "Toilal") __author_mail__ = ("daniel_ AT gmx DOT net", "zoidberg@mujmail.cz", "toilal.dev@gmail.com") FILE_NAME_PATTERN = r"Fichier:</td>\s*<td[^>]*>(?P<N>[^>]*)</td>" FILE_SIZE_PATTERN = r"Taille:</td>\s*<td[^>]*>(?P<S>[\d.]+[KMG])o" FILE_OFFLINE_PATTERN = r"Erreur 404 - Document non trouv|Fichier inexistant|Le fichier demand&eacute; n'a pas &eacute;t&eacute; trouv&eacute;" #FILE_URL_PATTERN = r'href="(?P<url>http://.*?)">T&eacute;l&eacute;charger ce fichier' def setup(self): self.multiDL = self.resumeDownload = True self.limitDL = 5 self.chunkLimit = 1 def init(self): factory = self.core.requestFactory self.req = CustomBrowser(factory.bucket, factory.getOptions()) def process(self, pyfile): self.req.setCookieJar(None) pyfile.url = replace_patterns(pyfile.url, self.FILE_URL_REPLACEMENTS) valid_url = pyfile.url headers = self.load(valid_url, just_header=True) self.html = None if headers.get('code') == 302: valid_url = headers.get('location') headers = self.load(valid_url, just_header=True) if headers.get('code') == 200: content_type = headers.get('content-type') if content_type and content_type.startswith("text/html"): # Undirect acces to requested file, with a web page providing it (captcha) self.html = self.load(valid_url) self.handleFree() else: # Direct access to requested file for users using free.fr as Internet Service Provider. self.download(valid_url, disposition=True) elif headers.get('code') == 404: self.offline() else: self.fail("Invalid return code: " + str(headers.get('code'))) def handleFree(self): action, inputs = self.parseHtmlForm('action="getfile.pl"') adyoulike = AdYouLike(self) ayl, challenge = adyoulike.challenge(self.html) result = adyoulike.result(ayl, challenge) inputs.update(result) self.load("http://dl.free.fr/getfile.pl", post=inputs) headers = self.getLastHeaders() if headers.get("code") == 302 and "set-cookie" in headers and "location" in headers: found = re.search("(.*?)=(.*?); path=(.*?); domain=(.*?)", headers.get("set-cookie")) cj = CookieJar(__name__) if found: cj.setCookie(found.group(4), found.group(1), found.group(2), found.group(3)) else: self.fail("Cookie error") location = headers.get("location") self.req.setCookieJar(cj) self.download(location, disposition=True) else: self.fail("Invalid response") def getLastHeaders(self): #parse header header = {"code": self.req.code} for line in self.req.http.header.splitlines(): line = line.strip() if not line or ":" not in line: continue key, none, value = line.partition(":") key = key.lower().strip() value = value.strip() if key in header: if type(header[key]) == list: header[key].append(value) else: header[key] = [header[key], value] else: header[key] = value return header getInfo = create_getInfo(DlFreeFr)
gpl-3.0
-8,672,376,838,864,295,000
38.253731
146
0.57275
false
3.431927
false
false
false
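Editor's note: getLastHeaders in the DlFreeFr record parses raw HTTP header text into a dict, folding repeated keys (such as Set-Cookie) into lists. The following standalone sketch reproduces that parsing without pyLoad's Browser object; the sample header text is illustrative.

def parse_headers(raw_header, code=200):
    """Parse 'Key: value' header lines, folding repeated keys into lists."""
    header = {"code": code}
    for line in raw_header.splitlines():
        line = line.strip()
        if not line or ":" not in line:
            continue
        key, _, value = line.partition(":")
        key, value = key.lower().strip(), value.strip()
        if key in header:
            if isinstance(header[key], list):
                header[key].append(value)
            else:
                header[key] = [header[key], value]
        else:
            header[key] = value
    return header

raw = "Location: http://dl.free.fr/x\r\nSet-Cookie: a=1\r\nSet-Cookie: b=2\r\n"
parsed = parse_headers(raw, code=302)
assert parsed["location"].endswith("/x")
assert parsed["set-cookie"] == ["a=1", "b=2"]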
google/b-con
handlers/dv360_handler.py
1
10385
# Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Handler for DV360. This handler provides functions which trigger various reports on DV360 and download and consolidate the data from the reports. """ import collections import concurrent.futures import copy import json import socket from absl import app from absl import flags from absl import logging from utils import user_account_credentials from utils import service_account_credentials from utils import config from utils import util from handlers import sheets_handler FLAGS = flags.FLAGS _ADV_ID = 'Advertiser ID' _ADV_TIME_ZONE = 'Advertiser Time Zone' _MAX_WORKERS = 1 _timezone_query = { 'kind': 'doubleclickbidmanager#query', 'metadata': { 'title': 'b-con timezone report', 'dataRange': 'LAST_90_DAYS', 'format': 'CSV', 'sendNotification': False }, 'params': { 'type': 'TYPE_FEE', 'groupBys': [ 'FILTER_ADVERTISER', 'FILTER_ADVERTISER_TIMEZONE', ], 'filters': [{ 'type': 'FILTER_PARTNER', 'value': '234340' }], 'includeInviteData': True, # Need to specify at least one metric for the query to work. 'metrics': ['METRIC_ACTIVE_VIEW_MEASURABLE_IMPRESSIONS'], }, 'schedule': { 'frequency': 'ONE_TIME' }, } _base_query = { 'kind': 'doubleclickbidmanager#query', 'metadata': { 'title': 'b-con metrics report', 'dataRange': 'CUSTOM_DATES', 'format': 'CSV', 'sendNotification': False }, 'params': { 'type': 'TYPE_FEE', 'groupBys': [ 'FILTER_PARTNER', 'FILTER_ADVERTISER', 'FILTER_MEDIA_PLAN', 'FILTER_INSERTION_ORDER', 'FILTER_LINE_ITEM', ], 'filters': [{ 'type': 'FILTER_INSERTION_ORDER', 'value': '10009356' }, { 'type': 'FILTER_ADVERTISER', 'value': '3686031' }, { 'type': 'FILTER_INSERTION_ORDER', 'value': '10179643' }, { 'type': 'FILTER_ADVERTISER', 'value': '3624237' }], 'metrics': [ 'METRIC_CLICKS', 'METRIC_IMPRESSIONS', 'METRIC_MEDIA_COST_ADVERTISER', 'METRIC_PLATFORM_FEE_ADVERTISER', 'METRIC_CPM_FEE1_ADVERTISER', 'METRIC_CPM_FEE2_ADVERTISER', 'METRIC_CPM_FEE3_ADVERTISER', 'METRIC_CPM_FEE4_ADVERTISER', 'METRIC_CPM_FEE5_ADVERTISER', 'METRIC_MEDIA_FEE1_ADVERTISER', 'METRIC_MEDIA_FEE2_ADVERTISER', 'METRIC_MEDIA_FEE3_ADVERTISER', 'METRIC_MEDIA_FEE4_ADVERTISER', 'METRIC_MEDIA_FEE5_ADVERTISER', 'METRIC_DATA_COST_ADVERTISER', 'METRIC_REVENUE_ADVERTISER', 'METRIC_BILLABLE_COST_ADVERTISER', ], 'includeInviteData': True }, 'schedule': { 'frequency': 'ONE_TIME' }, 'reportDataStartTimeMs': 1561910400000, 'reportDataEndTimeMs': 1564588800000, 'timezoneCode': 'Pacific/Auckland', } class ReportError(Exception): pass def _get_service(): socket.setdefaulttimeout(300) # Getting socket.timeout issues sometimes. 
if config.params['dv360']['use_user_credentials']: service = user_account_credentials.get_dbm_service() else: service = service_account_credentials.get_dbm_service() return service def _get_report_status(service, query_id): logging.info('Getting report status') response = service.queries().getquery(queryId=query_id).execute() is_running = response.get('metadata').get('running') report_url = response.get('metadata').get( 'googleCloudStoragePathForLatestReport') query_status = {} if (not is_running) and report_url: # Query completed successfully. logging.info('Report completed: %s', query_id) query_status['status'] = 'Completed' query_status['url'] = report_url elif (not is_running) and not report_url: # Query failed. query_status['status'] = 'Failed' else: # Query is still running. query_status['status'] = 'Running' return query_status def _get_timezone_query_filters(partners): filters = [] for p in partners: partner_filter = {} partner_filter['type'] = 'FILTER_PARTNER' partner_filter['value'] = p filters.append(partner_filter) return filters def _get_query_filters(adv_timezone): """Create query filters and group by the invoice date. Each report can only have one date range. So if we have multiple invoices with different invoice dates, we'll need to run one query for each of them. """ filters = collections.defaultdict(list) for adv, timezone in adv_timezone.items(): advertiser_filter = {} advertiser_filter['type'] = 'FILTER_ADVERTISER' advertiser_filter['value'] = adv filters[timezone].append(advertiser_filter) return filters def create_timezone_report(partners): logging.info('Creating timezone report.') query = copy.deepcopy(_timezone_query) timezone_filters = _get_timezone_query_filters(partners) query['params']['filters'] = timezone_filters service = _get_service() response = service.queries().createquery(body=query).execute() logging.info('Created DV360 query: %s', response.get('queryId')) query_id = response.get('queryId') while True: # Wait until the report has completed or failed. query_status = _get_report_status(service, query_id) if query_status['status'] == 'Completed': logging.info('Timezone report completed.') break elif query_status['status'] == 'Failed': raise ReportError time.sleep(5) logging.info(query_status['url']) return query_status['url'] def _create_report(service, report_start_time, report_end_time, cur_timezone, cur_filter): query = copy.deepcopy(_base_query) query['params']['filters'] = cur_filter query['reportDataStartTimeMs'] = report_start_time query['reportDataEndTimeMs'] = report_end_time logging.info(cur_timezone) query['timezoneCode'] = cur_timezone logging.info(json.dumps(query)) response = service.queries().createquery(body=query).execute() return response.get('queryId') def create_reports(timezone_report_data, report_start_time, report_end_time): logging.info('Creating reports.') valid_rows = util.get_valid_rows(timezone_report_data) adv_timezone = {} for row in valid_rows: adv_timezone[row[_ADV_ID]] = row[_ADV_TIME_ZONE] logging.info('Found %d timezones.', len(set(adv_timezone.keys()))) filters = _get_query_filters(adv_timezone) service = _get_service() queries = [] # Get data for last month. 
# last_month = datetime.datetime.now() - relativedelta(months=1) # end_date = last_month + relativedelta(day=31) with concurrent.futures.ThreadPoolExecutor( max_workers=_MAX_WORKERS) as executor: wait_for = [ executor.submit(_create_report, service, report_start_time, report_end_time, cur_timezone, cur_filter) for cur_timezone, cur_filter in filters.items() ] for f in concurrent.futures.as_completed(wait_for): queries.append(f.result()) return queries def _get_report_status(service, query_id): response = service.queries().getquery(queryId=query_id).execute() is_running = response.get('metadata').get('running') report_url = response.get('metadata').get( 'googleCloudStoragePathForLatestReport') query_status = {} if (not is_running) and report_url: # Query completed successfully. logging.info('Report completed: %s', query_id) query_status['status'] = 'Completed' query_status['url'] = report_url elif (not is_running) and not report_url: # Query failed. query_status['status'] = 'Failed' else: # Query is still running. query_status['status'] = 'Running' return query_status def _get_reports_status(query_ids): service = _get_service() status_all = [] with concurrent.futures.ThreadPoolExecutor( max_workers=_MAX_WORKERS) as executor: wait_for = [ executor.submit(_get_report_status, service, query_id) for query_id in query_ids ] for f in concurrent.futures.as_completed(wait_for): status_all.append(f.result()) return status_all def wait_for_reports_to_complete(query_ids): while True: # TODO: Change logic to only check the ones that are still running. statuses = _get_reports_status(query_ids) if all([s['status'] == 'Completed' for s in statuses]): break return [s['url'] for s in statuses] def get_user_permissions(): """Get user assigned roles from DV360.""" # User permissions have to be obtained only with service accounts. Hence # getting the service directly instead of using _get_service(). # Refer: https://developers.google.com/display-video/api/guides/users/overview. service = service_account_credentials.get_dv_service() response = service.users().list().execute() user_perms = [] for user in response['users']: email = util.hash_single(user['email']) assigned_user_roles = user['assignedUserRoles'] for assigned_role in assigned_user_roles: if 'partner' in assigned_role['assignedUserRoleId']: entity_type = 'partner' entity_id = assigned_role['partnerId'] else: entity_type = 'advertiser' entity_id = assigned_role['advertiserId'] user_role = assigned_role['userRole'] user_perms.append({ 'email': email, 'entity_type': entity_type, 'entity_id': entity_id, 'user_role': user_role, }) return user_perms def create_advertiser_report(): pass def download_timezone_report(): pass def download_advertiser_report(): pass
apache-2.0
7,724,194,224,648,506,000
28.171348
81
0.648146
false
3.646419
false
false
false
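Editor's note: dv360_handler.py above groups advertisers by reporting time zone (one DBM query per zone) and polls each query until it completes or fails. A minimal sketch of those two pieces follows; the row dicts and the poll callable are stand-ins for the report data and the DBM service, not the handler's real objects.

import collections
import time

def group_by_timezone(rows):
    """Build one advertiser filter list per reporting time zone."""
    filters = collections.defaultdict(list)
    for row in rows:
        filters[row["Advertiser Time Zone"]].append(
            {"type": "FILTER_ADVERTISER", "value": row["Advertiser ID"]})
    return filters

def wait_until_done(poll, query_id, delay=5):
    """Poll a status callable until the report finishes; raise if it fails."""
    while True:
        status = poll(query_id)   # expected to return 'Running', 'Completed' or 'Failed'
        if status == "Completed":
            return True
        if status == "Failed":
            raise RuntimeError("report %s failed" % query_id)
        time.sleep(delay)

rows = [{"Advertiser ID": "1", "Advertiser Time Zone": "Pacific/Auckland"},
        {"Advertiser ID": "2", "Advertiser Time Zone": "Pacific/Auckland"},
        {"Advertiser ID": "3", "Advertiser Time Zone": "America/New_York"}]
assert len(group_by_timezone(rows)) == 2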
adkerr/tempest
tempest/services/network/json/network_client.py
1
26276
# vim: tabstop=4 shiftwidth=4 softtabstop=4 # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import json from tempest.common.rest_client import RestClient class NetworkClientJSON(RestClient): """ Tempest REST client for Neutron. Uses v2 of the Neutron API, since the V1 API has been removed from the code base. Implements create, delete, update, list and show for the basic Neutron abstractions (networks, sub-networks, routers, ports and floating IP): Implements add/remove interface to router using subnet ID / port ID It also implements list, show, update and reset for OpenStack Networking quotas """ def __init__(self, config, username, password, auth_url, tenant_name=None): super(NetworkClientJSON, self).__init__(config, username, password, auth_url, tenant_name) self.service = self.config.network.catalog_type self.version = '2.0' self.uri_prefix = "v%s" % (self.version) def list_networks(self): uri = '%s/networks' % (self.uri_prefix) resp, body = self.get(uri, self.headers) body = json.loads(body) return resp, body def create_network(self, name, **kwargs): post_body = {'network': kwargs} post_body['network']['name'] = name body = json.dumps(post_body) uri = '%s/networks' % (self.uri_prefix) resp, body = self.post(uri, headers=self.headers, body=body) body = json.loads(body) return resp, body def create_bulk_network(self, count, names): network_list = list() for i in range(count): network_list.append({'name': names[i]}) post_body = {'networks': network_list} body = json.dumps(post_body) uri = '%s/networks' % (self.uri_prefix) resp, body = self.post(uri, headers=self.headers, body=body) body = json.loads(body) return resp, body def show_network(self, uuid): uri = '%s/networks/%s' % (self.uri_prefix, uuid) resp, body = self.get(uri, self.headers) body = json.loads(body) return resp, body def delete_network(self, uuid): uri = '%s/networks/%s' % (self.uri_prefix, uuid) resp, body = self.delete(uri, self.headers) return resp, body def create_subnet(self, net_uuid, cidr, ip_version=4, **kwargs): post_body = {'subnet': kwargs} post_body['subnet']['ip_version'] = ip_version post_body['subnet']['network_id'] = net_uuid post_body['subnet']['cidr'] = cidr body = json.dumps(post_body) uri = '%s/subnets' % (self.uri_prefix) resp, body = self.post(uri, headers=self.headers, body=body) body = json.loads(body) return resp, body def delete_subnet(self, uuid): uri = '%s/subnets/%s' % (self.uri_prefix, uuid) resp, body = self.delete(uri, self.headers) return resp, body def list_subnets(self): uri = '%s/subnets' % (self.uri_prefix) resp, body = self.get(uri, self.headers) body = json.loads(body) return resp, body def show_subnet(self, uuid): uri = '%s/subnets/%s' % (self.uri_prefix, uuid) resp, body = self.get(uri, self.headers) body = json.loads(body) return resp, body def create_port(self, network_id, **kwargs): post_body = { 'port': { 'network_id': network_id, } } for key, val in kwargs.items(): post_body['port'][key] = val body = json.dumps(post_body) uri = '%s/ports' % (self.uri_prefix) resp, body = self.post(uri, 
headers=self.headers, body=body) body = json.loads(body) return resp, body def delete_port(self, port_id): uri = '%s/ports/%s' % (self.uri_prefix, port_id) resp, body = self.delete(uri, self.headers) return resp, body def list_ports(self, **filters): uri = '%s/ports' % (self.uri_prefix) filter_items = ["%s=%s" % (k, v) for (k, v) in filters.iteritems()] querystring = "&".join(filter_items) if querystring: uri = "%s?%s" % (uri, querystring) resp, body = self.get(uri, self.headers) body = json.loads(body) return resp, body def show_port(self, port_id): uri = '%s/ports/%s' % (self.uri_prefix, port_id) resp, body = self.get(uri, self.headers) body = json.loads(body) return resp, body def update_quotas(self, tenant_id, **kwargs): put_body = {'quota': kwargs} body = json.dumps(put_body) uri = '%s/quotas/%s' % (self.uri_prefix, tenant_id) resp, body = self.put(uri, body, self.headers) body = json.loads(body) return resp, body['quota'] def show_quotas(self, tenant_id): uri = '%s/quotas/%s' % (self.uri_prefix, tenant_id) resp, body = self.get(uri, self.headers) body = json.loads(body) return resp, body['quota'] def reset_quotas(self, tenant_id): uri = '%s/quotas/%s' % (self.uri_prefix, tenant_id) resp, body = self.delete(uri, self.headers) return resp, body def list_quotas(self): uri = '%s/quotas' % (self.uri_prefix) resp, body = self.get(uri, self.headers) body = json.loads(body) return resp, body['quotas'] def update_subnet(self, subnet_id, new_name): put_body = { 'subnet': { 'name': new_name, } } body = json.dumps(put_body) uri = '%s/subnets/%s' % (self.uri_prefix, subnet_id) resp, body = self.put(uri, body=body, headers=self.headers) body = json.loads(body) return resp, body def update_port(self, port_id, new_name): put_body = { 'port': { 'name': new_name, } } body = json.dumps(put_body) uri = '%s/ports/%s' % (self.uri_prefix, port_id) resp, body = self.put(uri, body=body, headers=self.headers) body = json.loads(body) return resp, body def update_network(self, network_id, new_name): put_body = { "network": { "name": new_name, } } body = json.dumps(put_body) uri = '%s/networks/%s' % (self.uri_prefix, network_id) resp, body = self.put(uri, body=body, headers=self.headers) body = json.loads(body) return resp, body def list_routers(self): uri = '%s/routers' % (self.uri_prefix) resp, body = self.get(uri, self.headers) body = json.loads(body) return resp, body def create_router(self, name, admin_state_up=True, **kwargs): post_body = {'router': kwargs} post_body['router']['name'] = name post_body['router']['admin_state_up'] = admin_state_up body = json.dumps(post_body) uri = '%s/routers' % (self.uri_prefix) resp, body = self.post(uri, headers=self.headers, body=body) body = json.loads(body) return resp, body def delete_router(self, router_id): uri = '%s/routers/%s' % (self.uri_prefix, router_id) resp, body = self.delete(uri, self.headers) return resp, body def show_router(self, router_id): uri = '%s/routers/%s' % (self.uri_prefix, router_id) resp, body = self.get(uri, self.headers) body = json.loads(body) return resp, body def _update_router(self, router_id, set_enable_snat, **kwargs): uri = '%s/routers/%s' % (self.uri_prefix, router_id) resp, body = self.get(uri, self.headers) body = json.loads(body) update_body = {} update_body['name'] = kwargs.get('name', body['router']['name']) update_body['admin_state_up'] = kwargs.get( 'admin_state_up', body['router']['admin_state_up']) cur_gw_info = body['router']['external_gateway_info'] if cur_gw_info and not set_enable_snat: cur_gw_info.pop('enable_snat', 
None) update_body['external_gateway_info'] = kwargs.get( 'external_gateway_info', body['router']['external_gateway_info']) update_body = dict(router=update_body) update_body = json.dumps(update_body) resp, body = self.put(uri, update_body, self.headers) body = json.loads(body) return resp, body def update_router(self, router_id, **kwargs): """Update a router leaving enable_snat to its default value.""" # If external_gateway_info contains enable_snat the request will fail # with 404 unless executed with admin client, and therefore we instruct # _update_router to not set this attribute # NOTE(salv-orlando): The above applies as long as Neutron's default # policy is to restrict enable_snat usage to admins only. return self._update_router(router_id, set_enable_snat=False, **kwargs) def update_router_with_snat_gw_info(self, router_id, **kwargs): """Update a router passing also the enable_snat attribute. This method must be execute with admin credentials, otherwise the API call will return a 404 error. """ return self._update_router(router_id, set_enable_snat=True, **kwargs) def add_router_interface_with_subnet_id(self, router_id, subnet_id): uri = '%s/routers/%s/add_router_interface' % (self.uri_prefix, router_id) update_body = {"subnet_id": subnet_id} update_body = json.dumps(update_body) resp, body = self.put(uri, update_body, self.headers) body = json.loads(body) return resp, body def add_router_interface_with_port_id(self, router_id, port_id): uri = '%s/routers/%s/add_router_interface' % (self.uri_prefix, router_id) update_body = {"port_id": port_id} update_body = json.dumps(update_body) resp, body = self.put(uri, update_body, self.headers) body = json.loads(body) return resp, body def remove_router_interface_with_subnet_id(self, router_id, subnet_id): uri = '%s/routers/%s/remove_router_interface' % (self.uri_prefix, router_id) update_body = {"subnet_id": subnet_id} update_body = json.dumps(update_body) resp, body = self.put(uri, update_body, self.headers) body = json.loads(body) return resp, body def remove_router_interface_with_port_id(self, router_id, port_id): uri = '%s/routers/%s/remove_router_interface' % (self.uri_prefix, router_id) update_body = {"port_id": port_id} update_body = json.dumps(update_body) resp, body = self.put(uri, update_body, self.headers) body = json.loads(body) return resp, body def create_floating_ip(self, ext_network_id, **kwargs): post_body = { 'floatingip': kwargs} post_body['floatingip']['floating_network_id'] = ext_network_id body = json.dumps(post_body) uri = '%s/floatingips' % (self.uri_prefix) resp, body = self.post(uri, headers=self.headers, body=body) body = json.loads(body) return resp, body def list_security_groups(self): uri = '%s/security-groups' % (self.uri_prefix) resp, body = self.get(uri, self.headers) body = json.loads(body) return resp, body def delete_security_group(self, secgroup_id): uri = '%s/security-groups/%s' % (self.uri_prefix, secgroup_id) resp, body = self.delete(uri, self.headers) return resp, body def create_security_group(self, name, **kwargs): post_body = { 'security_group': { 'name': name, } } for key, value in kwargs.iteritems(): post_body['security_group'][str(key)] = value body = json.dumps(post_body) uri = '%s/security-groups' % (self.uri_prefix) resp, body = self.post(uri, headers=self.headers, body=body) body = json.loads(body) return resp, body def show_floating_ip(self, floating_ip_id): uri = '%s/floatingips/%s' % (self.uri_prefix, floating_ip_id) resp, body = self.get(uri, self.headers) body = json.loads(body) return 
resp, body def show_security_group(self, secgroup_id): uri = '%s/security-groups/%s' % (self.uri_prefix, secgroup_id) resp, body = self.get(uri, self.headers) body = json.loads(body) return resp, body def list_floating_ips(self): uri = '%s/floatingips' % (self.uri_prefix) resp, body = self.get(uri, self.headers) body = json.loads(body) return resp, body def list_security_group_rules(self): uri = '%s/security-group-rules' % (self.uri_prefix) resp, body = self.get(uri, self.headers) body = json.loads(body) return resp, body def delete_floating_ip(self, floating_ip_id): uri = '%s/floatingips/%s' % (self.uri_prefix, floating_ip_id) resp, body = self.delete(uri, self.headers) return resp, body def update_floating_ip(self, floating_ip_id, **kwargs): post_body = { 'floatingip': kwargs} body = json.dumps(post_body) uri = '%s/floatingips/%s' % (self.uri_prefix, floating_ip_id) resp, body = self.put(uri, headers=self.headers, body=body) body = json.loads(body) return resp, body def create_security_group_rule(self, secgroup_id, direction='ingress', **kwargs): post_body = { 'security_group_rule': { 'direction': direction, 'security_group_id': secgroup_id } } for key, value in kwargs.iteritems(): post_body['security_group_rule'][str(key)] = value body = json.dumps(post_body) uri = '%s/security-group-rules' % (self.uri_prefix) resp, body = self.post(uri, headers=self.headers, body=body) body = json.loads(body) return resp, body def create_bulk_subnet(self, subnet_list): post_body = {'subnets': subnet_list} body = json.dumps(post_body) uri = '%s/subnets' % (self.uri_prefix) resp, body = self.post(uri, headers=self.headers, body=body) body = json.loads(body) return resp, body def delete_security_group_rule(self, rule_id): uri = '%s/security-group-rules/%s' % (self.uri_prefix, rule_id) resp, body = self.delete(uri, self.headers) return resp, body def show_security_group_rule(self, rule_id): uri = '%s/security-group-rules/%s' % (self.uri_prefix, rule_id) resp, body = self.get(uri, self.headers) body = json.loads(body) return resp, body def create_bulk_port(self, port_list): post_body = {'ports': port_list} body = json.dumps(post_body) uri = '%s/ports' % (self.uri_prefix) resp, body = self.post(uri, headers=self.headers, body=body) body = json.loads(body) return resp, body def list_vips(self): uri = '%s/lb/vips' % (self.uri_prefix) resp, body = self.get(uri, self.headers) body = json.loads(body) return resp, body def create_vip(self, name, protocol, protocol_port, subnet_id, pool_id): post_body = { "vip": { "protocol": protocol, "name": name, "subnet_id": subnet_id, "pool_id": pool_id, "protocol_port": protocol_port } } body = json.dumps(post_body) uri = '%s/lb/vips' % (self.uri_prefix) resp, body = self.post(uri, headers=self.headers, body=body) body = json.loads(body) return resp, body def create_pool(self, name, lb_method, protocol, subnet_id): post_body = { "pool": { "protocol": protocol, "name": name, "subnet_id": subnet_id, "lb_method": lb_method } } body = json.dumps(post_body) uri = '%s/lb/pools' % (self.uri_prefix) resp, body = self.post(uri, headers=self.headers, body=body) body = json.loads(body) return resp, body def show_vip(self, uuid): uri = '%s/lb/vips/%s' % (self.uri_prefix, uuid) resp, body = self.get(uri, self.headers) body = json.loads(body) return resp, body def delete_vip(self, uuid): uri = '%s/lb/vips/%s' % (self.uri_prefix, uuid) resp, body = self.delete(uri, self.headers) return resp, body def delete_pool(self, uuid): uri = '%s/lb/pools/%s' % (self.uri_prefix, uuid) resp, body = 
self.delete(uri, self.headers) return resp, body def update_vip(self, vip_id, new_name): put_body = { "vip": { "name": new_name, } } body = json.dumps(put_body) uri = '%s/lb/vips/%s' % (self.uri_prefix, vip_id) resp, body = self.put(uri, body=body, headers=self.headers) body = json.loads(body) return resp, body def update_pool(self, pool_id, new_name): put_body = { "pool": { "name": new_name, } } body = json.dumps(put_body) uri = '%s/lb/pools/%s' % (self.uri_prefix, pool_id) resp, body = self.put(uri, body=body, headers=self.headers) body = json.loads(body) return resp, body def list_pools(self): uri = '%s/lb/pools' % (self.uri_prefix) resp, body = self.get(uri, self.headers) body = json.loads(body) return resp, body def show_pool(self, uuid): uri = '%s/lb/pools/%s' % (self.uri_prefix, uuid) resp, body = self.get(uri, self.headers) body = json.loads(body) return resp, body def list_members(self): uri = '%s/lb/members' % (self.uri_prefix) resp, body = self.get(uri, self.headers) body = json.loads(body) return resp, body def create_member(self, address, protocol_port, pool_id): post_body = { "member": { "protocol_port": protocol_port, "pool_id": pool_id, "address": address } } body = json.dumps(post_body) uri = '%s/lb/members' % (self.uri_prefix) resp, body = self.post(uri, headers=self.headers, body=body) body = json.loads(body) return resp, body def show_member(self, uuid): uri = '%s/lb/members/%s' % (self.uri_prefix, uuid) resp, body = self.get(uri, self.headers) body = json.loads(body) return resp, body def delete_member(self, uuid): uri = '%s/lb/members/%s' % (self.uri_prefix, uuid) resp, body = self.delete(uri, self.headers) return resp, body def update_member(self, admin_state_up, member_id): put_body = { "member": { "admin_state_up": admin_state_up } } body = json.dumps(put_body) uri = '%s/lb/members/%s' % (self.uri_prefix, member_id) resp, body = self.put(uri, body=body, headers=self.headers) body = json.loads(body) return resp, body def list_health_monitors(self): uri = '%s/lb/health_monitors' % (self.uri_prefix) resp, body = self.get(uri, self.headers) body = json.loads(body) return resp, body def create_health_monitor(self, delay, max_retries, Type, timeout): post_body = { "health_monitor": { "delay": delay, "max_retries": max_retries, "type": Type, "timeout": timeout } } body = json.dumps(post_body) uri = '%s/lb/health_monitors' % (self.uri_prefix) resp, body = self.post(uri, headers=self.headers, body=body) body = json.loads(body) return resp, body def show_health_monitor(self, uuid): uri = '%s/lb/health_monitors/%s' % (self.uri_prefix, uuid) resp, body = self.get(uri, self.headers) body = json.loads(body) return resp, body def delete_health_monitor(self, uuid): uri = '%s/lb/health_monitors/%s' % (self.uri_prefix, uuid) resp, body = self.delete(uri, self.headers) return resp, body def update_health_monitor(self, admin_state_up, uuid): put_body = { "health_monitor": { "admin_state_up": admin_state_up } } body = json.dumps(put_body) uri = '%s/lb/health_monitors/%s' % (self.uri_prefix, uuid) resp, body = self.put(uri, body=body, headers=self.headers) body = json.loads(body) return resp, body def associate_health_monitor_with_pool(self, health_monitor_id, pool_id): post_body = { "health_monitor": { "id": health_monitor_id, } } body = json.dumps(post_body) uri = '%s/lb/pools/%s/health_monitors' % (self.uri_prefix, pool_id) resp, body = self.post(uri, headers=self.headers, body=body) body = json.loads(body) return resp, body def disassociate_health_monitor_with_pool(self, 
health_monitor_id, pool_id): uri = '%s/lb/pools/%s/health_monitors/%s' % (self.uri_prefix, pool_id, health_monitor_id) resp, body = self.delete(uri, headers=self.headers) return resp, body def list_extensions(self): uri = '%s/extensions' % (self.uri_prefix) resp, body = self.get(uri, self.headers) body = json.loads(body) return resp, body def show_extension_details(self, ext_alias): uri = '%s/extensions/%s' % (self.uri_prefix, ext_alias) resp, body = self.get(uri, headers=self.headers) body = json.loads(body) return resp, body def list_vpn_services(self): uri = '%s/vpn/vpnservices' % (self.uri_prefix) resp, body = self.get(uri, self.headers) body = json.loads(body) return resp, body def create_vpn_service(self, subnet_id, router_id, **kwargs): post_body = { "vpnservice": { "subnet_id": subnet_id, "router_id": router_id } } for key, val in kwargs.items(): post_body['vpnservice'][key] = val body = json.dumps(post_body) uri = '%s/vpn/vpnservices' % (self.uri_prefix) resp, body = self.post(uri, headers=self.headers, body=body) body = json.loads(body) return resp, body def show_vpn_service(self, uuid): uri = '%s/vpn/vpnservices/%s' % (self.uri_prefix, uuid) resp, body = self.get(uri, self.headers) body = json.loads(body) return resp, body def delete_vpn_service(self, uuid): uri = '%s/vpn/vpnservices/%s' % (self.uri_prefix, uuid) resp, body = self.delete(uri, self.headers) return resp, body def update_vpn_service(self, uuid, description): put_body = { "vpnservice": { "description": description } } body = json.dumps(put_body) uri = '%s/vpn/vpnservices/%s' % (self.uri_prefix, uuid) resp, body = self.put(uri, body=body, headers=self.headers) body = json.loads(body) return resp, body def list_router_interfaces(self, uuid): uri = '%s/ports?device_id=%s' % (self.uri_prefix, uuid) resp, body = self.get(uri, self.headers) body = json.loads(body) return resp, body def list_agents(self): uri = '%s/agents' % self.uri_prefix resp, body = self.get(uri, self.headers) body = json.loads(body) return resp, body def list_routers_on_l3_agent(self, agent_id): uri = '%s/agents/%s/l3-routers' % (self.uri_prefix, agent_id) resp, body = self.get(uri, self.headers) body = json.loads(body) return resp, body def list_l3_agents_hosting_router(self, router_id): uri = '%s/routers/%s/l3-agents' % (self.uri_prefix, router_id) resp, body = self.get(uri, self.headers) body = json.loads(body) return resp, body def list_service_providers(self): uri = '%s/service-providers' % self.uri_prefix resp, body = self.get(uri, self.headers) body = json.loads(body) return resp, body def list_dhcp_agent_hosting_network(self, network_id): uri = '%s/networks/%s/dhcp-agents' % (self.uri_prefix, network_id) resp, body = self.get(uri, self.headers) body = json.loads(body) return resp, body def list_networks_hosted_by_one_dhcp_agent(self, agent_id): uri = '%s/agents/%s/dhcp-networks' % (self.uri_prefix, agent_id) resp, body = self.get(uri, self.headers) body = json.loads(body) return resp, body def remove_network_from_dhcp_agent(self, agent_id, network_id): uri = '%s/agents/%s/dhcp-networks/%s' % (self.uri_prefix, agent_id, network_id) resp, body = self.delete(uri, self.headers) return resp, body
apache-2.0
1,793,466,490,960,584,200
35.647141
79
0.562947
false
3.542195
false
false
false
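Editor's note: nearly every method in the NetworkClientJSON record follows one pattern: build a URI under uri_prefix, json.dumps a body wrapped in a resource key, call get/post/put/delete, then json.loads the response. The sketch below factors that pattern into generic helpers; the rest object is a stand-in for a Tempest-style RestClient and the class name is invented for illustration.

import json

class MiniNeutronClient(object):
    """Illustrative wrapper showing the create/show/delete pattern the record repeats."""

    def __init__(self, rest, uri_prefix="v2.0", headers=None):
        self.rest = rest                  # object exposing get/post/delete(uri, ...) like RestClient
        self.uri_prefix = uri_prefix
        self.headers = headers or {}

    def _create(self, collection, wrapper, **attrs):
        body = json.dumps({wrapper: attrs})
        uri = "%s/%s" % (self.uri_prefix, collection)
        resp, raw = self.rest.post(uri, headers=self.headers, body=body)
        return resp, json.loads(raw)

    def _show(self, collection, uuid):
        uri = "%s/%s/%s" % (self.uri_prefix, collection, uuid)
        resp, raw = self.rest.get(uri, self.headers)
        return resp, json.loads(raw)

    def _delete(self, collection, uuid):
        uri = "%s/%s/%s" % (self.uri_prefix, collection, uuid)
        return self.rest.delete(uri, self.headers)

    # e.g. create_network('net1') is _create('networks', 'network', name='net1')
    def create_network(self, name, **kwargs):
        kwargs["name"] = name
        return self._create("networks", "network", **kwargs)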
SeyVu/subscription_renewal
telecom_churn.py
1
7941
######################################################################################################### # Description: Main file for telecom_churn dataset. Key function is to transform dataset into needed # input_features and output ######################################################################################################### import os import time import logging import logging.config # Imports for various models (Turn on as needed) from sklearn.ensemble import RandomForestClassifier as RandomForest # from sklearn.ensemble import BaggingClassifier as Bagging # from sklearn.svm import SVC as SVC # Support vector machines # from sklearn.neighbors import KNeighborsClassifier as KNN # from sklearn.linear_model import LogisticRegression as LogReg # from sklearn.linear_model import RidgeClassifier as Ridge # from sknn.mlp import Classifier as NeuralNetClassifier, Layer as NeuralNetLayer from sklearn.ensemble import GradientBoostingClassifier as GradBoost from sklearn.preprocessing import StandardScaler # Import Python libs import pandas as pd import numpy as np # Import from within project import support_functions as sf import ensemble_models import visualization ######################################################################################################### # Global variables __author__ = "DataCentric1" __pass__ = 1 __fail__ = 0 ######################################################################################################### # Setup logging logging.config.fileConfig('logging.conf') logger = logging.getLogger("info") ######################################################################################################### def telecom_churn(use_synthetic_data=False, feature_scaling=True): logger.debug("Importing data") if use_synthetic_data: if os.path.isfile('data/data_synthetic.csv'): churn_df = pd.read_csv('data/data_synthetic.csv', sep=',') else: raise ValueError("Synthetic data not available") # split rows for working on partial data start_row = 5000 end_row = 9999 else: churn_df = pd.read_csv('data/train_data.csv', sep=', ') # split rows for working on partial data start_row = 0 end_row = 4999 churn_df = churn_df.iloc[start_row:end_row].copy() churn_df = churn_df.reset_index() col_names = churn_df.columns.tolist() logger.info(sf.Color.BOLD + sf.Color.GREEN + "Column names:" + sf.Color.END) logger.info(col_names) to_show = col_names[:6] + col_names[-6:] logger.info(sf.Color.BOLD + sf.Color.GREEN + "Sample data:" + sf.Color.END) logger.info(churn_df[to_show].head(6)) # Isolate target data churn_result = churn_df['Churn?'] y = np.where(churn_result == 'True.', 1, 0) logger.debug(y) # We don't need these columns. Index is created only when do a partial split if 'index' in col_names: to_drop = ['index', 'Area Code', 'Phone', 'Churn?'] else: to_drop = ['Area Code', 'Phone', 'Churn?'] churn_feat_space = churn_df.drop(to_drop, axis=1) # 'yes'/'no' has to be converted to boolean values # NumPy converts these from boolean to 1. and 0. 
later yes_no_cols = ["Int'l Plan", "VMail Plan"] churn_feat_space[yes_no_cols] = (churn_feat_space[yes_no_cols] == 'yes') # Below segment replaces column 'State' with a number for each state (alphabetically sorted) # separate state into it's own df as it's easier to operate on later churn_feat_state = churn_feat_space[['State']] state = np.unique(churn_feat_state) for index, row in churn_feat_state.iterrows(): churn_feat_state.iat[index, 0] = int(np.where(state == row['State'])[0]) churn_feat_space['State'] = churn_feat_state # logger.debug(churn_feat_space['State']) feature_names = churn_feat_space.columns.tolist() logger.debug(feature_names) x = churn_feat_space.as_matrix().astype(np.float) if feature_scaling: # Feature scaling and normalization scaler = StandardScaler() x = scaler.fit_transform(x) logger.debug(x) y = np.array(y) logger.info("Feature space holds %d observations and %d features" % x.shape) logger.info("Unique target labels: ") logger.info(np.unique(y)) return [x, y] #################################################################################################### if __name__ == "__main__": start_time = time.time() # Create precision_recall-curve? prec_recall_plot = True # Choose models for the ensemble. Uncomment to choose model needed estimator_model0 = RandomForest estimator_keywords_model0 = dict(n_estimators=1000, verbose=0, criterion='entropy', n_jobs=-1, max_features=5, class_weight='auto') estimator_model1 = GradBoost estimator_keywords_model1 = dict(n_estimators=1000, loss='deviance', learning_rate=0.01, verbose=0, max_depth=5, subsample=1.0) model_names_list = dict(model0=estimator_model0, model1=estimator_model1) model_parameters_list = dict(model0=estimator_keywords_model0, model1=estimator_keywords_model1) [input_features, output] = telecom_churn(use_synthetic_data=False, feature_scaling=True) # ensemble_models.majority_voting(input_features, output, model_names_list, model_parameters_list, # run_cv_flag=True, num_model_iterations=1, plot_learning_curve=False, # run_prob_predictions=True, classification_threshold=0.45) if prec_recall_plot: # Divide 0 and 0.9 by 21 equally distributed values (including both). 
# Ignoring 1.0 as it has Fbeta_score of 0 num_of_thresholds = np.linspace(0, 0.9, 21) threshold = np.zeros((len(num_of_thresholds), 1), dtype=float) precision = np.zeros((len(num_of_thresholds), 1), dtype=float) recall = np.zeros((len(num_of_thresholds), 1), dtype=float) fbeta_score = np.zeros((len(num_of_thresholds), 1), dtype=float) idx = 0 for classification_threshold in num_of_thresholds: prec_recall = ensemble_models.average_prob(input_features, output, model_names_list, model_parameters_list, run_cv_flag=False, num_model_iterations=1, plot_learning_curve=False, run_prob_predictions=True, classification_threshold=classification_threshold) threshold[idx] = classification_threshold precision[idx] = round(prec_recall[0] * 100) # Convert to % recall[idx] = round(prec_recall[1] * 100) fbeta_score[idx] = round(prec_recall[2] * 100) idx += 1 # Call function for plotting vis = visualization.Plots() vis.basic_2d_plot(x=threshold, y=(precision, recall, fbeta_score), legends=("Precision", "Recall", "Fbeta_score (beta=2)"), title="Precision Recall Curve", xaxis_label="Classification Threshold", yaxis_label="Score %") ################################## # Other model # estimator = SVC # estimator_keywords = dict(C=1, kernel='rbf', class_weight='auto') # estimator_model2 = LogReg # estimator_keywords_model2 = dict(solver='liblinear') # Neural network # estimator = NeuralNetClassifier # estimator_keywords = dict(layers=[NeuralNetLayer("Rectifier", units=64), NeuralNetLayer("Rectifier", units=32), # NeuralNetLayer("Softmax")], # learning_rate=0.001, n_iter=50) ################################## print("Total time: %0.3f" % float(time.time() - start_time))
mit
-5,764,745,351,355,387,000
37.548544
119
0.581287
false
3.956652
false
false
false
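Editor's note: the preprocessing in telecom_churn.py converts 'yes'/'no' columns to booleans, maps state names to alphabetical integer indices, derives the target from the 'Churn?' string, and standardizes features. A compact sketch of those steps on a toy DataFrame follows; the column values are invented and .values replaces the deprecated as_matrix() call used in the record.

import numpy as np
import pandas as pd
from sklearn.preprocessing import StandardScaler

df = pd.DataFrame({
    "State": ["WA", "OR", "WA"],
    "Int'l Plan": ["no", "yes", "no"],
    "Day Mins": [180.0, 210.5, 95.2],
    "Churn?": ["False.", "True.", "False."],
})

# Target: 1 where the churn label is the literal string 'True.'
y = np.where(df.pop("Churn?") == "True.", 1, 0)

# yes/no -> boolean, state name -> alphabetical index (as the record does).
df["Int'l Plan"] = df["Int'l Plan"] == "yes"
states = np.unique(df["State"])
df["State"] = [int(np.where(states == s)[0][0]) for s in df["State"]]

# Standardize to zero mean / unit variance before fitting a classifier.
x = StandardScaler().fit_transform(df.values.astype(float))
print(x.shape, y)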
davy39/eric
Preferences/ConfigurationPages/Ui_EditorAPIsPage.py
1
7873
# -*- coding: utf-8 -*- # Form implementation generated from reading ui file './Preferences/ConfigurationPages/EditorAPIsPage.ui' # # Created: Tue Nov 18 17:53:57 2014 # by: PyQt5 UI code generator 5.3.2 # # WARNING! All changes made in this file will be lost! from PyQt5 import QtCore, QtGui, QtWidgets class Ui_EditorAPIsPage(object): def setupUi(self, EditorAPIsPage): EditorAPIsPage.setObjectName("EditorAPIsPage") EditorAPIsPage.resize(462, 422) self.vboxlayout = QtWidgets.QVBoxLayout(EditorAPIsPage) self.vboxlayout.setObjectName("vboxlayout") self.headerLabel = QtWidgets.QLabel(EditorAPIsPage) self.headerLabel.setObjectName("headerLabel") self.vboxlayout.addWidget(self.headerLabel) self.line5 = QtWidgets.QFrame(EditorAPIsPage) self.line5.setFrameShape(QtWidgets.QFrame.HLine) self.line5.setFrameShadow(QtWidgets.QFrame.Sunken) self.line5.setFrameShape(QtWidgets.QFrame.HLine) self.line5.setFrameShadow(QtWidgets.QFrame.Sunken) self.line5.setObjectName("line5") self.vboxlayout.addWidget(self.line5) self.apiAutoPrepareCheckBox = QtWidgets.QCheckBox(EditorAPIsPage) self.apiAutoPrepareCheckBox.setObjectName("apiAutoPrepareCheckBox") self.vboxlayout.addWidget(self.apiAutoPrepareCheckBox) self.hboxlayout = QtWidgets.QHBoxLayout() self.hboxlayout.setObjectName("hboxlayout") self.TextLabel1_3_3 = QtWidgets.QLabel(EditorAPIsPage) self.TextLabel1_3_3.setToolTip("") self.TextLabel1_3_3.setObjectName("TextLabel1_3_3") self.hboxlayout.addWidget(self.TextLabel1_3_3) self.apiLanguageComboBox = QtWidgets.QComboBox(EditorAPIsPage) sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Fixed) sizePolicy.setHorizontalStretch(0) sizePolicy.setVerticalStretch(0) sizePolicy.setHeightForWidth(self.apiLanguageComboBox.sizePolicy().hasHeightForWidth()) self.apiLanguageComboBox.setSizePolicy(sizePolicy) self.apiLanguageComboBox.setObjectName("apiLanguageComboBox") self.hboxlayout.addWidget(self.apiLanguageComboBox) self.vboxlayout.addLayout(self.hboxlayout) self.apiGroup = QtWidgets.QGroupBox(EditorAPIsPage) self.apiGroup.setEnabled(False) self.apiGroup.setObjectName("apiGroup") self.gridlayout = QtWidgets.QGridLayout(self.apiGroup) self.gridlayout.setObjectName("gridlayout") self.apiList = QtWidgets.QListWidget(self.apiGroup) self.apiList.setAlternatingRowColors(True) self.apiList.setObjectName("apiList") self.gridlayout.addWidget(self.apiList, 0, 0, 1, 4) self.deleteApiFileButton = QtWidgets.QPushButton(self.apiGroup) self.deleteApiFileButton.setObjectName("deleteApiFileButton") self.gridlayout.addWidget(self.deleteApiFileButton, 1, 0, 1, 1) self.addApiFileButton = QtWidgets.QPushButton(self.apiGroup) self.addApiFileButton.setObjectName("addApiFileButton") self.gridlayout.addWidget(self.addApiFileButton, 1, 1, 1, 1) self.apiFileEdit = QtWidgets.QLineEdit(self.apiGroup) self.apiFileEdit.setObjectName("apiFileEdit") self.gridlayout.addWidget(self.apiFileEdit, 1, 2, 1, 1) self.addInstalledApiFileButton = QtWidgets.QPushButton(self.apiGroup) self.addInstalledApiFileButton.setObjectName("addInstalledApiFileButton") self.gridlayout.addWidget(self.addInstalledApiFileButton, 2, 1, 1, 3) self.addPluginApiFileButton = QtWidgets.QPushButton(self.apiGroup) self.addPluginApiFileButton.setObjectName("addPluginApiFileButton") self.gridlayout.addWidget(self.addPluginApiFileButton, 3, 1, 1, 3) self.line = QtWidgets.QFrame(self.apiGroup) self.line.setFrameShape(QtWidgets.QFrame.HLine) self.line.setFrameShadow(QtWidgets.QFrame.Sunken) self.line.setObjectName("line") 
self.gridlayout.addWidget(self.line, 4, 0, 1, 4) self.prepareApiButton = QtWidgets.QPushButton(self.apiGroup) self.prepareApiButton.setObjectName("prepareApiButton") self.gridlayout.addWidget(self.prepareApiButton, 5, 0, 1, 2) self.prepareApiProgressBar = QtWidgets.QProgressBar(self.apiGroup) self.prepareApiProgressBar.setProperty("value", 0) self.prepareApiProgressBar.setTextVisible(False) self.prepareApiProgressBar.setOrientation(QtCore.Qt.Horizontal) self.prepareApiProgressBar.setObjectName("prepareApiProgressBar") self.gridlayout.addWidget(self.prepareApiProgressBar, 5, 2, 1, 2) self.apiFileButton = QtWidgets.QToolButton(self.apiGroup) self.apiFileButton.setObjectName("apiFileButton") self.gridlayout.addWidget(self.apiFileButton, 1, 3, 1, 1) self.vboxlayout.addWidget(self.apiGroup) self.retranslateUi(EditorAPIsPage) QtCore.QMetaObject.connectSlotsByName(EditorAPIsPage) EditorAPIsPage.setTabOrder(self.apiAutoPrepareCheckBox, self.apiLanguageComboBox) EditorAPIsPage.setTabOrder(self.apiLanguageComboBox, self.apiList) EditorAPIsPage.setTabOrder(self.apiList, self.deleteApiFileButton) EditorAPIsPage.setTabOrder(self.deleteApiFileButton, self.apiFileEdit) EditorAPIsPage.setTabOrder(self.apiFileEdit, self.apiFileButton) EditorAPIsPage.setTabOrder(self.apiFileButton, self.addApiFileButton) EditorAPIsPage.setTabOrder(self.addApiFileButton, self.addInstalledApiFileButton) EditorAPIsPage.setTabOrder(self.addInstalledApiFileButton, self.addPluginApiFileButton) EditorAPIsPage.setTabOrder(self.addPluginApiFileButton, self.prepareApiButton) def retranslateUi(self, EditorAPIsPage): _translate = QtCore.QCoreApplication.translate self.headerLabel.setText(_translate("EditorAPIsPage", "<b>Configure API files</b>")) self.apiAutoPrepareCheckBox.setToolTip(_translate("EditorAPIsPage", "Select to compile the APIs automatically upon loading")) self.apiAutoPrepareCheckBox.setText(_translate("EditorAPIsPage", "Compile APIs automatically")) self.TextLabel1_3_3.setText(_translate("EditorAPIsPage", "Language:")) self.apiLanguageComboBox.setToolTip(_translate("EditorAPIsPage", "Select the language to be configured.")) self.apiGroup.setTitle(_translate("EditorAPIsPage", "APIs")) self.apiList.setToolTip(_translate("EditorAPIsPage", "List of API files")) self.deleteApiFileButton.setToolTip(_translate("EditorAPIsPage", "Press to delete the selected file from the list")) self.deleteApiFileButton.setText(_translate("EditorAPIsPage", "Delete")) self.addApiFileButton.setToolTip(_translate("EditorAPIsPage", "Press to add the entered file to the list")) self.addApiFileButton.setText(_translate("EditorAPIsPage", "Add")) self.apiFileEdit.setToolTip(_translate("EditorAPIsPage", "Enter a file to be added")) self.addInstalledApiFileButton.setToolTip(_translate("EditorAPIsPage", "Press to select an API file from the list of installed API files")) self.addInstalledApiFileButton.setText(_translate("EditorAPIsPage", "Add from installed APIs")) self.addPluginApiFileButton.setToolTip(_translate("EditorAPIsPage", "Press to select an API file from the list of API files installed by plugins")) self.addPluginApiFileButton.setText(_translate("EditorAPIsPage", "Add from Plugin APIs")) self.prepareApiButton.setToolTip(_translate("EditorAPIsPage", "Press to compile the selected APIs definition")) self.prepareApiButton.setText(_translate("EditorAPIsPage", "Compile APIs")) self.apiFileButton.setToolTip(_translate("EditorAPIsPage", "Press to select an API file via a selection dialog"))
gpl-3.0
-3,366,846,485,868,375,600
63.532787
155
0.742665
false
3.884065
false
false
false
ozsolarwind/siren
powermodel.py
1
132240
#!/usr/bin/python3 # # Copyright (C) 2015-2020 Sustainable Energy Now Inc., Angus King # # powermodel.py - This file is part of SIREN. # # SIREN is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of # the License, or (at your option) any later version. # # SIREN is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General # Public License along with SIREN. If not, see # <http://www.gnu.org/licenses/>. # from math import asin, ceil, cos, fabs, floor, log10, pow, radians, sin, sqrt import pylab as plt from matplotlib.font_manager import FontProperties import numpy as np import csv import openpyxl as oxl import os import sys import ssc import time import xlrd import xlwt import configparser # decode .ini file from PyQt5 import Qt, QtCore, QtGui, QtWidgets from senuser import getUser, techClean import displayobject import displaytable from editini import SaveIni from getmodels import getModelFile from grid import Grid from parents import getParents from powerclasses import * from superpower import SuperPower from sirenicons import Icons # import Station from turbine import Turbine from visualise import Visualise from zoompan import ZoomPanX the_days = [31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31] class PowerModel(): powerExit = QtCore.pyqtSignal(str) # barProgress = QtCore.pyqtSignal(int, str) # barRange = QtCore.pyqtSignal(int, int) def showGraphs(self, ydata, x): def shrinkKey(key): remove = ['Biomass', 'Community', 'Farm', 'Fixed', 'Geothermal', 'Hydro', 'Pumped', 'PV', 'Rooftop', 'Solar', 'Station', 'Thermal', 'Tracking', 'Wave', 'Wind'] oukey = key for i in range(len(remove)): oukey = oukey.replace(remove[i], '') oukey = ' '.join(oukey.split()) if oukey == '' or oukey == 'Existing': return key else: return oukey def stepPlot(self, period, data, x_labels=None): k1 = list(data.keys())[0] if self.plots['cumulative']: pc = 1 else: pc = 0 if self.plots['gross_load']: pc += 1 if self.plots['shortfall']: pc += 1 fig = plt.figure(self.hdrs['by_' + period].title() + self.suffix) plt.grid(True) bbdx = fig.add_subplot(111) plt.title(self.hdrs['by_' + period].title() + self.suffix) maxy = 0 miny = 0 xs = [] for i in range(len(data[k1]) + 1): xs.append(i) if self.plots['save_plot']: sp_data = [] sp_data.append(xs[1:]) if period == 'day': sp_vals = [period, 'Date'] sp_data.append([]) mm = 0 dy = 1 for d in range(1, len(xs)): sp_data[-1].append('%s-%s-%s' % (self.load_year, str(mm + 1).zfill(2), str(dy).zfill(2))) dy += 1 if dy > the_days[mm]: mm += 1 dy = 1 else: sp_vals = ['No.', period] sp_data.append(x_labels) if self.plots['cumulative']: cumulative = [0.] for i in range(len(data[k1])): cumulative.append(0.) load = [] i = -1 storage = None if self.plots['show_pct']: load_sum = 0. gen_sum = 0. for key, value in iter(sorted(ydata.items())): if key == 'Generation': continue dval = [0.] 
if self.plots['show_pct']: for d in range(len(data[key])): if key[:4] == 'Load': for k in range(len(data[key][d])): load_sum += data[key][d][k] elif key == 'Storage': pass else: for k in range(len(data[key][d])): gen_sum += data[key][d][k] if self.plots['gross_load'] and key == 'Existing Rooftop PV': load_sum += data[key][d][k] for d in range(len(data[key])): dval.append(0.) for k in range(len(data[key][0])): dval[-1] += data[key][d][k] / 1000 maxy = max(maxy, max(dval)) lw = self.other_width if self.plots['cumulative'] and key[:4] != 'Load' and key != 'Storage': lw = 1.0 for j in range(len(xs)): cumulative[j] += dval[j] bbdx.step(xs, dval, linewidth=lw, label=shrinkKey(key), color=self.colours[key], linestyle=self.linestyle[key]) if self.plots['save_plot']: sp_vals.append(shrinkKey(key)) sp_data.append(dval[1:]) if (self.plots['shortfall'] or self.plots['show_load']) and key[:4] == 'Load': load = dval[:] if self.plots['shortfall'] and key == 'Storage': storage = dval[:] if self.plots['show_pct']: self.gen_pct = ' (%s%% of load)' % '{:0,.1f}'.format(gen_sum * 100. / load_sum) plt.title(self.hdrs['by_' + period].title() + self.suffix + self.gen_pct) if self.plots['cumulative']: bbdx.step(xs, cumulative, linewidth=self.other_width, label='Tot. Generation', color=self.colours['cumulative']) maxy = max(maxy, max(cumulative)) if self.plots['save_plot']: sp_vals.append('Tot. Generation') sp_data.append(cumulative[1:]) try: rndup = pow(10, round(log10(maxy * 1.5) - 1)) / 2 maxy = ceil(maxy / rndup) * rndup except: pass if (self.plots['shortfall'] and self.do_load): load2 = [] if storage is None: for i in range(len(cumulative)): load2.append(cumulative[i] - load[i]) if load2[-1] < miny: miny = load2[-1] else: for i in range(len(cumulative)): load2.append(cumulative[i] + storage[i] - load[i]) if load2[-1] < miny: miny = load2[-1] bbdx.step(xs, load2, linewidth=self.other_width, label='Shortfall', color=self.colours['shortfall']) plt.axhline(0, color='black') if rndup != 0 and miny < 0: miny = -ceil(-miny / rndup) * rndup if self.plots['save_plot']: sp_vals.append('Shortfall') sp_data.append(load2[1:]) else: miny = 0 if self.plots['save_plot']: titl = 'By_' + period decpts = [3] * len(sp_vals) decpts[0] = decpts[1] = 0 dialog = displaytable.Table(list(map(list, list(zip(*sp_data)))), title=titl, fields=sp_vals, save_folder=self.scenarios, decpts=decpts) dialog.exec_() del dialog, sp_data, sp_vals plt.ylim([miny, maxy]) plt.xlim([0, len(data[k1])]) if (len(ydata) + pc) > 9: # Shrink current axis by 5% box = bbdx.get_position() bbdx.set_position([box.x0, box.y0, box.width * 0.95, box.height]) # Put a legend to the right of the current axis bbdx.legend(loc='center left', bbox_to_anchor=(1, 0.5), prop=lbl_font) else: bbdx.legend(bbox_to_anchor=[0.5, -0.1], loc='center', ncol=(len(ydata) + pc), prop=lbl_font) rotn = 'horizontal' if len(data[k1]) > 12: stp = 7 rotn = 'vertical' else: stp = 1 plt.xticks(list(range(0, len(data[k1]), stp))) tick_spot = [] for i in range(0, len(data[k1]), stp): tick_spot.append(i + .5) bbdx.set_xticks(tick_spot) bbdx.set_xticklabels(x_labels, rotation=rotn) bbdx.set_xlabel(period.title() + ' of the year') bbdx.set_ylabel('Energy (MWh)') if self.plots['maximise']: mng = plt.get_current_fig_manager() if sys.platform == 'win32' or sys.platform == 'cygwin': if plt.get_backend() == 'TkAgg': mng.window.state('zoomed') elif plt.get_backend() == 'Qt4Agg': mng.window.showMaximized() else: mng.resize(*mng.window.maxsize()) if self.plots['block']: plt.show(block=True) else: plt.draw() 
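        # stepPlot (above) draws aggregated energy (MWh) per day/month/season/period
        # as a step chart, with optional cumulative-generation and shortfall overlays;
        # dayPlot (below) draws the averaged 24-hour power (MW) profile for each
        # month/season/period in a grid of subplots.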
def dayPlot(self, period, data, per_labels=None, x_labels=None): k1 = list(data.keys())[0] if self.plots['cumulative']: pc = 1 else: pc = 0 if self.plots['gross_load']: pc += 1 if self.plots['shortfall']: pc += 1 hdr = self.hdrs[period].replace('Power - ', '') plt.figure(hdr + self.suffix) plt.suptitle(self.hdrs[period] + self.suffix, fontsize=16) maxy = 0 miny = 0 if len(data[k1]) > 9: p1 = 3 p2 = 4 xl = 8 yl = [0, 4, 8] elif len(data[k1]) > 6: p1 = 3 p2 = 3 xl = 6 yl = [0, 3, 6] elif len(data[k1]) > 4: p1 = 2 p2 = 3 xl = 3 yl = [0, 3] elif len(data[k1]) > 2: p1 = 2 p2 = 2 xl = 2 yl = [0, 2] else: p1 = 1 p2 = 2 xl = 0 yl = [0, 1] for key in list(data.keys()): for p in range(len(data[key])): maxy = max(maxy, max(data[key][p])) if self.plots['show_pct']: load_sum = [] gen_sum = [] for p in range(len(data[k1])): load_sum.append(0.) gen_sum.append(0.) for p in range(len(data[k1])): if self.plots['cumulative']: cumulative = [] for i in range(len(x24)): cumulative.append(0.) if self.plots['gross_load']: gross_load = [] for i in range(len(x24)): gross_load.append(0.) px = plt.subplot(p1, p2, p + 1) l_k = '' for key, value in iter(sorted(ydata.items())): if key == 'Generation': continue if key[:4] == 'Load': l_k = key if self.plots['show_pct']: for d in range(len(data[key][p])): if key[:4] == 'Load': load_sum[p] += data[key][p][d] elif key == 'Storage': pass else: gen_sum[p] += data[key][p][d] if self.plots['gross_load'] and key == 'Existing Rooftop PV': load_sum[p] += data[key][p][d] lw = self.other_width if self.plots['cumulative'] and key[:4] != 'Load' and key != 'Storage': lw = 1.0 for j in range(len(x24)): cumulative[j] += data[key][p][j] if self.plots['gross_load'] and (key[:4] == 'Load' or key == 'Existing Rooftop PV'): for j in range(len(x24)): gross_load[j] += data[key][p][j] px.plot(x24, data[key][p], linewidth=lw, label=shrinkKey(key), color=self.colours[key], linestyle=self.linestyle[key]) plt.title(per_labels[p]) if self.plots['cumulative']: px.plot(x24, cumulative, linewidth=self.other_width, label='Tot. Generation', color=self.colours['cumulative']) maxy = max(maxy, max(cumulative)) if self.plots['gross_load'] and 'Existing Rooftop PV' in list(ydata.keys()): px.plot(x24, gross_load, linewidth=1.0, label='Gross Load', color=self.colours['gross_load']) maxy = max(maxy, max(gross_load)) if self.plots['shortfall'] and self.do_load: load2 = [] for i in range(len(x24)): load2.append(cumulative[i] - data[l_k][p][i]) miny = min(miny, min(load2)) px.plot(x24, load2, linewidth=self.other_width, label='Shortfall', color=self.colours['shortfall']) plt.axhline(0, color='black') plt.xticks(list(range(4, 25, 4))) px.set_xticklabels(x_labels[1:]) plt.xlim([1, 24]) if p >= xl: px.set_xlabel('Hour of the Day') if p in yl: px.set_ylabel('Power (MW)') try: rndup = pow(10, round(log10(maxy * 1.5) - 1)) / 2 maxy = ceil(maxy / rndup) * rndup except: pass if self.plots['shortfall']: if rndup != 0 and miny < 0: miny = -ceil(-miny / rndup) * rndup for p in range(len(data[k1])): px = plt.subplot(p1, p2, p + 1) plt.ylim([miny, maxy]) plt.xlim([1, 24]) if self.plots['show_pct']: pct = ' (%s%%)' % '{:0,.1f}'.format(gen_sum[p] * 100. 
/ load_sum[p]) titl = px.get_title() px.set_title(titl + pct) # px.annotate(pct, xy=(1.0, 3.0)) px = plt.subplot(p1, p2, len(data[k1])) # px.legend(bbox_to_anchor=[1., -0.15], loc='best', ncol=min((len(ly) + pc), 9), # prop=lbl_font) if (len(ydata) + pc) > 9: if len(data[k1]) > 9: do_in = [1, 5, 9, 2, 6, 10, 3, 7, 11, 4, 8, 12] elif len(data[k1]) > 6: do_in = [1, 4, 7, 2, 5, 8, 3, 6, 9] elif len(data[k1]) > 4: do_in = [1, 4, 2, 5, 3, 6] elif len(data[k1]) > 2: do_in = [1, 3, 2, 4] else: do_in = [1, 2] do_in = do_in[:len(data[k1])] for i in range(len(do_in)): px = plt.subplot(p1, p2, do_in[i]) # Shrink current axis by 5% box = px.get_position() px.set_position([box.x0, box.y0, box.width * 0.95, box.height]) # Put a legend to the right of the current axis px.legend(loc='center left', bbox_to_anchor=(1, 0.5), prop=lbl_font) else: px.legend(bbox_to_anchor=[0.5, -0.1], loc='center', ncol=(len(ydata) + pc), prop=lbl_font) if self.plots['show_pct']: for p in range(1, len(gen_sum)): load_sum[0] += load_sum[p] gen_sum[0] += gen_sum[p] self.gen_pct = ' (%s%%) of load)' % '{:0,.1f}'.format(gen_sum[0] * 100. / load_sum[0]) titl = px.get_title() plt.suptitle(self.hdrs[period] + self.suffix + self.gen_pct, fontsize=16) if self.plots['maximise']: mng = plt.get_current_fig_manager() if sys.platform == 'win32' or sys.platform == 'cygwin': if plt.get_backend() == 'TkAgg': mng.window.state('zoomed') elif plt.get_backend() == 'Qt4Agg': mng.window.showMaximized() else: mng.resize(*mng.window.maxsize()) if self.plots['block']: plt.show(block=True) else: plt.draw() def saveBalance(self, shortstuff): data_file = 'Powerbalance_data_%s.xls' % ( QtCore.QDateTime.toString(QtCore.QDateTime.currentDateTime(), 'yyyy-MM-dd_hhmm')) data_file = QtWidgets.QFileDialog.getSaveFileName(None, 'Save Powerbalance data file', self.scenarios + data_file, 'Excel Files (*.xls*);;CSV Files (*.csv)')[0] if data_file != '': if data_file[-4:] == '.csv' or data_file[-4:] == '.xls' \ or data_file[-5:] == '.xlsx': pass else: data_file += '.xls' if os.path.exists(data_file): if os.path.exists(data_file + '~'): os.remove(data_file + '~') os.rename(data_file, data_file + '~') stns = {} techs = {} for i in range(len(self.power_summary)): stns[self.power_summary[i].name] = i techs[self.power_summary[i].technology] = [0., 0., 0.] if data_file[-4:] == '.csv': tf = open(data_file, 'w') line = 'Generation Summary Table' tf.write(line + '\n') line = 'Name,Technology,Capacity (MW),CF,Generation' if getattr(self.power_summary[0], 'transmitted') != None: line += ',Transmitted' tf.write(line + '\n') for key, value in iter(sorted(stns.items())): if self.power_summary[value].generation > 0: cf = '{:0.2f}'.format(self.power_summary[value].generation / (self.power_summary[value].capacity * 8760)) else: cf = '' if self.power_summary[value].transmitted is not None: ts = '{:0.2f}'.format(self.power_summary[value].transmitted) techs[self.power_summary[value].technology][2] += self.power_summary[value].transmitted else: ts = '' line = '"%s",%s,%s,%s,%s,%s' % (self.power_summary[value].name, self.power_summary[value].technology, '{:0.2f}'.format(self.power_summary[value].capacity), cf, '{:0.0f}'.format(self.power_summary[value].generation), ts) techs[self.power_summary[value].technology][0] += self.power_summary[value].capacity techs[self.power_summary[value].technology][1] += self.power_summary[value].generation tf.write(line + '\n') total = [0., 0., 0.] 
for key, value in iter(sorted(techs.items())): total[0] += value[0] total[1] += value[1] if value[2] > 0: v2 = ',{:0.0f}'.format(value[2]) total[2] += value[2] else: v2 = '' line = ',%s,%s,,%s%s' % (key, '{:0.2f}'.format(value[0]), '{:0.0f}'.format(value[1]), v2) tf.write(line + '\n') if total[2] > 0: v2 = ',{:0.0f}'.format(total[2]) total[2] += value[2] else: v2 = '' line = ',Total,%s,,%s%s' % ('{:0.2f}'.format(total[0]), '{:0.0f}'.format(total[1]), v2) tf.write(line + '\n') line = '\nHourly Shortfall Table' tf.write(line + '\n') line = 'Hour,Period,Shortfall' tf.write(line + '\n') for i in range(len(shortstuff)): line = '%s,%s,%s' % (str(shortstuff[i].hour), shortstuff[i].period, '{:0.2f}'.format(shortstuff[i].shortfall)) tf.write(line + '\n') tf.close() else: wb = xlwt.Workbook() fnt = xlwt.Font() fnt.bold = True styleb = xlwt.XFStyle() styleb.font = fnt style2d = xlwt.XFStyle() style2d.num_format_str = '#,##0.00' style0d = xlwt.XFStyle() style0d.num_format_str = '#,##0' pattern = xlwt.Pattern() pattern.pattern = xlwt.Pattern.SOLID_PATTERN pattern.pattern_fore_colour = xlwt.Style.colour_map['ice_blue'] style2db = xlwt.XFStyle() style2db.num_format_str = '#,##0.00' style2db.pattern = pattern style0db = xlwt.XFStyle() style0db.num_format_str = '#,##0' style0db.pattern = pattern ws = wb.add_sheet('Powermatch') xl_lens = {} row = 0 col = 0 ws.write(row, col, 'Hourly Shortfall Table', styleb) row += 1 shrt_cols = ['Hour', 'Period', 'Shortfall'] for i in range(len(shrt_cols)): ws.write(row, col + i, shrt_cols[i], styleb) xl_lens[col + i] = 0 row += 1 for i in range(len(shortstuff)): ws.write(row, col, shortstuff[i].hour) ws.write(row, col + 1, shortstuff[i].period) xl_lens[col + 1] = max(xl_lens[col + 1], len(shortstuff[i].period)) ws.write(row, col + 2, shortstuff[i].shortfall, style2db) row += 1 row = 0 col = len(shrt_cols) + 1 ws.write(row, col, 'Generation Summary Table', styleb) sum_cols = ['Name', 'Technology', 'Capacity (MW)', 'CF', 'Generated\n(to be\ncosted)'] if getattr(self.power_summary[0], 'transmitted') != None: sum_cols.append('Transmitted\n(reduces\nShortfall)') for i in range(len(sum_cols)): ws.write(1, col + i, sum_cols[i], styleb) j = sum_cols[i].find('\n') - 1 if j < 0: j = len(sum_cols[i]) xl_lens[col + i] = j for key, value in iter(stns.items()): techs[self.power_summary[value].technology][0] += self.power_summary[value].capacity techs[self.power_summary[value].technology][1] += self.power_summary[value].generation if self.power_summary[value].transmitted is not None: techs[self.power_summary[value].technology][2] += self.power_summary[value].transmitted total = [0., 0., 0.] 
row = 2 ws.write(row, col, 'Totals', styleb) row += 1 for key, value in iter(sorted(techs.items())): ws.write(row, col + 1, key) ws.write(row, col + 2, value[0], style2db) total[0] += value[0] ws.write(row, col + 4, value[1], style0db) total[1] += value[1] if value[2] > 0: ws.write(row, col + 5, value[2], style0d) total[2] += value[2] row += 1 ws.write(row, col + 1, 'Total', styleb) ws.write(row, col + 2, total[0], style2db) ws.write(row, col + 4, total[1], style0db) if total[2] > 0: ws.write(row, col + 5, total[2], style0d) row += 1 ws.write(row, col, 'Stations', styleb) row += 1 for key, value in iter(sorted(stns.items())): ws.write(row, col, self.power_summary[value].name) xl_lens[col] = max(xl_lens[col], len(self.power_summary[value].name)) ws.write(row, col + 1, self.power_summary[value].technology) xl_lens[col + 1] = max(xl_lens[col + 1], len(self.power_summary[value].technology)) ws.write(row, col + 2, self.power_summary[value].capacity, style2d) if self.power_summary[value].generation > 0: ws.write(row, col + 3, self.power_summary[value].generation / (self.power_summary[value].capacity * 8760), style2d) ws.write(row, col + 4, self.power_summary[value].generation, style0d) if self.power_summary[value].transmitted is not None: ws.write(row, col + 5, self.power_summary[value].transmitted, style0d) row += 1 for key in xl_lens: if xl_lens[key] * 275 > ws.col(key).width: ws.col(key).width = xl_lens[key] * 275 ws.row(1).height_mismatch = True ws.row(1).height = 256 * 3 ws.set_panes_frozen(True) ws.set_horz_split_pos(2) ws.set_remove_splits(True) wb.save(data_file) def saveMatch(self, shortstuff): def cell_format(cell, new_cell): if cell.has_style: new_cell.number_format = cell.number_format # for i in range(len(shortstuff)): # ws.write(row, col, shortstuff[i].hour) ts = oxl.load_workbook(self.pm_template) ws = ts.active type_tags = ['name', 'tech', 'cap', 'cf', 'gen', 'tmit', 'hrly'] tech_tags = ['load', 'wind', 'offw', 'roof', 'fixed', 'single', 'dual', 'biomass', 'geotherm', 'other1', 'cst'] tech_names = ['Load', 'Wind', 'Offshore Wind', 'Rooftop PV', 'Fixed PV', 'Tracking PV', 'Dual Axis PV', 'Biomass', 'Geothermal', 'Other1', 'CST'] tech_names2 = [''] * len(tech_names) tech_names2[tech_names.index('Wind')] = 'Onshore Wind' tech_names2[tech_names.index('Tracking PV')] = 'Dual Axis PV' tech_names2[tech_names.index('CST')] = 'Solar Thermal' st_row = [] st_col = [] tech_row = [] tech_col = [] for i in range(len(type_tags)): st_row.append(0) st_col.append(0) for j in range(len(tech_tags)): tech_row.append([]) tech_col.append([]) for i in range(len(type_tags)): tech_row[-1].append(0) tech_col[-1].append(0) per_row = [0, 0] per_col= [0, 0] for row in range(1, ws.max_row + 1): for col in range(1, ws.max_column + 1): try: if ws.cell(row=row, column=col).value[0] != '<': continue if ws.cell(row=row, column=col).value == '<title>': titl = '' for stn in self.stations: if stn.scenario not in titl: titl += stn.scenario + '; ' try: titl = titl[:-2] titl = titl.replace('.xls', '') ws.cell(row=row, column=col).value = titl except: ws.cell(row=row, column=col).value = None elif ws.cell(row=row, column=col).value == '<date>': dte = QtCore.QDateTime.toString(QtCore.QDateTime.currentDateTime(), 'yyyy-MM-dd hh:mm') ws.cell(row=row, column=col).value = dte elif ws.cell(row=row, column=col).value == '<period>': per_row[1] = row per_col[1] = col ws.cell(row=row, column=col).value = None elif ws.cell(row=row, column=col).value == '<hour>': per_row[0] = row per_col[0] = col ws.cell(row=row, column=col).value = 
None elif ws.cell(row=row, column=col).value == '<year>': ws.cell(row=row, column=col).value = str(self.base_year) elif ws.cell(row=row, column=col).value == '<growth>': if self.load_multiplier != 0: ws.cell(row=row, column=col).value = self.load_multiplier else: ws.cell(row=row, column=col).value = None elif ws.cell(row=row, column=col).value[:5] == '<stn_': bit = ws.cell(row=row, column=col).value[:-1].split('_') ty = type_tags.index(bit[-1]) st_row[ty] = row st_col[ty] = col ws.cell(row=row, column=col).value = None elif ws.cell(row=row, column=col).value.find('_') > 0: bit = ws.cell(row=row, column=col).value[1:-1].split('_') te = tech_tags.index(bit[0]) ty = type_tags.index(bit[-1]) tech_row[te][ty] = row tech_col[te][ty] = col ws.cell(row=row, column=col).value = None except: pass data_file = 'Powermatch_data_%s.xlsx' % ( QtCore.QDateTime.toString(QtCore.QDateTime.currentDateTime(), 'yyyy-MM-dd_hhmm')) data_file = QtWidgets.QFileDialog.getSaveFileName(None, 'Save Powermatch data file', self.scenarios + data_file, 'Excel Files (*.xlsx)')[0] if data_file == '': return if data_file[-5:] != '.xlsx': data_file += '.xlsx' if os.path.exists(data_file): if os.path.exists(data_file + '~'): os.remove(data_file + '~') os.rename(data_file, data_file + '~') stns = {} techs = {} for i in range(len(self.power_summary)): stns[self.power_summary[i].name] = i techs[self.power_summary[i].technology] = [0., 0., 0.] st = 0 for key, value in iter(sorted(stns.items())): ws.cell(row=st_row[0] + st, column=st_col[0]).value = self.power_summary[value].name ws.cell(row=st_row[1] + st, column=st_col[1]).value = self.power_summary[value].technology ws.cell(row=st_row[2] + st, column=st_col[2]).value = self.power_summary[value].capacity if self.power_summary[value].generation > 0: ws.cell(row=st_row[3] + st, column=st_col[3]).value = self.power_summary[value].generation / \ (self.power_summary[value].capacity * 8760) ws.cell(row=st_row[4] + st, column=st_col[4]).value = self.power_summary[value].generation if self.power_summary[value].transmitted is not None: ws.cell(row=st_row[5] + st, column=st_col[5]).value = self.power_summary[value].transmitted st += 1 for key, value in iter(stns.items()): techs[self.power_summary[value].technology][0] += self.power_summary[value].capacity techs[self.power_summary[value].technology][1] += self.power_summary[value].generation if self.power_summary[value].transmitted is not None: techs[self.power_summary[value].technology][2] += self.power_summary[value].transmitted for key, value in iter(techs.items()): try: te = tech_names.index(key) except: try: te = tech_names2.index(key) except: continue ws.cell(row=tech_row[te][2], column=tech_col[te][2]).value = value[0] ws.cell(row=tech_row[te][4], column=tech_col[te][4]).value = value[1] if self.plots['grid_losses']: ws.cell(row=tech_row[te][5], column=tech_col[te][5]).value = value[2] if value[1] > 0: ws.cell(row=tech_row[te][3], column=tech_col[te][3]).value = \ value[1] / (value[0] * 8760) if per_row[0] > 0: for i in range(8760): ws.cell(row=per_row[0] + i, column=per_col[0]).value = shortstuff[i].hour cell_format(ws.cell(row=per_row[0], column=per_col[0]), ws.cell(row=per_row[0] + i, column=per_col[0])) if per_row[1] > 0: for i in range(8760): ws.cell(row=per_row[1] + i, column=per_col[1]).value = shortstuff[i].period cell_format(ws.cell(row=per_row[1], column=per_col[1]), ws.cell(row=per_row[1] + i, column=per_col[1])) if tech_row[0][6] > 0: for i in range(8760): ws.cell(row=tech_row[0][6] + i, column=tech_col[0][6]).value = 
shortstuff[i].load cell_format(ws.cell(row=tech_row[0][6], column=tech_col[0][6]), ws.cell(row=tech_row[0][6] + i, column=tech_col[0][6])) ly_keys = [] for t in range(len(tech_names)): ly_keys.append([]) if self.plots['by_station']: for t in range(len(self.stn_tech)): try: i = tech_names.index(self.stn_tech[t]) except: try: i = tech_names2.index(self.stn_tech[t]) except: continue ly_keys[i].append(self.stn_outs[t]) else: for t in range(len(tech_names)): if tech_names[t] in list(techs.keys()): ly_keys[t].append(tech_names[t]) if tech_names2[t] != '': if tech_names2[t] in list(techs.keys()): ly_keys[t].append(tech_names2[t]) for te in range(len(tech_row)): if tech_row[te][6] == 0 or len(ly_keys[te]) == 0: continue hrly = [0.] * 8760 doit = False for key in ly_keys[te]: try: values = self.ly[key] for h in range(len(hrly)): hrly[h] += values[h] if hrly[h] != 0: doit = True except: pass if doit or not doit: for h in range(len(hrly)): ws.cell(row=tech_row[te][6] + h, column=tech_col[te][6]).value = hrly[h] cell_format(ws.cell(row=tech_row[te][6], column=tech_col[te][6]), \ ws.cell(row=tech_row[te][6] + h, column=tech_col[te][6])) ts.save(data_file) config = configparser.RawConfigParser() if len(sys.argv) > 1: config_file = sys.argv[1] else: config_file = getModelFile('SIREN.ini') config.read(config_file) try: mapc = config.get('Map', 'map_choice') except: mapc = '' self.colours = {'cumulative': '#006400', 'gross_load': '#a9a9a9', 'load': '#000000', 'shortfall': '#8b0000', 'wind': '#6688bb'} try: colors = config.items('Colors') for item, colour in colors: if item in self.technologies or item in self.colours: itm = techClean(item) self.colours[itm] = colour except: pass if mapc != '': try: colors = config.items('Colors' + mapc) for item, colour in colors: if item in self.technologies or item in self.colours: itm = techClean(item) self.colours[itm] = colour except: pass papersizes = {'a0': '33.1,46.8', 'a1': '23.4,33.1', 'a2': '16.5,23.4', 'a3': '11.7,16.5', 'a4': '8.3,11.7', 'a5': '5.8,8.3', 'a6': '4.1,5.8', 'a7': '2.9,4.1', 'a8': '2,2.9', 'a9': '1.5,2', 'a10': '1,1.5', 'b0': '39.4,55.7', 'b1': '27.8,39.4', 'b2': '19.7,27.8', 'b3': '13.9,19.7', 'b4': '9.8,13.9', 'b5': '6.9,9.8', 'b6': '4.9,6.9', 'b7': '3.5,4.9', 'b8': '2.4,3.5', 'b9': '1.7,2.4', 'b10': '1.2,1.7', 'foolscap': '8.0,13.0', 'ledger': '8.5,14.0', 'legal': '8.5,14.09', 'letter': '8.5,11.0'} landscape = False papersize = '' self.other_width = 2. 
seasons = [] periods = [] try: items = config.items('Power') except: seasons = [[], [], [], []] seasons[0] = ['Summer', 11, 0, 1] seasons[1] = ['Autumn', 2, 3, 4] seasons[2] = ['Winter', 5, 6, 7] seasons[3] = ['Spring', 8, 9, 10] periods = [[], []] periods[0] = ['Winter', 4, 5, 6, 7, 8, 9] periods[1] = ['Summer', 10, 11, 0, 1, 2, 3] for item, values in items: if item[:6] == 'season': if item == 'season': continue i = int(item[6:]) - 1 if i >= len(seasons): seasons.append([]) seasons[i] = values.split(',') for j in range(1, len(seasons[i])): seasons[i][j] = int(seasons[i][j]) - 1 elif item[:6] == 'period': if item == 'period': continue i = int(item[6:]) - 1 if i >= len(periods): periods.append([]) periods[i] = values.split(',') for j in range(1, len(periods[i])): periods[i][j] = int(periods[i][j]) - 1 elif item == 'other_width': try: self.other_width = float(values) except: pass elif item == 'save_format': plt.rcParams['savefig.format'] = values elif item == 'figsize': try: papersize = papersizes[values] except: papersize = values elif item == 'orientation': if values.lower()[0] == 'l': landscape = True elif item == 'debug_sam': if values.lower() in ['true', 'yes', 'on']: self.debug = True else: self.debug = False if papersize != '': if landscape: bit = papersize.split(',') plt.rcParams['figure.figsize'] = bit[1] + ',' + bit[0] else: plt.rcParams['figure.figsize'] = papersize try: self.pm_template = config.get('Power', 'pm_template') except: try: self.pm_template = config.get('Files', 'pm_template') except: self.pm_template = False if self.pm_template: try: parents = getParents(config.items('Parents')) for key, value in parents: self.pm_template = self.pm_template.replace(key, value) self.pm_template = self.pm_template.replace('$USER$', getUser()) if not os.path.exists(self.pm_template): self.pm_template = False except: pass mth_labels = ['Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec'] ssn_labels = [] for i in range(len(seasons)): if len(seasons[i]) == 2: ssn_labels.append('%s (%s)' % (seasons[i][0], mth_labels[seasons[i][1]])) else: ssn_labels.append('%s (%s-%s)' % (seasons[i][0], mth_labels[seasons[i][1]], mth_labels[seasons[i][-1]])) smp_labels = [] for i in range(len(periods)): if len(periods[i]) == 2: smp_labels.append('%s (%s)' % (periods[i][0], mth_labels[periods[i][1]])) else: smp_labels.append('%s (%s-%s)' % (periods[i][0], mth_labels[periods[i][1]], mth_labels[periods[i][-1]])) labels = ['0:00', '4:00', '8:00', '12:00', '16:00', '20:00', '24:00'] mth_xlabels = ['0:', '4:', '8:', '12:', '16:', '20:', '24:'] pct_labels = ['0%', '20%', '40%', '60%', '80%', '100%'] m = 0 d = 1 day_labels = [] while m < len(the_days): day_labels.append('%s %s' % (str(d), mth_labels[m])) d += 7 if d > the_days[m]: d = d - the_days[m] m += 1 lbl_font = FontProperties() lbl_font.set_size('small') x24 = [] l24 = {} m24 = {} q24 = {} s24 = {} d365 = {} for i in range(24): x24.append(i + 1) for key in list(ydata.keys()): if self.plots['total']: l24[key] = [] for j in range(24): l24[key].append(0.) if self.plots['month'] or self.plots['by_month']: m24[key] = [] for m in range(12): m24[key].append([]) for j in range(24): m24[key][m].append(0.) if self.plots['season'] or self.plots['by_season']: q24[key] = [] for q in range(len(seasons)): q24[key].append([]) for j in range(24): q24[key][q].append(0.) if self.plots['period'] or self.plots['by_period']: s24[key] = [] for s in range(len(periods)): s24[key].append([]) for j in range(24): s24[key][s].append(0.) 
if self.plots['by_day']: d365[key] = [] for j in range(365): d365[key].append([0.]) the_qtrs = [] for i in range(len(seasons)): d = 0 for j in range(1, len(seasons[i])): d += the_days[seasons[i][j]] the_qtrs.append(d) the_ssns = [] for i in range(len(periods)): d = 0 for j in range(1, len(periods[i])): d += the_days[periods[i][j]] the_ssns.append(d) the_hours = [0] i = 0 for m in range(len(the_days)): i = i + the_days[m] * 24 the_hours.append(i) d = -1 for i in range(0, len(x), 24): m = 11 d += 1 while i < the_hours[m] and m > 0: m -= 1 for k in range(24): for key, value in iter(sorted(ydata.items())): if key == 'Generation': continue if self.plots['total']: l24[key][k] += value[i + k] if self.plots['by_day']: d365[key][d][0] += value[i + k] if self.plots['month'] or self.plots['by_month']: m24[key][m][k] = m24[key][m][k] + value[i + k] if self.plots['season'] or self.plots['by_season']: for q in range(len(seasons)): if m in seasons[q]: break q24[key][q][k] = q24[key][q][k] + value[i + k] if self.plots['period'] or self.plots['by_period']: for s in range(len(periods)): if m in periods[s]: break s24[key][s][k] = s24[key][s][k] + value[i + k] if self.plots['cumulative']: pc = 1 else: pc = 0 if self.plots['gross_load']: pc += 1 if self.plots['shortfall']: pc += 1 colours = ['r', 'g', 'b', 'c', 'm', 'y', 'orange', 'darkcyan', 'darkmagenta', 'darkolivegreen', 'darkorange', 'darkturquoise', 'darkviolet', 'violet'] colour_index = 0 linestyles = ['-', '--', '-.', ':'] line_index = 0 self.linestyle = {} for key in self.colours: self.linestyle[key] = '-' for key in ydata: if key not in self.colours: if key[:4] == 'Load': try: self.colours[key] = self.colours['load'] except: self.colours[key] = 'black' self.linestyle[key] = '-' else: self.colours[key] = colours[colour_index] self.linestyle[key] = linestyles[line_index] colour_index += 1 if colour_index >= len(colours): colour_index = 0 line_index += 1 if line_index >= len(linestyles): line_index = 0 if self.plots['by_day']: stepPlot(self, 'day', d365, day_labels) if self.plots['by_month']: stepPlot(self, 'month', m24, mth_labels) if self.plots['by_season']: stepPlot(self, 'season', q24, ssn_labels) if self.plots['by_period']: stepPlot(self, 'period', s24, smp_labels) for key in list(ydata.keys()): for k in range(24): if self.plots['total']: l24[key][k] = l24[key][k] / 365 if self.plots['month']: for m in range(12): m24[key][m][k] = m24[key][m][k] / the_days[m] if self.plots['season']: for q in range(len(seasons)): q24[key][q][k] = q24[key][q][k] / the_qtrs[q] if self.plots['period']: for s in range(len(periods)): s24[key][s][k] = s24[key][s][k] / the_ssns[s] if self.plots['hour']: if self.plots['save_plot']: sp_vals = ['hour'] sp_data = [] sp_data.append(x[1:]) sp_data[-1].append(len(x)) sp_vals.append('period') sp_data.append([]) for i in range(len(x)): sp_data[-1].append(the_date(self.load_year, i)) hdr = self.hdrs['hour'].replace('Power - ', '') fig = plt.figure(hdr + self.suffix) plt.grid(True) hx = fig.add_subplot(111) plt.title(self.hdrs['hour'] + self.suffix) maxy = 0 storage = None if self.plots['cumulative']: cumulative = [] for i in range(len(x)): cumulative.append(0.) if self.plots['gross_load']: gross_load = [] for i in range(len(x)): gross_load.append(0.) if self.plots['show_pct']: load_sum = 0. gen_sum = 0. 
for key, value in iter(sorted(ydata.items())): if key == 'Generation': continue if self.plots['show_pct']: for i in range(len(x)): if key[:4] == 'Load': load_sum += value[i] elif key == 'Storage': pass else: gen_sum += value[i] if self.plots['gross_load'] and key == 'Existing Rooftop PV': load_sum += value[i] maxy = max(maxy, max(value)) lw = self.other_width if self.plots['cumulative'] and key[:4] != 'Load' and key != 'Storage': lw = 1.0 for i in range(len(x)): cumulative[i] += value[i] if self.plots['gross_load'] and (key[:4] == 'Load' or key == 'Existing Rooftop PV'): for i in range(len(x)): gross_load[i] += value[i] if self.plots['shortfall'] and key[:4] == 'Load': load = value if self.plots['shortfall'] and key == 'Storage': storage = value hx.plot(x, value, linewidth=lw, label=shrinkKey(key), color=self.colours[key], linestyle=self.linestyle[key]) if self.plots['save_plot']: sp_vals.append(shrinkKey(key)) sp_data.append(value) if self.plots['cumulative']: hx.plot(x, cumulative, linewidth=self.other_width, label='Tot. Generation', color=self.colours['cumulative']) maxy = max(maxy, max(cumulative)) if self.plots['save_plot']: sp_vals.append('Tot. Generation') sp_data.append(cumulative) if self.plots['gross_load'] and 'Existing Rooftop PV' in list(ydata.keys()): hx.plot(x, gross_load, linewidth=1.0, label='Gross Load', color=self.colours['gross_load']) maxy = max(maxy, max(gross_load)) if self.plots['save_plot']: sp_vals.append('Gross Load') sp_data.append(gross_load) try: rndup = pow(10, round(log10(maxy * 1.5) - 1)) / 2 maxy = ceil(maxy / rndup) * rndup except: rndup = 0 if self.plots['shortfall'] and self.do_load: load2 = [] if storage is None: for i in range(len(cumulative)): load2.append(cumulative[i] - load[i]) else: for i in range(len(cumulative)): load2.append(cumulative[i] + storage[i] - load[i]) hx.plot(x, load2, linewidth=self.other_width, label='Shortfall', color=self.colours['shortfall']) plt.axhline(0, color='black') miny = min(load2) if rndup != 0 and miny < 0: miny = -ceil(-miny / rndup) * rndup if self.plots['save_plot']: sp_vals.append('Shortfall') sp_data.append(load2) else: miny = 0 if self.plots['save_plot']: titl = 'Hour' decpts = [3] * len(sp_vals) decpts[0] = decpts[1] = 0 dialog = displaytable.Table(list(map(list, list(zip(*sp_data)))), title=titl, fields=sp_vals, save_folder=self.scenarios, decpts=decpts) dialog.exec_() del dialog, sp_data, sp_vals plt.ylim([miny, maxy]) plt.xlim([0, len(x)]) if (len(ydata) + pc) > 9: # Shrink current axis by 5% box = hx.get_position() hx.set_position([box.x0, box.y0, box.width * 0.95, box.height]) # Put a legend to the right of the current axis hx.legend(loc='center left', bbox_to_anchor=(1, 0.5), prop=lbl_font) else: hx.legend(bbox_to_anchor=[0.5, -0.1], loc='center', ncol=(len(ydata) + pc), prop=lbl_font) plt.xticks(list(range(12, len(x), 168))) hx.set_xticklabels(day_labels, rotation='vertical') hx.set_xlabel('Month of the year') hx.set_ylabel('Power (MW)') if self.plots['show_pct']: self.gen_pct = ' (%s%% of load)' % '{:0,.1f}'.format(gen_sum * 100. 
/ load_sum) plt.title(self.hdrs['hour'] + self.suffix + self.gen_pct) if self.plots['maximise']: mng = plt.get_current_fig_manager() if sys.platform == 'win32' or sys.platform == 'cygwin': if plt.get_backend() == 'TkAgg': mng.window.state('zoomed') elif plt.get_backend() == 'Qt4Agg': mng.window.showMaximized() else: mng.resize(*mng.window.maxsize()) zp = ZoomPanX() f = zp.zoom_pan(hx, base_scale=1.2) # enable scrollable zoom if self.plots['block']: plt.show(block=True) else: plt.draw() del zp if self.plots['augment'] and self.do_load: hdr = self.hdrs['augment'].replace('Power - ', '') fig = plt.figure(hdr + self.suffix) plt.grid(True) hx = fig.add_subplot(111) plt.title(self.hdrs['augment'] + self.suffix) maxy = 0 miny = 0 storage = None cumulative = [] for i in range(len(x)): cumulative.append(0.) if self.plots['gross_load']: gross_load = [] for i in range(len(x)): gross_load.append(0.) if self.plots['show_pct']: load_sum = 0. gen_sum = 0. for key, value in iter(sorted(ydata.items())): if key == 'Generation' or key == 'Excess': # might need to keep excess continue if self.plots['show_pct']: for i in range(len(x)): if key[:4] == 'Load': load_sum += value[i] elif key == 'Storage': pass else: gen_sum += value[i] if self.plots['gross_load'] and key == 'Existing Rooftop PV': load_sum += value[i] maxy = max(maxy, max(value)) lw = self.other_width if key[:4] != 'Load' and key != 'Storage': for i in range(len(x)): cumulative[i] += value[i] if self.plots['gross_load'] and (key[:4] == 'Load' or key == 'Existing Rooftop PV'): for i in range(len(x)): gross_load[i] += value[i] if key[:4] == 'Load': load = value if key == 'Storage': storage = value maxy = max(maxy, max(cumulative)) try: rndup = pow(10, round(log10(maxy * 1.5) - 1)) / 2 maxy = ceil(maxy / rndup) * rndup except: rndup = 0 regen = cumulative[:] for r in range(len(regen)): if regen[r] > load[r]: regen[r] = load[r] hx.fill_between(x, 0, regen, color=self.colours['cumulative']) #'#004949') if storage is not None: for r in range(len(storage)): storage[r] += cumulative[r] for r in range(len(storage)): if storage[r] > load[r]: storage[r] = load[r] hx.fill_between(x, regen, storage, color=self.colours['wind']) #'#006DDB') hx.fill_between(x, storage, load, color=self.colours['shortfall']) #'#920000') else: hx.fill_between(x, load, regen, color=self.colours['shortfall']) #'#920000') hx.plot(x, cumulative, linewidth=self.other_width, label='RE', linestyle='--', color=self.colours['gross_load']) if self.plots['save_plot']: sp_vals = ['hour'] sp_data = [] sp_tots = [''] sp_pts = [0] sp_data.append(x[1:]) sp_data[-1].append(len(x)) sp_vals.append('period') sp_data.append([]) sp_tots.append('') sp_pts.append(0) for i in range(len(x)): sp_data[-1].append(the_date(self.load_year, i)) sp_vals.append('load') sp_data.append(load) l = len(sp_data) - 1 sp_tots.append(0.) sp_pts.append(4) for ld in load: sp_tots[l] += ld sp_vals.append('renewable') sp_data.append(regen) r = len(sp_data) - 1 sp_tots.append(0.) sp_pts.append(4) for re in regen: sp_tots[-1] += re if storage is not None: sp_vals.append('storage') sp_data.append(storage) s = len(sp_data) - 1 sp_tots.append(0.) sp_pts.append(4) else: s = 0 sp_vals.append('re gen.') sp_data.append(cumulative) e = len(sp_data) - 1 sp_tots.append(0.) 
sp_pts.append(4) titl = 'Augmented' dialog = displaytable.Table(list(map(list, list(zip(*sp_data)))), title=titl, fields=sp_vals, save_folder=self.scenarios, decpts=sp_pts) dialog.exec_() del dialog if s > 0: for i in range(len(sp_data[s])): sp_data[s][i] = sp_data[s][i] - sp_data[r][i] sp_tots[s] += sp_data[s][i] sp_vals.append('excess') sp_vals[e] = 'augment' sp_data.append([]) sp_tots.append(0.) sp_pts.append(4) for i in range(len(sp_data[r])): sp_data[-1].append(sp_data[e][i] - sp_data[r][i]) sp_tots[-1] += sp_data[-1][i] if s > 0: for i in range(len(sp_data[e])): sp_data[e][i] = sp_data[l][i] - sp_data[r][i] - sp_data[s][i] sp_tots[e] += sp_data[e][i] sp_data[-1][i] -= sp_data[s][i] sp_tots[-1] -= sp_data[s][i] else: for i in range(len(sp_data[e])): sp_data[e][i] = sp_data[l][i] - sp_data[r][i] sp_tots[e] += sp_data[e][i] titl = 'augmented2' dialog = displaytable.Table(list(map(list, list(zip(*sp_data)))), title=titl, fields=sp_vals, save_folder=self.scenarios, decpts=sp_pts) dialog.exec_() fields = ['row', 'component', 'MWh', 'Load %'] values = [] sp_pts = [0, 0, 4, 1] for i in range(2, len(sp_vals)): values.append([i - 1, sp_vals[i].title(), sp_tots[i], 0.]) values[-1][-1] = (sp_tots[i] * 100.) / sp_tots[l] titl = 'augmented3' dialog = displaytable.Table(values, fields=fields, title=titl, save_folder=self.scenarios, decpts=sp_pts) dialog.exec_() del dialog, sp_vals, sp_data, sp_tots plt.ylim([miny, maxy]) plt.xlim([0, len(x)]) plt.xticks(list(range(12, len(x), 168))) hx.set_xticklabels(day_labels, rotation='vertical') hx.set_xlabel('Month of the year') hx.set_ylabel('Power (MW)') zp = ZoomPanX() f = zp.zoom_pan(hx, base_scale=1.2) # enable scrollable zoom if self.plots['show_pct']: self.gen_pct = ' (%s%% of load)' % '{:0,.1f}'.format(gen_sum * 100. / load_sum) plt.title(self.hdrs['hour'] + self.suffix + self.gen_pct) if self.plots['maximise']: mng = plt.get_current_fig_manager() if sys.platform == 'win32' or sys.platform == 'cygwin': if plt.get_backend() == 'TkAgg': mng.window.state('zoomed') elif plt.get_backend() == 'Qt4Agg': mng.window.showMaximized() else: mng.resize(*mng.window.maxsize()) if self.plots['block']: plt.show(block=True) else: plt.draw() del zp # shortstuff = [] # vals = ['load', 'renewable', 'storage', 'cumulative'] # for i in range(0, len(load)): # if storage is None: # shortstuff.append(ColumnData(i + 1, the_date(self.load_year, i), [load[i], # regen[i], 0., cumulative[i]], values=vals)) # else: # shortstuff.append(ColumnData(i + 1, the_date(self.load_year, i), [load[i], # regen[i], storage[i], cumulative[i]], values=vals)) # vals.insert(0, 'period') # vals.insert(0, 'hour') # dialog = displaytable.Table(shortstuff, title='Augmented', # save_folder=self.scenarios, fields=vals) # dialog.exec_() # del dialog if self.plots['duration']: hdr = self.hdrs['duration'].replace('Power - ', '') fig = plt.figure(hdr + self.suffix) plt.grid(True) dx = fig.add_subplot(111) plt.title(self.hdrs['duration'] + self.suffix) maxy = 0 if self.plots['cumulative']: cumulative = [] for i in range(len(x)): cumulative.append(0.) if self.plots['show_pct']: load_sum = 0. gen_sum = 0. 
for key, value in iter(sorted(ydata.items())): if key == 'Generation': continue if self.plots['show_pct']: for i in range(len(x)): if key[:4] == 'Load': load_sum += value[i] elif key == 'Storage': pass else: gen_sum += value[i] if self.plots['gross_load'] and key == 'Existing Rooftop PV': load_sum += value[i] sortydata = sorted(value, reverse=True) lw = self.other_width if self.plots['cumulative'] and key[:4] != 'Load' and key != 'Storage': lw = 1.0 for i in range(len(x)): cumulative[i] += value[i] maxy = max(maxy, max(sortydata)) dx.plot(x, sortydata, linewidth=lw, label=shrinkKey(key), color=self.colours[key], linestyle=self.linestyle[key]) if self.plots['cumulative']: sortydata = sorted(cumulative, reverse=True) dx.plot(x, sortydata, linewidth=self.other_width, label='Tot. Generation', color=self.colours['cumulative']) try: rndup = pow(10, round(log10(maxy * 1.5) - 1)) / 2 maxy = ceil(maxy / rndup) * rndup except: pass plt.ylim([0, maxy]) plt.xlim([0, len(x)]) if (len(ydata) + pc) > 9: # Shrink current axis by 10% box = dx.get_position() dx.set_position([box.x0, box.y0, box.width * 0.95, box.height]) # Put a legend to the right of the current axis dx.legend(loc='center left', bbox_to_anchor=(1, 0.5), prop=lbl_font) else: dx.legend(bbox_to_anchor=[0.5, -0.1], loc='center', ncol=(len(ydata) + pc), prop=lbl_font) tics = int(len(x) / (len(pct_labels) - 1)) plt.xticks(list(range(0, len(x), tics))) dx.set_xticklabels(pct_labels) dx.set_xlabel('Percentage of Year') dx.set_ylabel('Power (MW)') if self.plots['show_pct']: self.gen_pct = ' (%s%% of load)' % '{:0,.1f}'.format(gen_sum * 100. / load_sum) plt.title(self.hdrs['duration'] + self.suffix + self.gen_pct) if self.plots['maximise']: mng = plt.get_current_fig_manager() if sys.platform == 'win32' or sys.platform == 'cygwin': if plt.get_backend() == 'TkAgg': mng.window.state('zoomed') elif plt.get_backend() == 'Qt4Agg': mng.window.showMaximized() else: mng.resize(*mng.window.maxsize()) if self.plots['block']: plt.show(block=True) else: plt.draw() if self.do_load: hdr = self.hdrs['duration'].replace('Power - ', '') # fig = plt.figure(hdr + self.suffix) plt.figure(hdr + ' 2') plt.grid(True) plt.title(self.hdrs['duration'] + ' with renewable contribution') lx = plt.subplot(111) maxy = 0 miny = 0 load = [] # use for this and next graph rgen = [] # use for this and next graph rgendiff = [] for i in range(len(self.x)): rgen.append(0.) rgendiff.append(0.) if self.plots['show_pct']: load_sum = 0. gen_sum = 0. for key, value in ydata.items(): if key == 'Generation': continue if self.plots['show_pct']: for i in range(len(value)): if key[:4] == 'Load': load_sum += value[i] elif key == 'Storage': pass else: gen_sum += value[i] if self.plots['gross_load'] and key == 'Existing Rooftop PV': load_sum += value[i] if key[:4] == 'Load': load = value else: for i in range(len(value)): rgen[i] += value[i] for i in range(len(load)): rgendiff[i] = load[i] - rgen[i] sortly1 = sorted(load, reverse=True) maxy = max(maxy, max(load)) maxy = max(maxy, max(rgendiff)) miny = min(miny, min(rgendiff)) try: rndup = pow(10, round(log10(maxy * 1.5) - 1)) / 2 maxy = ceil(maxy / rndup) * rndup miny = -ceil(-miny / rndup) * rndup except: pass if self.load_multiplier != 0: load_key = 'Load ' + self.load_year else: load_key = 'Load' lx.plot(x, sortly1, linewidth=self.other_width, label=load_key) sortly2 = sorted(rgendiff, reverse=True) lx.plot(x, sortly2, linewidth=self.other_width, label='Tot. 
Generation') lx.fill_between(x, sortly1, sortly2, facecolor=self.colours['cumulative']) plt.ylim([miny, maxy]) plt.xlim([0, len(x)]) lx.legend(bbox_to_anchor=[0.5, -0.1], loc='center', ncol=2, prop=lbl_font) tics = int(len(x) / (len(pct_labels) - 1)) plt.xticks(list(range(0, len(x), tics))) lx.set_xticklabels(pct_labels) lx.set_xlabel('Percentage of Year') lx.set_ylabel('Power (MW)') lx.axhline(0, color='black') if self.plots['show_pct']: self.gen_pct = ' (%s%% of load)' % '{:0,.1f}'.format(gen_sum * 100. / load_sum) plt.title(self.hdrs['duration'] + ' with renewable contribution' + self.gen_pct) if self.plots['maximise']: mng = plt.get_current_fig_manager() if sys.platform == 'win32' or sys.platform == 'cygwin': if plt.get_backend() == 'TkAgg': mng.window.state('zoomed') elif plt.get_backend() == 'Qt4Agg': mng.window.showMaximized() else: mng.resize(*mng.window.maxsize()) if self.plots['block']: plt.show(block=True) else: plt.draw() if not self.plots['block']: plt.show(block=True) if (self.plots['shortfall_detail'] or self.plots['save_match']) and self.do_load: load = [] rgen = [] shortfall = [[], [], [], []] generation = [] for i in range(len(self.x)): rgen.append(0.) shortfall[0].append(0.) for key, value in ydata.items(): if key == 'Generation': generation = value elif key[:4] == 'Load': load = value else: for i in range(len(value)): rgen[i] += value[i] shortfall[0][0] = rgen[0] - load[0] for i in range(1, len(load)): shortfall[0][i] = shortfall[0][i - 1] + rgen[i] - load[i] d_short = [[], [0], [0, 0]] for i in range(0, len(load), 24): d_short[0].append(0.) for j in range(i, i + 24): d_short[0][-1] += rgen[i] - load[i] if self.iterations > 0: for i in range(1, len(d_short[0])): d_short[1].append((d_short[0][i - 1] + d_short[0][i]) / 2) for i in range(2, len(d_short[0])): d_short[2].append((d_short[0][i - 2] + d_short[0][i - 1] + d_short[0][i]) / 3) d_short[1][0] = d_short[1][1] d_short[2][0] = d_short[2][1] = d_short[2][2] shortstuff = [] vals = ['shortfall', 'iteration 1', 'iteration 2'] for i in range(len(d_short[0])): shortstuff.append(DailyData(i + 1, the_date(self.load_year, i * 24)[:10], [d_short[0][i], d_short[1][i], d_short[2][i]], values=vals)) vals.insert(0, 'date') vals.insert(0, 'day') dialog = displaytable.Table(shortstuff, title='Daily Shortfall', save_folder=self.scenarios, fields=vals) dialog.exec_() del dialog del shortstuff xs = [] for i in range(len(d_short[0])): xs.append(i + 1) plt.figure('daily shortfall') plt.grid(True) plt.title('Daily Shortfall') sdfx = plt.subplot(111) for i in range(self.iterations): sdfx.step(xs, d_short[i], linewidth=self.other_width, label=str(i + 1) + ' day average', color=colours[i]) plt.xticks(list(range(0, len(xs), 7))) tick_spot = [] for i in range(0, len(xs), 7): tick_spot.append(i + .5) sdfx.set_xticks(tick_spot) sdfx.set_xticklabels(day_labels, rotation='vertical') sdfx.set_xlabel('Day of the year') sdfx.set_ylabel('Power (MW)') plt.xlim([0, len(xs)]) sdfx.legend(loc='best') for i in range(len(d_short)): lin = min(d_short[i]) sdfx.axhline(lin, linestyle='--', color=colours[i]) lin = max(d_short[i]) sdfx.axhline(lin, linestyle='--', color=colours[i]) lin = sum(d_short[0]) / len(d_short[0]) sdfx.axhline(lin, linestyle='--', color='black') if self.plots['maximise']: mng = plt.get_current_fig_manager() if sys.platform == 'win32' or sys.platform == 'cygwin': if plt.get_backend() == 'TkAgg': mng.window.state('zoomed') elif plt.get_backend() == 'Qt4Agg': mng.window.showMaximized() else: mng.resize(*mng.window.maxsize()) 
plt.show(block=True) h_storage = [-(shortfall[0][-1] / len(shortfall[0]))] # average shortfall for s in range(1, self.iterations + 1): for i in range(len(self.x)): shortfall[s].append(0.) ctr = 0 still_short = [0, 0] if rgen[0] - load[0] + h_storage[-1] < 0: still_short[0] += rgen[0] - load[0] + h_storage[-1] ctr += 1 else: still_short[1] += rgen[0] - load[0] + h_storage[-1] shortfall[s][0] = rgen[0] - load[0] + h_storage[-1] for i in range(1, len(load)): shortfall[s][i] = shortfall[s][i - 1] + rgen[i] - load[i] + h_storage[-1] if rgen[i] - load[i] + h_storage[-1] < 0: still_short[0] += rgen[i] - load[i] + h_storage[-1] ctr += 1 else: still_short[1] += rgen[i] - load[i] + h_storage[-1] # h_storage.append(h_storage[-1] - still_short[0] / len(self.x)) h_storage.append(-(shortfall[0][-1] + still_short[0]) / len(self.x)) dimen = log10(fabs(shortfall[0][-1])) unit = 'MW' if dimen > 11: unit = 'PW' div = 9 elif dimen > 8: unit = 'TW' div = 6 elif dimen > 5: unit = 'GW' div = 3 else: div = 0 if div > 0: for s in range(self.iterations + 1): for i in range(len(shortfall[s])): shortfall[s][i] = shortfall[s][i] / pow(10, div) plt.figure('cumulative shortfall') plt.grid(True) plt.title('Cumulative Shortfall') sfx = plt.subplot(111) sfx.plot(x, shortfall[0], linewidth=self.other_width, label='Shortfall', color=self.colours['shortfall']) for s in range(1, self.iterations + 1): if h_storage[s - 1] > 1: amt = '{:0,.0f}'.format(h_storage[s - 1]) else: amt = '{:0,.1f}'.format(h_storage[s - 1]) lbl = 'iteration %s - add %s MW to generation' % (s, amt ) sfx.plot(x, shortfall[s], linewidth=self.other_width, label=lbl, color=colours[s]) plt.xticks(list(range(0, len(x), 168))) tick_spot = [] for i in range(0, len(x), 168): tick_spot.append(i + .5) box = sfx.get_position() sfx.set_position([box.x0, box.y0, box.width, box.height]) sfx.set_xticks(tick_spot) sfx.set_xticklabels(day_labels, rotation='vertical') plt.xlim([0, len(x)]) sfx.set_xlabel('Day of the year') sfx.set_ylabel('Power (' + unit + ')') sfx.legend(loc='best', prop=lbl_font) if self.plots['maximise']: mng = plt.get_current_fig_manager() if sys.platform == 'win32' or sys.platform == 'cygwin': if plt.get_backend() == 'TkAgg': mng.window.state('zoomed') elif plt.get_backend() == 'Qt4Agg': mng.window.showMaximized() else: mng.resize(*mng.window.maxsize()) plt.show(block=True) for i in range(0, len(load)): shortfall[0][i] = rgen[i] - load[i] for s in range(1, self.iterations + 1): for i in range(0, len(load)): shortfall[s][i] = rgen[i] - load[i] + h_storage[s - 1] plt.figure('shortfall') plt.grid(True) plt.title('Shortfall') sfx = plt.subplot(111) sfx.plot(x, shortfall[0], linewidth=self.other_width, label='Shortfall', color=self.colours['shortfall']) for s in range(1, self.iterations + 1): if h_storage[s - 1] > 1: amt = '{:0,.0f}'.format(h_storage[s - 1]) else: amt = '{:0,.1f}'.format(h_storage[s - 1]) lbl = 'iteration %s - add %s MW to generation' % (s, amt ) sfx.plot(x, shortfall[s], linewidth=self.other_width, label=lbl, color=colours[s]) plt.axhline(0, color='black') plt.xticks(list(range(0, len(x), 168))) tick_spot = [] for i in range(0, len(x), 168): tick_spot.append(i + .5) box = sfx.get_position() sfx.set_position([box.x0, box.y0, box.width, box.height]) sfx.set_xticks(tick_spot) sfx.set_xticklabels(day_labels, rotation='vertical') sfx.set_xlabel('Day of the year') sfx.set_ylabel('Power (MW)') plt.xlim([0, len(x)]) sfx.legend(loc='best', prop=lbl_font) if self.plots['maximise']: mng = plt.get_current_fig_manager() if sys.platform == 'win32' or 
sys.platform == 'cygwin': if plt.get_backend() == 'TkAgg': mng.window.state('zoomed') elif plt.get_backend() == 'Qt4Agg': mng.window.showMaximized() else: mng.resize(*mng.window.maxsize()) plt.show(block=True) else: for i in range(0, len(load)): shortfall[0][i] = rgen[i] - load[i] shortstuff = [] if self.plots['grid_losses']: vals = ['load', 'generation', 'transmitted', 'shortfall'] short2 = [shortfall[0][0]] for i in range(1, len(self.x)): # short2.append(shortfall[0][i] - shortfall[0][i - 1]) short2.append(shortfall[0][i]) for i in range(0, len(load)): shortstuff.append(ColumnData(i + 1, the_date(self.load_year, i), [load[i], generation[i], rgen[i], short2[i]], values=vals)) else: vals = ['load', 'generation', 'shortfall'] for i in range(0, len(load)): shortstuff.append(ColumnData(i + 1, the_date(self.load_year, i), [load[i], rgen[i], shortfall[0][i]], values=vals)) vals.insert(0, 'period') vals.insert(0, 'hour') if self.plots['shortfall_detail'] and self.plots['save_plot']: dialog = displaytable.Table(shortstuff, title='Hourly Shortfall', save_folder=self.scenarios, fields=vals) dialog.exec_() del dialog if self.plots['save_match']: if self.pm_template: saveMatch(self, shortstuff) else: saveBalance(self, shortstuff) del shortstuff if self.plots['total']: maxy = 0 if self.plots['cumulative']: cumulative = [] for i in range(len(x24)): cumulative.append(0.) if self.plots['gross_load']: gross_load = [] for i in range(len(x24)): gross_load.append(0.) if self.plots['save_plot']: sp_data = [] sp_data.append(x24) sp_vals = ['hour'] hdr = self.hdrs['total'].replace('Power - ', '') plt.figure(hdr + self.suffix) plt.grid(True) plt.title(self.hdrs['total'] + self.suffix) tx = plt.subplot(111) storage = None if self.plots['show_pct']: load_sum = 0. gen_sum = 0. for key in iter(sorted(l24.keys())): if key == 'Generation': continue if self.plots['show_pct']: for j in range(len(x24)): if key[:4] == 'Load': load_sum += l24[key][j] elif key == 'Storage': pass else: gen_sum += l24[key][j] if self.plots['gross_load'] and key == 'Existing Rooftop PV': load_sum += l24[key][j] maxy = max(maxy, max(l24[key])) lw = self.other_width if self.plots['cumulative'] and key[:4] != 'Load' and key != 'Storage': lw = 1.0 for j in range(len(x24)): cumulative[j] += l24[key][j] if self.plots['gross_load'] and (key[:4] == 'Load' or key == 'Existing Rooftop PV'): for j in range(len(x24)): gross_load[j] += l24[key][j] if self.plots['shortfall'] and key[:4] == 'Load': load = l24[key] if self.plots['shortfall'] and key == 'Storage': storage = l24[key] tx.plot(x24, l24[key], linewidth=lw, label=shrinkKey(key), color=self.colours[key], linestyle=self.linestyle[key]) if self.plots['save_plot']: sp_vals.append(key) sp_data.append(l24[key]) if self.plots['cumulative']: tx.plot(x24, cumulative, linewidth=self.other_width, label='Tot. Generation', color=self.colours['cumulative']) maxy = max(maxy, max(cumulative)) if self.plots['save_plot']: sp_vals.append('Tot. 
Generation') sp_data.append(cumulative) if self.plots['gross_load'] and 'Existing Rooftop PV' in list(ydata.keys()): tx.plot(x24, gross_load, linewidth=1.0, label='Gross Load', color=self.colours['gross_load']) maxy = max(maxy, max(gross_load)) if self.plots['save_plot']: sp_vals.append('Gross Load') sp_data.append(gross_load) try: rndup = pow(10, round(log10(maxy * 1.5) - 1)) / 2 maxy = ceil(maxy / rndup) * rndup except: pass if self.plots['shortfall'] and self.do_load: load2 = [] if storage is None: for i in range(len(cumulative)): load2.append(cumulative[i] - l24[self.load_key][i]) else: for i in range(len(cumulative)): load2.append(cumulative[i] + storage[i] - l24[self.load_key][i]) tx.plot(x24, load2, linewidth=self.other_width, label='Shortfall', color=self.colours['shortfall']) plt.axhline(0, color='black') miny = min(load2) if rndup != 0 and miny < 0: miny = -ceil(-miny / rndup) * rndup if self.plots['save_plot']: sp_vals.append('Shortfall') sp_data.append(load2) else: miny = 0 if self.plots['save_plot']: titl = 'Total' decpts = [3] * len(sp_vals) decpts[0] = 0 dialog = displaytable.Table(list(map(list, list(zip(*sp_data)))), title=titl, fields=sp_vals, save_folder=self.scenarios, decpts=decpts) dialog.exec_() del dialog, sp_data, sp_vals plt.ylim([miny, maxy]) plt.xlim([1, 25]) plt.xticks(list(range(0, 25, 4))) # tx.legend(loc='lower left', numpoints = 2, prop=lbl_font) tx.set_xticklabels(labels) tx.set_xlabel('Hour of the Day') tx.set_ylabel('Power (MW)') if (len(ydata) + pc) > 9: # Shrink current axis by 5% box = tx.get_position() tx.set_position([box.x0, box.y0, box.width * 0.95, box.height]) # Put a legend to the right of the current axis tx.legend(loc='center left', bbox_to_anchor=(1, 0.5), prop=lbl_font) else: tx.legend(bbox_to_anchor=[0.5, -0.1], loc='center', ncol=(len(ydata) + pc), prop=lbl_font) if self.plots['show_pct']: self.gen_pct = ' (%s%% of load)' % '{:0,.1f}'.format(gen_sum * 100. 
/ load_sum) plt.title(self.hdrs['total'] + self.suffix + self.gen_pct) if self.plots['maximise']: mng = plt.get_current_fig_manager() if sys.platform == 'win32' or sys.platform == 'cygwin': if plt.get_backend() == 'TkAgg': mng.window.state('zoomed') elif plt.get_backend() == 'Qt4Agg': mng.window.showMaximized() else: mng.resize(*mng.window.maxsize()) if self.plots['block']: plt.show(block=True) else: plt.draw() if self.plots['month']: dayPlot(self, 'month', m24, mth_labels, mth_xlabels) if self.plots['season']: dayPlot(self, 'season', q24, ssn_labels, labels) if self.plots['period']: dayPlot(self, 'period', s24, smp_labels, labels) if not self.plots['block']: plt.show(block=True) def save_detail(self, data_file, techs, keys=None): if self.suffix != '': i = data_file.rfind('.') if i > 0: data_file = data_file[:i] + '_' + self.suffix + data_file[i:] else: data_file = data_file + '_' + self.suffix if data_file[-4:] == '.csv' or data_file[-4:] == '.xls' \ or data_file[-5:] == '.xlsx': pass else: data_file += '.xls' data_file = QtWidgets.QFileDialog.getSaveFileName(None, 'Save power data file', self.scenarios + data_file, 'Excel Files (*.xls*);;CSV Files (*.csv)')[0] if data_file == '': return if self.load_multiplier != 0: the_year = self.load_year else: the_year = self.base_year if data_file[-4:] == '.csv' or data_file[-4:] == '.xls' \ or data_file[-5:] == '.xlsx': pass else: data_file += '.xls' if os.path.exists(data_file): if os.path.exists(data_file + '~'): os.remove(data_file + '~') os.rename(data_file, data_file + '~') if keys is None: keys = sorted(techs.keys()) if data_file[-4:] == '.csv': tf = open(data_file, 'w') line = 'Hour,Period,' if self.load_multiplier != 0: the_year = self.load_year else: the_year = self.base_year max_outs = 0 lines = [] for i in range(8760): lines.append(str(i+1) + ',' + str(the_date(the_year, i)) + ',') for key in keys: if key[:4] == 'Load' and self.load_multiplier != 0: line += 'Load ' + self.load_year + ',' else: line += key + ',' for i in range(len(techs[key])): lines[i] += str(round(techs[key][i], 3)) + ',' tf.write(line + '\n') for i in range(len(lines)): tf.write(lines[i] + '\n') tf.close() del lines else: wb = xlwt.Workbook() ws = wb.add_sheet('Detail') ws.write(0, 0, 'Hour') ws.write(0, 1, 'Period') for i in range(len(self.x)): ws.write(i + 1, 0, i + 1) ws.write(i + 1, 1, the_date(the_year, i)) if 16 * 275 > ws.col(1).width: ws.col(1).width = 16 * 275 c = 2 for key in keys: if key[:4] == 'Load' and self.load_multiplier != 0: ws.write(0, c, 'Load ' + self.load_year) else: ws.write(0, c, key) if len(key) * 275 > ws.col(c).width: ws.col(c).width = len(key) * 275 for r in range(len(techs[key])): ws.write(r + 1, c, round(techs[key][r], 3)) c += 1 ws.set_panes_frozen(True) # frozen headings instead of split panes ws.set_horz_split_pos(1) # in general, freeze after last heading row ws.set_remove_splits(True) # if user does unfreeze, dont leave a split there wb.save(data_file) del wb # __init__ for PowerModel def __init__(self, stations, year=None, status=None, visualise=None, loadonly=False, progress=None): self.something = visualise self.something.power_signal = self self.status = status self.stations = stations self.progress = progress config = configparser.RawConfigParser() if len(sys.argv) > 1: config_file = sys.argv[1] else: config_file = getModelFile('SIREN.ini') config.read(config_file) self.expert = False try: expert = config.get('Base', 'expert_mode') if expert in ['true', 'on', 'yes']: self.expert = True except: pass if year is None: try: 
self.base_year = config.get('Base', 'year') except: self.base_year = '2012' else: self.base_year = year parents = [] try: parents = getParents(config.items('Parents')) except: pass try: scenario_prefix = config.get('Files', 'scenario_prefix') except: scenario_prefix = '' try: self.scenarios = config.get('Files', 'scenarios') if scenario_prefix != '' : self.scenarios += '/' + scenario_prefix for key, value in parents: self.scenarios = self.scenarios.replace(key, value) self.scenarios = self.scenarios.replace('$USER$', getUser()) self.scenarios = self.scenarios.replace('$YEAR$', self.base_year) i = self.scenarios.rfind('/') self.scenarios = self.scenarios[:i + 1] except: self.scenarios = '' try: self.load_file = config.get('Files', 'load') for key, value in parents: self.load_file = self.load_file.replace(key, value) self.load_file = self.load_file.replace('$USER$', getUser()) self.load_file = self.load_file.replace('$YEAR$', self.base_year) except: self.load_file = '' self.data_file = '' try: self.data_file = config.get('Files', 'data_file') except: try: self.data_file = config.get('Power', 'data_file') except: pass for key, value in parents: self.data_file = self.data_file.replace(key, value) self.data_file = self.data_file.replace('$USER$', getUser()) self.data_file = self.data_file.replace('$YEAR$', self.base_year) try: helpfile = config.get('Files', 'help') for key, value in parents: helpfile = helpfile.replace(key, value) helpfile = helpfile.replace('$USER$', getUser()) helpfile = helpfile.replace('$YEAR$', self.base_year) except: helpfile = '' if self.progress is None: self.show_progress = None else: progress_bar = True try: progress_bar = config.get('View', 'progress_bar') if progress_bar in ['false', 'no', 'off']: self.show_progress = None else: self.show_progress = True try: self.progress_bar = int(self.progress_bar) except: self.progress_bar = 0 except: self.show_progress = True self.progress_bar = 0 # # choose what power data to collect (once only) # self.plot_order = ['show_menu', 'actual', 'cumulative', 'by_station', 'adjust', 'show_load', 'shortfall', 'grid_losses', 'gross_load', 'save_plot', 'visualise', 'show_pct', 'maximise', 'block', 'by_day', 'by_month', 'by_season', 'by_period', 'hour', 'total', 'month', 'season', 'period', 'duration', 'augment', 'shortfall_detail', 'summary', 'save_data', 'save_detail', 'save_tech', 'save_match', 'financials'] self.initials = ['actual', 'by_station', 'grid_losses', 'save_data', 'gross_load', 'summary', 'financials'] #, 'show_menu'] self.hdrs = {'show_menu': 'Check / Uncheck all', 'actual': 'Generation - use actual generation figures', 'cumulative': 'Generation - total (cumulative)', 'by_station': 'Generation - from chosen stations', 'adjust': 'Generation - adjust generation', 'show_load': 'Generation - show Load', 'shortfall': 'Generation - show shortfall from Load', 'grid_losses': 'Generation - reduce generation by grid losses', 'gross_load': 'Add Existing Rooftop PV to Load (Gross Load)', 'save_plot': 'Save plot data', 'visualise': 'Visualise generation', 'show_pct': 'Show generation as a percentage of load', 'maximise': 'Maximise Plot windows', 'block': 'Show plots one at a time', 'by_day': 'Energy by day', 'by_month': 'Energy by month', 'by_season': 'Energy by season', 'by_period': 'Energy by period', 'hour': 'Power by hour', 'total': 'Power - diurnal profile', 'month': 'Power - diurnal profile by month', 'season': 'Power - diurnal profile by season', 'period': 'Power - diurnal profile by period', 'duration': 'Power - Load 
duration', 'augment': 'Power - augmented by hour', 'shortfall_detail': 'Power - Shortfall analysis', 'summary': 'Show Summary/Other Tables', 'save_data': 'Save initial Hourly Data Output', 'save_detail': 'Save Hourly Data Output by Station', 'save_tech': 'Save Hourly Data Output by Technology', 'save_match': 'Save Powermatch Inputs', 'financials': 'Run Financial Models'} self.spacers = {'actual': 'Show in Plot', 'save_plot': 'Choose plots (all use a full year of data)', 'summary': 'Choose tables'} self.plots = {} for i in range(len(self.plot_order)): self.plots[self.plot_order[i]] = False self.load_year = self.base_year if loadonly: if self.load_file == '' or not os.path.exists(self.load_file): self.load_file = None return plot_order = ['show_menu', 'save_plot', 'maximise', 'block', 'by_day', 'by_month', 'by_season', 'by_period', 'hour', 'total', 'month', 'season', 'period'] spacers = {'maximise': 'Choose plots (all use a full year of data)'} self.plots['show_load'] == True what_plots = whatPlots(self.plots, plot_order, self.hdrs, spacers, 0., self.base_year, self.load_year, 0, [0, 0], [0, 0], [0, 0], [], initial=False, helpfile=helpfile) what_plots.exec_() return self.technologies = '' self.load_growth = 0. self.storage = [0., 0.] self.recharge = [0., 1.] self.discharge = [0., 1.] plot_opts = [] try: plot_opts = config.items('Power') except: pass for key, value in plot_opts: if key == 'save_balance': # old name for save_match key = 'save_match' if key in self.plots: if value.lower() in ['true', 'yes', 'on']: self.plots[key] = True elif key == 'load_growth': if value[-1] == '%': self.load_growth = float(value[:-1]) / 100. else: self.load_growth = float(value) elif key == 'storage': if ',' in value: bits = value.split(',') self.storage = [float(bits[0].strip()), float(bits[1].strip())] else: self.storage = [float(value), 0.] elif key == 'technologies': self.technologies = value elif key == 'shortfall_iterations': self.iterations = int(value) try: storage = config.get('Storage', 'storage') if ',' in storage: bits = storage.split(',') self.storage = [float(bits[0].strip()), float(bits[1].strip())] else: self.storage = [float(storage), 0.] 
except: pass try: self.show_menu = self.plots['show_menu'] except: self.show_menu = True try: self.discharge[0] = float(config.get('Storage', 'discharge_max')) self.discharge[1] = float(config.get('Storage', 'discharge_eff')) if self.discharge[1] < 0.5: self.discharge[1] = 1 - self.discharge[1] self.recharge[0] = float(config.get('Storage', 'recharge_max')) self.recharge[1] = float(config.get('Storage', 'recharge_eff')) if self.recharge[1] < 0.5: self.recharge[1] = 1 - self.recharge[1] except: pass if __name__ == '__main__': self.show_menu = True self.plots['save_data'] = True if not self.plots['save_data']: self.plot_order.remove('save_data') if len(self.stations) == 1: self.plot_order.remove('cumulative') self.plot_order.remove('by_station') self.plot_order.remove('gross_load') # check if we can find a load file if self.load_file == '' or not os.path.exists(self.load_file): self.can_do_load = False self.plot_order.remove('augment') self.plot_order.remove('duration') self.plot_order.remove('show_load') self.plot_order.remove('show_pct') self.plot_order.remove('shortfall') self.plot_order.remove('shortfall_detail') else: self.can_do_load = True if self.show_menu: if __name__ == '__main__': app = QtWidgets.QApplication(sys.argv) what_plots = whatPlots(self.plots, self.plot_order, self.hdrs, self.spacers, self.load_growth, self.base_year, self.load_year, self.iterations, self.storage, self.discharge, self.recharge, initial=True, helpfile=helpfile) what_plots.exec_() self.plots, self.load_growth, self.load_year, self.load_multiplier, self.iterations, \ self.storage, self.discharge, self.recharge = what_plots.getValues() if self.plots is None: self.something.power_signal = None return self.selected = None if self.plots['by_station']: self.selected = [] if len(stations) == 1: self.selected.append(stations[0].name) else: selected = whatStations(stations, self.plots['gross_load'], self.plots['actual']) selected.exec_() self.selected = selected.getValues() if self.selected is None: return # # collect the data (once only) # self.stn_outs = [] self.model = SuperPower(stations, self.plots, False, year=self.base_year, selected=self.selected, status=status, progress=self.progress) self.model.getPower() if len(self.model.power_summary) == 0: return self.power_summary = self.model.power_summary self.ly, self.x = self.model.getLy() if self.plots['save_data'] or self.plots['financials'] or self.plots['save_detail']: self.stn_outs, self.stn_tech, self.stn_size, self.stn_pows, self.stn_grid, \ self.stn_path = self.model.getStnOuts() elif self.plots['save_tech'] or self.plots['save_match']: self.stn_outs, self.stn_tech = self.model.getStnTech() elif self.plots['visualise']: self.stn_outs, self.stn_pows = self.model.getStnPows() self.suffix = '' if len(self.stations) == 1: self.suffix = ' - ' + self.stations[0].name elif len(self.stn_outs) == 1: self.suffix = ' - ' + self.stn_outs[0] elif self.plots['by_station']: if len(self.ly) == 1: self.suffix = ' - ' + list(self.ly.keys())[0] else: self.suffix = ' - Chosen Stations' if self.plots['save_data']: if self.data_file == '': data_file = 'Power_Table_%s.xls' % \ QtCore.QDateTime.toString(QtCore.QDateTime.currentDateTime(), 'yyyy-MM-dd_hhmm') else: data_file = self.data_file stnsh = {} for i in range(len(self.stn_outs)): stnsh[self.stn_outs[i]] = self.stn_pows[i][:] self.save_detail(data_file, stnsh) del stnsh if self.plots['summary']: fields = ['name', 'technology', 'capacity', 'cf', 'generation'] sumfields = ['capacity', 'generation'] if 
getattr(self.power_summary[0], 'transmitted') != None: fields.append('transmitted') sumfields.append('transmitted') dialog = displaytable.Table(self.power_summary, sumfields=sumfields, units='capacity=MW generation=MWh transmitted=MWh', sumby='technology', fields=fields, save_folder=self.scenarios) dialog.exec_() del dialog if self.plots['financials']: do_financials = True else: do_financials = False if self.plots['save_data'] or self.plots['summary']: show_summ = True else: show_summ = False do_plots = True # # loop around processing plots # if do_plots: if plt.get_backend() != 'TkAgg': plt.switch_backend('TkAgg') self.gen_pct = None self.load_data = None if self.plots['save_detail']: pass else: self.initials.append('save_detail') if not self.plots['save_tech']: self.initials.append('save_tech') if not self.plots['visualise']: self.initials.append('visualise') self.load_key = '' self.adjustby = None while True: if self.plots['visualise'] and self.something is not None: vis2 = Visualise(self.stn_outs, self.stn_pows, self.something, year=self.base_year) vis2.setWindowModality(Qt.Qt.WindowModal) vis2.setWindowFlags(vis2.windowFlags() | Qt.Qt.WindowSystemMenuHint | Qt.Qt.WindowMinMaxButtonsHint) vis2.exec_() wrkly = {} summs = {} if self.load_key != '': try: del wrkly[self.load_key] except: pass self.load_key = '' if (self.plots['show_load'] or self.plots['save_match'] or self.plots['shortfall'] \ or self.plots['shortfall_detail']) and self.can_do_load: if self.load_data is None: tf = open(self.load_file, 'r') lines = tf.readlines() tf.close() self.load_data = [] bit = lines[0].rstrip().split(',') if len(bit) > 0: # multiple columns for b in range(len(bit)): if bit[b][:4].lower() == 'load': if bit[b].lower().find('kwh') > 0: # kWh not MWh for i in range(1, len(lines)): bit = lines[i].rstrip().split(',') self.load_data.append(float(bit[b]) * 0.001) else: for i in range(1, len(lines)): bit = lines[i].rstrip().split(',') self.load_data.append(float(bit[b])) else: for i in range(1, len(lines)): self.load_data.append(float(lines[i].rstrip())) if self.load_multiplier != 0: key = 'Load ' + self.load_year else: key = 'Load' # lines[0].rstrip() self.load_key = key wrkly[key] = [] if self.load_multiplier != 0: for i in range(len(self.load_data)): wrkly[key].append(self.load_data[i] * (1 + self.load_multiplier)) else: wrkly[key] = self.load_data[:] else: self.plots['show_pct'] = False if self.plots['adjust']: if self.load_key == '': if self.adjustby is None: adjust = Adjustments(list(self.ly.keys())) else: adjust = Adjustments(self.adjustby) else: if self.adjustby is None: adjust = Adjustments(list(self.ly.keys()), self.load_key, wrkly[self.load_key], self.ly, self.load_year) else: adjust = Adjustments(self.adjustby, self.load_key, wrkly[self.load_key], self.ly, self.load_year) adjust.exec_() self.adjustby = adjust.getValues() else: self.adjustby = None for key in self.ly: if self.adjustby is None: wrkly[key] = self.ly[key][:] else: wrkly[key] = [] if key == 'Generation': for i in range(len(self.ly[key])): wrkly[key].append(self.ly[key][i]) else: for i in range(len(self.ly[key])): wrkly[key].append(self.ly[key][i] * self.adjustby[key]) if self.plots['shortfall'] or self.plots['shortfall_detail'] or self.plots['save_match']: self.plots['show_load'] = True self.plots['cumulative'] = True try: del wrkly['Storage'] except: pass if self.load_data is None: self.do_load = False else: self.do_load = True if self.plots['show_load']: total_gen = [] for i in range(len(self.x)): total_gen.append(0.) 
for key, value in wrkly.items(): if key == 'Generation': continue if key == 'Storage' or key == 'Excess': continue elif key[:4] == 'Load': pass else: for i in range(len(value)): total_gen[i] += value[i] if self.storage[0] > 0: wrkly['Storage'] = [] wrkly['Excess'] = [] for i in range(len(self.x)): wrkly['Storage'].append(0.) wrkly['Excess'].append(0.) storage_cap = self.storage[0] * 1000. if self.storage[1] > self.storage[0]: storage_carry = self.storage[0] * 1000. else: storage_carry = self.storage[1] * 1000. storage_bal = [] storage_losses = [] for i in range(len(self.x)): gap = gape = total_gen[i] - wrkly[self.load_key][i] storage_loss = 0. if gap >= 0: # excess generation if self.recharge[0] > 0 and gap > self.recharge[0]: gap = self.recharge[0] if storage_carry >= storage_cap: wrkly['Excess'][i] = gape else: if storage_carry + gap > storage_cap: gap = (storage_cap - storage_carry) * (1 / self.recharge[1]) storage_loss = gap - gap * self.recharge[1] storage_carry += gap - storage_loss if gape - gap > 0: wrkly['Excess'][i] = gape - gap if storage_carry > storage_cap: storage_carry = storage_cap else: if self.discharge[0] > 0 and -gap > self.discharge[0]: gap = -self.discharge[0] if storage_carry > -gap / self.discharge[1]: # extra storage wrkly['Storage'][i] = -gap storage_loss = gap * self.discharge[1] - gap storage_carry += gap - storage_loss else: # not enough storage if self.discharge[0] > 0 and storage_carry > self.discharge[0]: storage_carry = self.discharge[0] wrkly['Storage'][i] = storage_carry * (1 / (2 - self.discharge[1])) storage_loss = storage_carry - wrkly['Storage'][i] storage_carry = 0 # ???? bug ??? storage_bal.append(storage_carry) storage_losses.append(storage_loss) if self.plots['shortfall_detail']: shortstuff = [] for i in range(len(self.x)): shortfall = total_gen[i] + wrkly['Storage'][i] - wrkly[self.load_key][i] if shortfall > 0: shortfall = 0 shortstuff.append(ColumnData(i + 1, the_date(self.load_year, i), [wrkly[self.load_key][i], total_gen[i], wrkly['Storage'][i], storage_losses[i], storage_bal[i], shortfall, wrkly['Excess'][i]], values=['load', 'generation', 'storage_used', 'storage_loss', 'storage_balance', 'shortfall', 'excess'])) dialog = displaytable.Table(shortstuff, title='Storage', save_folder=self.scenarios, fields=['hour', 'period', 'load', 'generation', 'storage_used', 'storage_loss', 'storage_balance', 'shortfall', 'excess']) dialog.exec_() del dialog del shortstuff if show_summ: summs['Shortfall'] = ['', '', 0., 0] summs['Excess'] = ['', '', 0., 0] for i in range(len(self.x)): if total_gen[i] > wrkly[self.load_key][i]: summs['Excess'][2] += total_gen[i] - wrkly[self.load_key][i] else: summs['Shortfall'][2] += total_gen[i] - wrkly[self.load_key][i] summs['Shortfall'][2] = round(summs['Shortfall'][2], 1) summs['Excess'][2] = round(summs['Excess'][2], 1) elif show_summ or self.plots['shortfall_detail']: if self.plots['shortfall_detail']: shortstuff = [] for i in range(len(self.x)): if total_gen[i] > wrkly[self.load_key][i]: excess = total_gen[i] - wrkly[self.load_key][i] shortfall = 0 else: shortfall = total_gen[i] - wrkly[self.load_key][i] excess = 0 shortstuff.append(ColumnData(i + 1, the_date(self.load_year, i), [wrkly[self.load_key][i], total_gen[i], shortfall, excess], values=['load', 'generation', 'shortfall', 'excess'])) dialog = displaytable.Table(shortstuff, title='Hourly Shortfall', save_folder=self.scenarios, fields=['hour', 'period', 'load', 'generation', 'shortfall', 'excess']) dialog.exec_() del dialog del shortstuff else: 
summs['Shortfall'] = ['', '', 0., 0] summs['Excess'] = ['', '', 0., 0] for i in range(len(self.x)): if total_gen[i] > wrkly[self.load_key][i]: summs['Excess'][2] += total_gen[i] - wrkly[self.load_key][i] else: summs['Shortfall'][2] += total_gen[i] - wrkly[self.load_key][i] summs['Shortfall'][2] = round(summs['Shortfall'][2], 1) summs['Excess'][2] = round(summs['Excess'][2], 1) if show_summ and self.adjustby is not None: keys = [] for key in wrkly: keys.append(key) gen = 0. for i in range(len(wrkly[key])): gen += wrkly[key][i] if key[:4] == 'Load': incr = 1 + self.load_multiplier else: try: incr = self.adjustby[key] except: incr = '' try: summs[key] = [0., round(incr, 2), round(gen, 1), 0] if key[:4] == 'Load': summs[key][0] = '' except: summs[key] = ['', '', round(gen, 1), 0] keys.sort() xtra = ['Generation', 'Load', 'Gen. - Load', 'Storage Capacity', 'Storage', 'Shortfall', 'Excess'] o = 0 gen = 0. if self.storage[0] > 0: summs['Storage Capacity'] = [self.storage[0] * 1000., '', '', 0] for i in range(len(keys)): if keys[i][:4] == 'Load': xtra[1] = keys[i] elif keys[i] in xtra: continue else: o += 1 summs[keys[i]][3] = o gen += summs[keys[i]][2] if xtra[0] not in list(summs.keys()): summs[xtra[0]] = ['', '', gen, 0] if xtra[1] in list(summs.keys()): summs[xtra[2]] = ['', '', round(gen - summs[xtra[1]][2], 1), 0] for i in range(len(xtra)): if xtra[i] in list(summs.keys()): o += 1 summs[xtra[i]][3] = o try: summs['Storage Used'] = summs.pop('Storage') except: pass try: summs['Excess Gen.'] = summs.pop('Excess') except: pass for it in self.power_summary: if self.plots['by_station']: try: summs[it.name][0] = it.capacity except: pass else: try: summs[it.technology][0] += it.capacity except: pass for key, value in summs.items(): try: value[0] = round(value[0], 2) except: pass dialog = displaytable.Table(summs, title='Generation Summary', save_folder=self.scenarios, fields=['component', 'capacity', 'multiplier', 'generation', 'row'], units='generation=MWh', sortby='row') dialog.exec_() del dialog if self.plots['save_detail'] or self.plots['save_tech']: dos = [] if self.plots['save_detail']: dos.append('') if self.plots['save_tech']: dos.append('Technology_') for l in range(len(dos)): if self.data_file == '': if year is None: data_file = 'Power_Detail_%s%s.xls' % ( dos[l] , QtCore.QDateTime.toString(QtCore.QDateTime.currentDateTime(), 'yyyy-MM-dd_hhmm')) else: data_file = 'Power_Detail_%s%s_%s.xls' % ( dos[l] , str(year), QtCore.QDateTime.toString(QtCore.QDateTime.currentDateTime(), 'yyyy-MM-dd_hhmm')) else: data_file = self.data_file keys = [] keys2 = [] if dos[l] != '': techs = {} for key, value in iter(wrkly.items()): try: i = self.stn_outs.index(key) if self.stn_tech[i] in list(techs.keys()): for j in range(len(value)): techs[self.stn_tech[i]][j] += value[j] else: techs[self.stn_tech[i]] = value[:] keys.append(self.stn_tech[i]) except: techs[key] = value[:] keys2.append(key) keys.sort() keys2.extend(keys) # put Load first self.save_detail(data_file, techs, keys=keys2) del techs else: for key in list(wrkly.keys()): try: i = self.stn_outs.index(key) keys.append(self.stn_outs[i]) except: keys2.append(key) keys.sort() keys2.extend(keys) # put Load first self.save_detail(data_file, wrkly, keys=keys2) self.showGraphs(wrkly, self.x) if __name__ == '__main__': self.show_menu = True self.plots['save_data'] = True if self.show_menu: what_plots = whatPlots(self.plots, self.plot_order, self.hdrs, self.spacers, self.load_growth, self.base_year, self.load_year, self.iterations, self.storage, self.discharge, 
self.recharge, self.initials, helpfile=helpfile) what_plots.exec_() self.plots, self.load_growth, self.load_year, self.load_multiplier, \ self.iterations, self.storage, self.discharge, self.recharge = what_plots.getValues() if self.plots is None: break else: break # # loop around doing financials # # run the financials model if do_financials: self.financial_parms = None while True: self.financials = FinancialModel(self.stn_outs, self.stn_tech, self.stn_size, self.stn_pows, self.stn_grid, self.stn_path, year=self.base_year, parms=self.financial_parms, status=self.status) if self.financials.stations is None: break self.financial_parms = self.financials.getParms() fin_fields = ['name', 'technology', 'capacity', 'generation', 'cf', 'capital_cost', 'lcoe_real', 'lcoe_nominal', 'npv'] fin_sumfields = ['capacity', 'generation', 'capital_cost', 'npv'] fin_units = 'capacity=MW generation=MWh capital_cost=$ lcoe_real=c/kWh' + \ ' lcoe_nominal=c/kWh npv=$' tot_capital = 0. tot_capacity = 0. tot_generation = 0. tot_lcoe_real = [0., 0.] tot_lcoe_nom = [0., 0.] for stn in self.financials.stations: tot_capital += stn.capital_cost tot_capacity += stn.capacity tot_generation += stn.generation for stn in self.financials.stations: tot_lcoe_real[0] += stn.lcoe_real * (stn.generation / tot_generation) tot_lcoe_nom[0] += stn.lcoe_nominal * (stn.generation / tot_generation) tot_lcoe_real[1] += stn.lcoe_real * (stn.capacity / tot_capacity) tot_lcoe_nom[1] += stn.lcoe_nominal * (stn.capacity / tot_capacity) if stn.grid_cost > 0: i = fin_fields.index('capital_cost') fin_fields.insert(i + 1, 'grid_cost') fin_sumfields.append('grid_cost') fin_units += ' grid_cost=$' break tot_fields = [['cf', tot_generation / tot_capacity / 8760], ['lcoe_real', tot_lcoe_real[0]], ['lcoe_nominal', tot_lcoe_nom[0]]] dialog = displaytable.Table(self.financials.stations, fields=fin_fields, sumfields=fin_sumfields, units=fin_units, sumby='technology', save_folder=self.scenarios, title='Financials', totfields=tot_fields) dialog.exec_() del dialog self.something.power_signal = None def getValues(self): try: return self.power_summary except: return None def getPct(self): return self.gen_pct @QtCore.pyqtSlot() def exit(self): self.something.power_signal = None return #exit()
agpl-3.0
-7,718,492,627,276,156,000
47.564084
128
0.419427
false
4.015181
false
false
false
divergentdave/inspectors-general
inspectors/pbgc.py
3
7559
#!/usr/bin/env python import datetime import logging import os import re from urllib.parse import urljoin from utils import utils, inspector # http://oig.pbgc.gov/ archive = 1998 # options: # standard since/year options for a year range to fetch from. # # Notes for IG's web team: # AUDIT_REPORTS_URL = "http://oig.pbgc.gov/evaluations/{year}.html" CONGRESSIONAL_REQUESTS_URL = "http://oig.pbgc.gov/requests.html" SEMIANNUAL_REPORTS_URL = "http://oig.pbgc.gov/reports.html" CONGRESSIONAL_TESTIMONY_URL = "http://oig.pbgc.gov/testimony.html" BASE_REPORT_URL = "http://oig.pbgc.gov/" HEADER_ROW_TEXT = [ 'Audits', 'Evaluations', 'Special Reports', 'Report Title', 'Management Advisory Reports', 'Announcements', 'Inspections', ] PDF_REGEX = re.compile("\.pdf") def run(options): year_range = inspector.year_range(options, archive) # Pull the audit reports for year in year_range: if year < 1998: # The earliest year for audit reports continue year_url = AUDIT_REPORTS_URL.format(year=year) doc = utils.beautifulsoup_from_url(year_url) results = doc.select("tr") if not results: raise inspector.NoReportsFoundError("Pension Benefit Guaranty Corporation (audit reports)") for result in results: report = report_from(result, report_type='audit', year_range=year_range) if report: inspector.save_report(report) # Pull the congressional requests doc = utils.beautifulsoup_from_url(CONGRESSIONAL_REQUESTS_URL) results = doc.select("tr") if not results: raise inspector.NoReportsFoundError("Pension Benefit Guaranty Corporation (congressional requests)") for result in results: report = report_from(result, report_type='congress', year_range=year_range) if report: inspector.save_report(report) # Pull the semiannual reports doc = utils.beautifulsoup_from_url(SEMIANNUAL_REPORTS_URL) results = doc.select("div.holder a") if not results: raise inspector.NoReportsFoundError("Pension Benefit Guaranty Corporation (semiannual reports)") for result in results: report = semiannual_report_from(result, year_range) if report: inspector.save_report(report) # Pull the congressional testimony doc = utils.beautifulsoup_from_url(CONGRESSIONAL_TESTIMONY_URL) results = doc.select("div.holder a") if not results: raise inspector.NoReportsFoundError("Pension Benefit Guaranty Corporation (congressional testimony)") for result in results: report = testimony_report_from(result, year_range) if report: inspector.save_report(report) saved_report_urls = set() def report_from(result, report_type, year_range): tds = result.select("td") if len(tds) > 0: title = inspector.sanitize(tds[0].text) else: return if (not title) or (title in HEADER_ROW_TEXT): # Skip the header rows return published_on_text = tds[2].text try: published_on = datetime.datetime.strptime(published_on_text, '%m/%d/%Y') except ValueError: published_on = datetime.datetime.strptime(published_on_text, '%m/%Y') if published_on.year not in year_range: logging.debug("[%s] Skipping, not in requested range." 
% title) return unreleased = False link = result.find("a") landing_url = urljoin(BASE_REPORT_URL, link.get('href')) if landing_url.endswith(".pdf"): # Inline report report_url = landing_url landing_url = None summary = None else: landing_page = utils.beautifulsoup_from_url(landing_url) summary = " ".join(landing_page.select("div.holder")[0].text.split()) report_link = landing_page.find("a", href=PDF_REGEX) if report_link: report_url = urljoin(landing_url, report_link.get('href')) else: unreleased = True report_url = None report_id = tds[1].text.strip().replace("/", "-").replace(" ", "-") if report_id == "N-A": report_id = tds[0].text.strip().replace("/", "-").replace(" ", "-") if report_id == "": if report_url: report_id = os.path.splitext(os.path.basename(report_url))[0] else: report_id = os.path.splitext(os.path.basename(landing_url))[0] if report_url: # OIG MAR-2012-10/PA-12-87 is posted under both Audits/Evaluations/MARs and # Congressional Requests. if report_url in saved_report_urls: return saved_report_urls.add(report_url) report = { 'inspector': "pbgc", 'inspector_url': "http://oig.pbgc.gov", 'agency': "pbgc", 'agency_name': "Pension Benefit Guaranty Corporation", 'type': report_type, 'report_id': report_id, 'url': report_url, 'title': title, 'published_on': datetime.datetime.strftime(published_on, "%Y-%m-%d"), } if summary: report['summary'] = summary if unreleased: report['unreleased'] = unreleased if landing_url: report['landing_url'] = landing_url return report def semiannual_report_from(result, year_range): # This will look like "toggleReport('SARC-47-49');" and we want to pull out # the SARC-47-49 report_id_javascript = result.get('onclick') report_id = re.search("'(.*)'", report_id_javascript).groups()[0] landing_url = "http://oig.pbgc.gov/sarc/{report_id}.html".format(report_id=report_id) landing_page = utils.beautifulsoup_from_url(landing_url) title = " ".join(landing_page.select("h3")[0].text.split()) relative_report_url = landing_page.find("a", text="Read Full Report").get('href') # The relative report urls try to go up a level too many. Most browsers seem # to just ignore this so we will too. relative_report_url = relative_report_url.replace("../", "", 1) report_url = urljoin(SEMIANNUAL_REPORTS_URL, relative_report_url) # There is probably a way to be a bit smarter about this summary = landing_page.text.strip() published_on_text = title.rsplit("-")[-1].rsplit("through")[-1].replace(".", "").strip() published_on = datetime.datetime.strptime(published_on_text, '%B %d, %Y') if published_on.year not in year_range: logging.debug("[%s] Skipping, not in requested range." 
% title) return report = { 'inspector': "pbgc", 'inspector_url': "http://oig.pbgc.gov", 'agency': "pbgc", 'agency_name': "Pension Benefit Guaranty Corporation", 'type': 'semiannual_report', 'report_id': report_id, 'url': report_url, 'title': title, 'published_on': datetime.datetime.strftime(published_on, "%Y-%m-%d"), } if summary: report['summary'] = summary if landing_url: report['landing_url'] = landing_url return report def testimony_report_from(result, year_range): title = result.text report_url = urljoin(BASE_REPORT_URL, result.get('href')) report_filename = report_url.split("/")[-1] report_id, _ = os.path.splitext(report_filename) published_on_text = "-".join(re.search('\((\w+) (\d+), (\d{4})\)', result.text).groups()) try: published_on = datetime.datetime.strptime(published_on_text, '%b-%d-%Y') except ValueError: published_on = datetime.datetime.strptime(published_on_text, '%B-%d-%Y') if published_on.year not in year_range: logging.debug("[%s] Skipping, not in requested range." % title) return report = { 'inspector': "pbgc", 'inspector_url': "http://oig.pbgc.gov", 'agency': "pbgc", 'agency_name': "Pension Benefit Guaranty Corporation", 'type': 'testimony', 'report_id': report_id, 'url': report_url, 'title': title, 'published_on': datetime.datetime.strftime(published_on, "%Y-%m-%d"), } return report utils.run(run) if (__name__ == "__main__") else None
cc0-1.0
4,551,702,568,992,596,000
30.894515
105
0.671385
false
3.157477
true
false
false
TotalF2PSkillers/f2p-state
runeclan/xp_tracker.py
1
1932
import requests
from lxml import html

import runeclan.scraper_utils as scraper

BASEPATH='http://www.runeclan.com/clan/'
XP_TRACKER_PATH='/xp-tracker/'
CRITERIA1_PARAM='criteria_set1'
CRITERIA2_PARAM='criteria_set2'
SKILL_PARAM='skill'
LAST_MONTH_CRITERIA='last_month'
CURRENT_MONTH_CRITERIA='month'
DOUBLE_WP_WEEKEND_CRITERIA='double_xp_weekend'
LAST_YEAR_CRITERIA='last_year'
ACTIVE_PER_PAGE=50
OVERALL_SKILL_LABEL='Overall'


def get_users(clan, skill, top, mode):
    page = 0
    result = {'skill': '', 'highscores': []}
    basepath = BASEPATH + __get_clan_identifier(clan)
    while True:
        page = page + 1
        users = __get_active_users_page(basepath, skill, page, mode)
        result['highscores'].extend(users['highscores'])
        result['skill'] = users['skill']
        if len(users['highscores']) < ACTIVE_PER_PAGE or (len(result['highscores']) >= top and users['skill'] != OVERALL_SKILL_LABEL):
            break
    return result


def __get_clan_identifier(clan):
    return clan.replace(' ', '_')


def __get_active_users_page(basepath, skill, page, duration):
    response = requests.get(basepath + XP_TRACKER_PATH + str(page),
                            params=__get_parameters(duration, skill))
    dom = html.fromstring(response.content)
    return {'highscores': scraper.get_active_users(dom), 'skill': scraper.get_active_skill(dom)}


def __get_parameters(mode, skill):
    if mode == 'preview':
        return {CRITERIA1_PARAM: CURRENT_MONTH_CRITERIA, CRITERIA2_PARAM: LAST_MONTH_CRITERIA, SKILL_PARAM: skill}
    elif mode == 'normal':
        return {CRITERIA1_PARAM: LAST_MONTH_CRITERIA, CRITERIA2_PARAM: CURRENT_MONTH_CRITERIA, SKILL_PARAM: skill}
    elif mode == 'year':
        return {CRITERIA1_PARAM: LAST_YEAR_CRITERIA, CRITERIA2_PARAM: CURRENT_MONTH_CRITERIA, SKILL_PARAM: skill}
    else:
        return {CRITERIA1_PARAM: DOUBLE_WP_WEEKEND_CRITERIA, CRITERIA2_PARAM: LAST_MONTH_CRITERIA, SKILL_PARAM: skill}
apache-2.0
2,997,439,324,685,730,000
34.796296
134
0.689959
false
3.004666
false
false
false
SnowWalkerJ/quantlib
quant/data/wind/tables/ashareholdernumber.py
1
1168
from ....common.db.sql import VARCHAR, Numeric as NUMBER, DateTime as DATETIME, Column, BaseModel, CLOB, DATE

VARCHAR2 = VARCHAR


class AShareHolderNumber(BaseModel):
    """
    4.37 Number of shareholders of Chinese A-shares

    Attributes
    ----------
    object_id: VARCHAR2(100)
        Object ID
    s_info_windcode: VARCHAR2(40)
        Wind code
    ann_dt: VARCHAR2(8)
        Announcement date
    s_holder_enddate: VARCHAR2(8)
        End date
    s_holder_num: NUMBER(20,4)
        Number of A-share shareholder accounts
    s_holder_total_num: NUMBER(20,4)
        Total number of shareholder accounts. For a pure A-share listing this is the A-share count;
        with B shares it is the combined A+B count; with H shares the combined A+H count;
        with overseas shares the combined A-share plus overseas count.
    opdate: DATETIME
        opdate
    opmode: VARCHAR(1)
        opmode

    """
    __tablename__ = "AShareHolderNumber"
    object_id = Column(VARCHAR2(100), primary_key=True)
    s_info_windcode = Column(VARCHAR2(40))
    ann_dt = Column(VARCHAR2(8))
    s_holder_enddate = Column(VARCHAR2(8))
    s_holder_num = Column(NUMBER(20,4))
    s_holder_total_num = Column(NUMBER(20,4))
    opdate = Column(DATETIME)
    opmode = Column(VARCHAR(1))
gpl-3.0
5,777,270,374,049,963,000
26
109
0.614035
false
2.353211
false
false
false
shub0/algorithm-data-structure
python/tree_traversal.py
1
6044
#! /usr/bin/python
'''
Binary Tree Traversal
'''

from node_struct import TreeNode


class Solution:
    def preOrderTraversalNonRecursive(self, root):
        output = list()
        nodes_in_tree = list()
        current_node = root
        while len(nodes_in_tree) > 0 or current_node:
            while current_node:
                output.append(current_node.val) # visit()
                nodes_in_tree.append(current_node)
                current_node = current_node.left
            current_node = nodes_in_tree.pop()
            current_node = current_node.right
        return output

    def inOrderTraversalNonRecursive(self, root):
        output = list()
        nodes_in_tree = list()
        current_node = root
        while len(nodes_in_tree) > 0 or current_node:
            while current_node:
                nodes_in_tree.append(current_node)
                current_node = current_node.left
            current_node = nodes_in_tree.pop()
            output.append(current_node.val) # visit()
            current_node = current_node.right
        return output

    def postOrderTraversalNonRecursive(self, root):
        output = list()
        nodes_in_tree = list()
        current_node = root
        prev_node = None
        while current_node:
            while current_node.left:
                nodes_in_tree.append(current_node)
                current_node = current_node.left
            while not current_node.right or current_node.right == prev_node:
                output.append(current_node.val)
                prev_node = current_node
                if len(nodes_in_tree) == 0:
                    return output
                current_node = nodes_in_tree.pop()
            nodes_in_tree.append(current_node)
            current_node = current_node.right
        return output

    def printReverse(self, start, end, output):
        self.reverse(start, end)
        curr = end
        while True:
            output.append(curr.val)
            if (curr == start):
                break
            curr = curr.right
        self.reverse(start, end)

    def levelOrderTraversal(self, root):
        output = list()
        if not root:
            return output
        nodes_in_tree = list()
        nodes_in_tree.append(root)
        nodes_in_tree.append(None)
        curr_level = list()
        while len(nodes_in_tree) > 0:
            current_node = nodes_in_tree.pop(0)
            if not current_node:
                output.append(curr_level[:])
                curr_level = list()
                if len(nodes_in_tree) > 0:
                    nodes_in_tree.append(None)
                else:
                    return output
            else:
                curr_level.append(current_node.val)
                if current_node.left:
                    nodes_in_tree.append(current_node.left)
                if current_node.right:
                    nodes_in_tree.append(current_node.right)
        return output

    def preorderMorris(self, root):
        curr = root
        output = list()
        while curr:
            if not curr.left:
                output.append(curr.val)
                curr = curr.right
            else:
                cursor = curr.left
                while cursor.right and cursor.right != curr:
                    cursor = cursor.right
                if cursor.right == curr:
                    cursor.right = None
                    curr = curr.right
                else:
                    output.append(curr.val)
                    cursor.right = curr
                    curr = curr.left
        return output

    def inorderMorris(self, root):
        curr = root
        output = list()
        while curr:
            if not curr.left:
                output.append(curr.val)
                curr = curr.right
            else:
                cursor = curr.left
                while cursor.right and cursor.right != curr:
                    cursor = cursor.right
                if not cursor.right:
                    cursor.right = curr
                    curr = curr.left
                else:
                    cursor.right = None
                    output.append(curr.val)
                    curr = curr.right
        return output

    def reverse(self, start, end):
        if start == end:
            return
        x, y = start, start.right
        while x != end:
            z = y.right
            y.right = x
            x = y
            y = z

    def postorderMorris(self, root):
        pivot = TreeNode(0)
        pivot.left = root
        curr = pivot
        output = list()
        def printReverse(start, end):
            self.reverse(start, end)
            curr = end
            while True:
                output.append(curr.val)
                if (curr == start):
                    break
                curr = curr.right
            self.reverse(end, start)
        while curr:
            if not curr.left:
                curr = curr.right
                continue
            cursor = curr.left
            while cursor.right and cursor.right != curr:
                cursor = cursor.right
            if not cursor.right:
                cursor.right = curr
                curr = curr.left
            else:
                printReverse(curr.left, cursor)
                cursor.right = None
                curr = curr.right
        return output


if __name__ == '__main__':
    solution = Solution()
    root = TreeNode(1)
    root.left = TreeNode(0)
    root.right = TreeNode(2)
    root.right.right = TreeNode(3)
    print 'pre order: %s' % solution.preOrderTraversalNonRecursive(root)
    print 'pre order: %s' % solution.preorderMorris(root)
    print 'in order: %s' % solution.inOrderTraversalNonRecursive(root)
    print 'in order: %s' % solution.inorderMorris(root)
    print 'post order: %s' % solution.postOrderTraversalNonRecursive(root)
    print 'post order: %s' % solution.postorderMorris(root)
    print 'level order: %s' % solution.levelOrderTraversal(root)
bsd-3-clause
7,991,569,660,289,871,000
31.847826
76
0.506287
false
4.341954
false
false
false
pmacosta/putil
docs/support/pcsv_example_5.py
1
1211
# pcsv_example_5.py
# Copyright (c) 2013-2016 Pablo Acosta-Serafini
# See LICENSE for details
# pylint: disable=C0111,C0410,W0104

import putil.misc, putil.pcsv


def main():
    ctx = putil.misc.TmpFile
    with ctx() as ifname:
        with ctx() as ofname:
            # Create first data file
            data = [
                ['Ctrl', 'Ref', 'Result'],
                [1, 3, 10],
                [1, 4, 20],
                [2, 4, 30],
                [2, 5, 40],
                [3, 5, 50]
            ]
            putil.pcsv.write(ifname, data, append=False)
            # Sort
            putil.pcsv.dsort(
                fname=ifname,
                order=[{'Ctrl':'D'}, {'Ref':'A'}],
                has_header=True,
                ofname=ofname
            )
            # Verify that resulting file is correct
            ref_data = [
                [3, 5, 50],
                [2, 4, 30],
                [2, 5, 40],
                [1, 3, 10],
                [1, 4, 20]
            ]
            obj = putil.pcsv.CsvFile(ofname, has_header=True)
            assert obj.header() == ['Ctrl', 'Ref', 'Result']
            assert obj.data() == ref_data


if __name__ == '__main__':
    main()
mit
-8,343,569,794,735,711,000
27.833333
61
0.410405
false
3.58284
false
false
false
victoredwardocallaghan/xen
tools/python/xen/util/acmpolicy.py
44
60076
#============================================================================ # This library is free software; you can redistribute it and/or # modify it under the terms of version 2.1 of the GNU Lesser General Public # License as published by the Free Software Foundation. # # This library is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with this library; if not, write to the Free Software # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA #============================================================================ # Copyright (C) 2006,2007 International Business Machines Corp. # Author: Stefan Berger <stefanb@us.ibm.com> #============================================================================ import os import stat import array import struct import shutil import commands # sha is deprecated as of python 2.6 try: from hashlib import sha1 except ImportError: # but hashlib was only added in python 2.5 from sha import new as sha1 from xml.dom import minidom, Node from xen.xend.XendLogging import log from xen.util import xsconstants, bootloader, mkdir from xen.util.xspolicy import XSPolicy from xen.xend.XendError import SecurityError import xen.util.xsm.acm.acm as security from xen.util.xsm.xsm import XSMError from xen.xend import XendOptions ACM_POLICIES_DIR = security.policy_dir_prefix + "/" # Constants needed for generating a binary policy from its XML # representation ACM_POLICY_VERSION = 4 # Latest one ACM_CHWALL_VERSION = 1 ACM_STE_VERSION = 1 ACM_MAGIC = 0x001debc; ACM_NULL_POLICY = 0 ACM_CHINESE_WALL_POLICY = 1 ACM_SIMPLE_TYPE_ENFORCEMENT_POLICY = 2 ACM_POLICY_UNDEFINED = 15 ACM_LABEL_UNLABELED = "__UNLABELED__" ACM_LABEL_UNLABELED_DISPLAY = "unlabeled" """ Error codes reported in when trying to test for a new policy These error codes are reported in an array of tuples where each error code is followed by a parameter describing the error more closely, such as a domain id. 
""" ACM_EVTCHN_SHARING_VIOLATION = 0x100 ACM_GNTTAB_SHARING_VIOLATION = 0x101 ACM_DOMAIN_LOOKUP = 0x102 ACM_CHWALL_CONFLICT = 0x103 ACM_SSIDREF_IN_USE = 0x104 DEFAULT_policy = \ "<?xml version=\"1.0\" ?>\n" +\ "<SecurityPolicyDefinition xmlns=\"http://www.ibm.com\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" xsi:schemaLocation=\"http://www.ibm.com ../../security_policy.xsd\">\n" +\ " <PolicyHeader>\n" +\ " <PolicyName>DEFAULT</PolicyName>\n" +\ " <Version>1.0</Version>\n" +\ " </PolicyHeader>\n" +\ " <SimpleTypeEnforcement>\n" +\ " <SimpleTypeEnforcementTypes>\n" +\ " <Type>SystemManagement</Type>\n" +\ " <Type>__UNLABELED__</Type>\n" +\ " </SimpleTypeEnforcementTypes>\n" +\ " </SimpleTypeEnforcement>\n" +\ " <ChineseWall>\n" +\ " <ChineseWallTypes>\n" +\ " <Type>SystemManagement</Type>\n" +\ " </ChineseWallTypes>\n" +\ " </ChineseWall>\n" +\ " <SecurityLabelTemplate>\n" +\ " <SubjectLabels bootstrap=\"SystemManagement\">\n" +\ " <VirtualMachineLabel>\n" +\ " <Name%s>SystemManagement</Name>\n" +\ " <SimpleTypeEnforcementTypes>\n" +\ " <Type>SystemManagement</Type>\n" +\ " <Type>__UNLABELED__</Type>\n" +\ " </SimpleTypeEnforcementTypes>\n" +\ " <ChineseWallTypes>\n" +\ " <Type/>\n" +\ " </ChineseWallTypes>\n" +\ " </VirtualMachineLabel>\n" +\ " <VirtualMachineLabel>\n" +\ " <Name>__UNLABELED__</Name>\n" +\ " <SimpleTypeEnforcementTypes>\n" +\ " <Type>__UNLABELED__</Type>\n" +\ " </SimpleTypeEnforcementTypes>\n" +\ " <ChineseWallTypes>\n" +\ " <Type/>\n" +\ " </ChineseWallTypes>\n" +\ " </VirtualMachineLabel>\n" +\ " </SubjectLabels>\n" +\ " <ObjectLabels>\n" +\ " <ResourceLabel>\n" +\ " <Name>__UNLABELED__</Name>\n" +\ " <SimpleTypeEnforcementTypes>\n" +\ " <Type>__UNLABELED__</Type>\n" +\ " </SimpleTypeEnforcementTypes>\n" +\ " </ResourceLabel>\n" +\ " </ObjectLabels>\n" +\ " </SecurityLabelTemplate>\n" +\ "</SecurityPolicyDefinition>\n" ACM_SCHEMA="""<?xml version="1.0" encoding="UTF-8"?> <!-- Author: Ray Valdez, Reiner Sailer {rvaldez,sailer}@us.ibm.com --> <!-- This file defines the schema, which is used to define --> <!-- the security policy and the security labels in Xen. 
--> <xsd:schema xmlns:xsd="http://www.w3.org/2001/XMLSchema" targetNamespace="http://www.ibm.com" xmlns="http://www.ibm.com" elementFormDefault="qualified"> <xsd:element name="SecurityPolicyDefinition"> <xsd:complexType> <xsd:sequence> <xsd:element ref="PolicyHeader" minOccurs="1" maxOccurs="1"></xsd:element> <xsd:element ref="SimpleTypeEnforcement" minOccurs="0" maxOccurs="1"></xsd:element> <xsd:element ref="ChineseWall" minOccurs="0" maxOccurs="1"></xsd:element> <xsd:element ref="SecurityLabelTemplate" minOccurs="1" maxOccurs="1"></xsd:element> </xsd:sequence> </xsd:complexType> </xsd:element> <xsd:element name="PolicyHeader"> <xsd:complexType> <xsd:sequence> <xsd:element name="PolicyName" minOccurs="1" maxOccurs="1" type="xsd:string"></xsd:element> <xsd:element name="PolicyUrl" minOccurs="0" maxOccurs="1" type="xsd:string"></xsd:element> <xsd:element name="Reference" type="xsd:string" minOccurs="0" maxOccurs="1" /> <xsd:element name="Date" minOccurs="0" maxOccurs="1" type="xsd:string"></xsd:element> <xsd:element name="NameSpaceUrl" minOccurs="0" maxOccurs="1" type="xsd:string"></xsd:element> <xsd:element name="Version" minOccurs="1" maxOccurs="1" type="VersionFormat"/> <xsd:element ref="FromPolicy" minOccurs="0" maxOccurs="1"/> </xsd:sequence> </xsd:complexType> </xsd:element> <xsd:element name="ChineseWall"> <xsd:complexType> <xsd:sequence> <xsd:element ref="ChineseWallTypes" minOccurs="1" maxOccurs="1" /> <xsd:element ref="ConflictSets" minOccurs="0" maxOccurs="1" /> </xsd:sequence> <xsd:attribute name="priority" type="PolicyOrder" use="optional"></xsd:attribute> </xsd:complexType> </xsd:element> <xsd:element name="SimpleTypeEnforcement"> <xsd:complexType> <xsd:sequence> <xsd:element ref="SimpleTypeEnforcementTypes" /> </xsd:sequence> <xsd:attribute name="priority" type="PolicyOrder" use="optional"></xsd:attribute> </xsd:complexType> </xsd:element> <xsd:element name="SecurityLabelTemplate"> <xsd:complexType> <xsd:sequence> <xsd:element name="SubjectLabels" minOccurs="0" maxOccurs="1"> <xsd:complexType> <xsd:sequence> <xsd:element ref="VirtualMachineLabel" minOccurs="1" maxOccurs="unbounded"></xsd:element> </xsd:sequence> <xsd:attribute name="bootstrap" type="xsd:string" use="required"></xsd:attribute> </xsd:complexType> </xsd:element> <xsd:element name="ObjectLabels" minOccurs="0" maxOccurs="1"> <xsd:complexType> <xsd:sequence> <xsd:element ref="ResourceLabel" minOccurs="1" maxOccurs="unbounded"></xsd:element> </xsd:sequence> </xsd:complexType> </xsd:element> </xsd:sequence> </xsd:complexType> </xsd:element> <xsd:element name="ChineseWallTypes"> <xsd:complexType> <xsd:sequence> <xsd:element maxOccurs="unbounded" minOccurs="1" ref="Type" /> </xsd:sequence> </xsd:complexType> </xsd:element> <xsd:element name="ConflictSets"> <xsd:complexType> <xsd:sequence> <xsd:element maxOccurs="unbounded" minOccurs="1" ref="Conflict" /> </xsd:sequence> </xsd:complexType> </xsd:element> <xsd:element name="SimpleTypeEnforcementTypes"> <xsd:complexType> <xsd:sequence> <xsd:element maxOccurs="unbounded" minOccurs="1" ref="Type" /> </xsd:sequence> </xsd:complexType> </xsd:element> <xsd:element name="Conflict"> <xsd:complexType> <xsd:sequence> <xsd:element maxOccurs="unbounded" minOccurs="1" ref="Type" /> </xsd:sequence> <xsd:attribute name="name" type="xsd:string" use="required"></xsd:attribute> </xsd:complexType> </xsd:element> <xsd:element name="VirtualMachineLabel"> <xsd:complexType> <xsd:sequence> <xsd:element name="Name" type="NameWithFrom"></xsd:element> <xsd:element ref="SimpleTypeEnforcementTypes" 
minOccurs="0" maxOccurs="1" /> <xsd:element ref="ChineseWallTypes" minOccurs="0" maxOccurs="unbounded" /> </xsd:sequence> </xsd:complexType> </xsd:element> <xsd:element name="ResourceLabel"> <xsd:complexType> <xsd:sequence> <xsd:element name="Name" type="NameWithFrom"></xsd:element> <xsd:element name="SimpleTypeEnforcementTypes" type="SingleSimpleTypeEnforcementType" /> </xsd:sequence> </xsd:complexType> </xsd:element> <xsd:element name="Name" type="xsd:string" /> <xsd:element name="Type" type="xsd:string" /> <xsd:simpleType name="PolicyOrder"> <xsd:restriction base="xsd:string"> <xsd:enumeration value="PrimaryPolicyComponent"></xsd:enumeration> </xsd:restriction> </xsd:simpleType> <xsd:element name="FromPolicy"> <xsd:complexType> <xsd:sequence> <xsd:element name="PolicyName" minOccurs="1" maxOccurs="1" type="xsd:string"/> <xsd:element name="Version" minOccurs="1" maxOccurs="1" type="VersionFormat"/> </xsd:sequence> </xsd:complexType> </xsd:element> <xsd:simpleType name="VersionFormat"> <xsd:restriction base="xsd:string"> <xsd:pattern value="[0-9]{1,8}.[0-9]{1,8}"></xsd:pattern> </xsd:restriction> </xsd:simpleType> <xsd:complexType name="NameWithFrom"> <xsd:simpleContent> <xsd:extension base="xsd:string"> <xsd:attribute name="from" type="xsd:string" use="optional"></xsd:attribute> </xsd:extension> </xsd:simpleContent> </xsd:complexType> <xsd:complexType name="SingleSimpleTypeEnforcementType"> <xsd:sequence> <xsd:element maxOccurs="1" minOccurs="1" ref="Type" /> </xsd:sequence> </xsd:complexType> </xsd:schema>""" def get_DEFAULT_policy(dom0label=""): fromnode = "" if dom0label != "": fromnode = " from=\"%s\"" % dom0label return DEFAULT_policy % fromnode def initialize(): xoptions = XendOptions.instance() basedir = xoptions.get_xend_security_path() policiesdir = basedir + "/policies" mkdir.parents(policiesdir, stat.S_IRWXU) instdir = security.install_policy_dir_prefix DEF_policy_file = "DEFAULT-security_policy.xml" #Install default policy. f = open(policiesdir + "/" + DEF_policy_file, 'w') if f: f.write(get_DEFAULT_policy()) f.close() else: log.error("Could not write the default policy's file.") defpol = ACMPolicy(xml=get_DEFAULT_policy()) defpol.compile() class ACMPolicy(XSPolicy): """ ACMPolicy class. Implements methods for getting information from the XML representation of the policy as well as compilation and loading of a policy into the HV. 
""" def __init__(self, name=None, dom=None, ref=None, xml=None): if name: self.name = name try: self.dom = minidom.parse(self.path_from_policy_name(name)) except Exception, e: raise SecurityError(-xsconstants.XSERR_XML_PROCESSING, str(e)) elif dom: self.dom = dom self.name = self.get_name() elif xml: try: self.dom = minidom.parseString(xml) except Exception, e: raise SecurityError(-xsconstants.XSERR_XML_PROCESSING, str(e)) self.name = self.get_name() rc = self.validate() if rc != xsconstants.XSERR_SUCCESS: raise SecurityError(rc) if ref: from xen.xend.XendXSPolicy import XendACMPolicy self.xendacmpolicy = XendACMPolicy(self, {}, ref) else: self.xendacmpolicy = None XSPolicy.__init__(self, name=self.name, ref=ref) def get_dom(self): return self.dom def get_name(self): return self.policy_dom_get_hdr_item("PolicyName") def get_type(self): return xsconstants.XS_POLICY_ACM def get_type_name(self): return xsconstants.ACM_POLICY_ID def __str__(self): return self.get_name() def validate(self): """ validate against the policy's schema Does not fail if the libxml2 python lib is not installed """ rc = xsconstants.XSERR_SUCCESS try: import libxml2 except Exception, e: log.warn("Libxml2 python-wrapper is not installed on the system.") return xsconstants.XSERR_SUCCESS try: parserctxt = libxml2.schemaNewMemParserCtxt(ACM_SCHEMA, len(ACM_SCHEMA)) schemaparser = parserctxt.schemaParse() valid = schemaparser.schemaNewValidCtxt() doc = libxml2.parseDoc(self.toxml()) if doc.schemaValidateDoc(valid) != 0: rc = -xsconstants.XSERR_BAD_XML except Exception, e: log.warn("Problem with the schema: %s" % str(e)) rc = -xsconstants.XSERR_GENERAL_FAILURE if rc != xsconstants.XSERR_SUCCESS: log.warn("XML did not validate against schema") if rc == xsconstants.XSERR_SUCCESS: rc = self.__validate_name_and_labels() return rc def __validate_name_and_labels(self): """ no ':' allowed in the policy name and the labels """ if ':' in self.get_name(): return -xsconstants.XSERR_BAD_POLICY_NAME for s in self.policy_get_resourcelabel_names(): if ':' in s: return -xsconstants.XSERR_BAD_LABEL for s in self.policy_get_virtualmachinelabel_names(): if ':' in s: return -xsconstants.XSERR_BAD_LABEL return xsconstants.XSERR_SUCCESS def is_default_policy(self): """ Determine whether this is the default policy """ default = ['SystemManagement', ACM_LABEL_UNLABELED ] if self.policy_get_virtualmachinelabel_names() == default and \ self.policy_get_bootstrap_vmlabel() == default[0] and \ self.policy_get_stetypes_types() == default and \ self.policy_get_stes_of_vmlabel(default[0]) == default and \ self.policy_get_stes_of_vmlabel(default[1]) == [default[1]] and \ self.policy_get_resourcelabel_names() == [default[1]] and \ self.policy_get_chwall_types() == [ default[0] ] and \ self.get_name() == "DEFAULT": return True return False def update(self, xml_new): """ Update the policy with the new XML. The hypervisor decides whether the new policy can be applied. """ rc = -xsconstants.XSERR_XML_PROCESSING errors = "" acmpol_old = self try: acmpol_new = ACMPolicy(xml=xml_new) except Exception: return -xsconstants.XSERR_XML_PROCESSING, errors vmlabel_map = acmpol_new.policy_get_vmlabel_translation_map() # An update requires version information in the current # and new policy. The version number of the current policy # must be the same as what is in the FromPolicy/Version node # in the new one and the current policy's name must be the # same as in FromPolicy/PolicyName # The default policy when it is set skips this step. 
if not acmpol_new.is_default_policy() and \ not acmpol_old.is_default_policy(): irc = self.__do_update_version_check(acmpol_new) if irc != xsconstants.XSERR_SUCCESS: return irc, errors if self.isloaded(): newvmnames = \ acmpol_new.policy_get_virtualmachinelabel_names_sorted() oldvmnames = \ acmpol_old.policy_get_virtualmachinelabel_names_sorted() del_array = "" chg_array = "" for o in oldvmnames: if o not in newvmnames: old_idx = oldvmnames.index(o) if vmlabel_map.has_key(o): #not a deletion, but a renaming new = vmlabel_map[o] new_idx = newvmnames.index(new) chg_array += struct.pack("ii", old_idx, new_idx) else: del_array += struct.pack("i", old_idx) for v in newvmnames: if v in oldvmnames: old_idx = oldvmnames.index(v) new_idx = newvmnames.index(v) if old_idx != new_idx: chg_array += struct.pack("ii", old_idx, new_idx) # VM labels indicated in the 'from' attribute of a VM or # resource node but that did not exist in the old policy # are considered bad labels. bad_renamings = set(vmlabel_map.keys()) - set(oldvmnames) if len(bad_renamings) > 0: log.error("Bad VM label renamings: %s" % list(bad_renamings)) return -xsconstants.XSERR_BAD_LABEL, errors reslabel_map = acmpol_new.policy_get_reslabel_translation_map() oldresnames = acmpol_old.policy_get_resourcelabel_names() bad_renamings = set(reslabel_map.keys()) - set(oldresnames) if len(bad_renamings) > 0: log.error("Bad resource label renamings: %s" % list(bad_renamings)) return -xsconstants.XSERR_BAD_LABEL, errors #Get binary and map from the new policy rc, pol_map, bin_pol = acmpol_new.policy_create_map_and_bin() if rc != xsconstants.XSERR_SUCCESS: log.error("Could not build the map and binary policy.") return rc, errors #Need to do / check the following: # - relabel all resources where there is a 'from' field in # the policy and mark those as unlabeled where the label # does not appear in the new policy anymore # - relabel all VMs where there is a 'from' field in the # policy and mark those as unlabeled where the label # does not appear in the new policy anymore; no running # or paused VM may be unlabeled through this # - check that under the new labeling conditions the VMs # still have access to their resources as before. Unlabeled # resources are inaccessible. If this check fails, the # update failed. # - Attempt changes in the hypervisor; if this step fails, # roll back the relabeling of resources and VMs # - Commit the relabeling of resources rc, errors = security.change_acm_policy(bin_pol, del_array, chg_array, vmlabel_map, reslabel_map, self, acmpol_new, acmpol_new.is_default_policy()) if rc == 0: # Replace the old DOM with the new one and save it self.dom = acmpol_new.dom self.compile() log.info("ACM policy update was successful") else: #Not loaded in HV self.dom = acmpol_new.dom rc = self.compile() return rc, errors def force_default_policy(klass, policy_ref): """ Force the installation of the DEFAULT policy if for example no XML of the current policy is available and the update path with comparisons of old and new policy cannot be taken. This only succeeds if only Domain-0 is running or all guest have the same ssidref as Domain-0. 
""" errors = "" acmpol_new = ACMPolicy(xml = get_DEFAULT_policy(), ref=policy_ref) from xen.lowlevel import acm dom0_ssidref = acm.getssid(0) del_array = "" chg_array = struct.pack("ii", dom0_ssidref['ssidref'] & 0xffff, 0x1) rc, pol_map, bin_pol = acmpol_new.policy_create_map_and_bin() if rc != xsconstants.XSERR_SUCCESS: return rc, errors, acmpol_new rc, errors = security.hv_chg_policy(bin_pol, del_array, chg_array) return rc, errors, acmpol_new force_default_policy = classmethod(force_default_policy) def get_reset_policy_xml(klass): dom0_label = security.get_ssid(0)[1] return get_DEFAULT_policy(dom0_label) get_reset_policy_xml = classmethod(get_reset_policy_xml) def __do_update_version_check(self, acmpol_new): acmpol_old = self now_vers = acmpol_old.policy_dom_get_hdr_item("Version") now_name = acmpol_old.policy_dom_get_hdr_item("PolicyName") req_oldvers = acmpol_new.policy_dom_get_frompol_item("Version") req_oldname = acmpol_new.policy_dom_get_frompol_item("PolicyName") if now_vers == "" or \ now_vers != req_oldvers or \ now_name != req_oldname: log.info("Policy rejected: %s != %s or %s != %s" % \ (now_vers,req_oldvers,now_name,req_oldname)) return -xsconstants.XSERR_VERSION_PREVENTS_UPDATE if not self.isVersionUpdate(acmpol_new): log.info("Policy rejected since new version is not an update.") return -xsconstants.XSERR_VERSION_PREVENTS_UPDATE return xsconstants.XSERR_SUCCESS def compareVersions(self, v1, v2): """ Compare two policy versions given their tuples of major and minor. Return '0' if versions are equal, '>0' if v1 > v2 and '<' if v1 < v2 """ rc = v1[0] - v2[0] if rc == 0: rc = v1[1] - v2[1] return rc def getVersionTuple(self, item="Version"): v_str = self.policy_dom_get_hdr_item(item) return self.__convVersionToTuple(v_str) def get_version(self): return self.policy_dom_get_hdr_item("Version") def isVersionUpdate(self, polnew): if self.compareVersions(polnew.getVersionTuple(), self.getVersionTuple()) > 0: return True return False def __convVersionToTuple(self, v_str): """ Convert a version string, formatted according to the scheme "%d.%d" into a tuple of (major, minor). Return (0,0) if the string is empty. """ major = 0 minor = 0 if v_str != "": tmp = v_str.split(".") major = int(tmp[0]) if len(tmp) > 1: minor = int(tmp[1]) return (major, minor) def get_policies_path(self): xoptions = XendOptions.instance() basedir = xoptions.get_xend_security_path() return basedir + "/policies/" def policy_path(self, name): prefix = self.get_policies_path() path = prefix + name.replace('.','/') _path = path.split("/") del _path[-1] mkdir.parents("/".join(_path), stat.S_IRWXU) return path def path_from_policy_name(self, name): return self.policy_path(name) + "-security_policy.xml" # # Functions interacting with the bootloader # def vmlabel_to_ssidref(self, vm_label): """ Convert a VMlabel into an ssidref given the current policy Return xsconstants.INVALID_SSIDREF if conversion failed. 
""" ssidref = xsconstants.INVALID_SSIDREF names = self.policy_get_virtualmachinelabel_names_sorted() try: vmidx = names.index(vm_label) ssidref = (vmidx << 16) | vmidx except: pass return ssidref def set_vm_bootlabel(self, vm_label, remove=False): parms="<>" if vm_label != "": ssidref = self.vmlabel_to_ssidref(vm_label) if ssidref == xsconstants.INVALID_SSIDREF: return -xsconstants.XSERR_BAD_LABEL parms = "0x%08x:%s:%s:%s" % \ (ssidref, xsconstants.ACM_POLICY_ID, \ self.get_name(),vm_label) else: ssidref = 0 #Identifier for removal if remove == True: parms = "<>" try: def_title = bootloader.get_default_title() bootloader.set_kernel_attval(def_title, "ssidref", parms) except: return -xsconstants.XSERR_GENERAL_FAILURE return ssidref # # Utility functions related to the policy's files # def get_filename(self, postfix, prefix=None, dotted=False): """ Create the filename for the policy. The prefix is prepended to the path. If dotted is True, then a policy name like 'a.b.c' will remain as is, otherwise it will become 'a/b/c' """ if prefix == None: prefix = self.get_policies_path() name = self.get_name() if name: p = name.split(".") path = "" if dotted: sep = "." else: sep = "/" if len(p) > 1: path = sep.join(p[0:len(p)-1]) if prefix != "" or path != "": allpath = prefix + path + sep + p[-1] + postfix else: allpath = p[-1] + postfix return allpath return None def __readfile(self, name): cont = "" filename = self.get_filename(name) f = open(filename, "r") if f: cont = f.read() f.close() return cont def get_map(self): return self.__readfile(".map") def get_bin(self): return self.__readfile(".bin") def copy_policy_file(self, suffix, destdir): spolfile = self.get_filename(suffix) dpolfile = destdir + "/" + self.get_filename(suffix,"",dotted=True) try: shutil.copyfile(spolfile, dpolfile) except Exception, e: log.error("Could not copy policy file %s to %s: %s" % (spolfile, dpolfile, str(e))) return -xsconstants.XSERR_FILE_ERROR return xsconstants.XSERR_SUCCESS # # DOM-related functions # def policy_dom_get(self, parent, key, createit=False): for node in parent.childNodes: if node.nodeType == Node.ELEMENT_NODE: if node.nodeName == key: return node if createit: self.dom_create_node(parent, key) return self.policy_dom_get(parent, key) def dom_create_node(self, parent, newname, value=" "): xml = "<a><"+newname+">"+ value +"</"+newname+"></a>" frag = minidom.parseString(xml) frag.childNodes[0].nodeType = Node.DOCUMENT_FRAGMENT_NODE parent.appendChild(frag.childNodes[0]) return frag.childNodes[0] def dom_get_node(self, path, createit=False): node = None parts = path.split("/") doc = self.get_dom() if len(parts) > 0: node = self.policy_dom_get(doc.documentElement, parts[0]) if node: i = 1 while i < len(parts): _node = self.policy_dom_get(node, parts[i], createit) if not _node: if not createit: break else: self.dom_create_node(node, parts[i]) _node = self.policy_dom_get(node, parts[i]) node = _node i += 1 return node # # Header-related functions # def policy_dom_get_header_subnode(self, nodename): node = self.dom_get_node("PolicyHeader/%s" % nodename) return node def policy_dom_get_hdr_item(self, name, default=""): node = self.policy_dom_get_header_subnode(name) if node and len(node.childNodes) > 0: return node.childNodes[0].nodeValue return default def policy_dom_get_frompol_item(self, name, default="", createit=False): node = self.dom_get_node("PolicyHeader/FromPolicy",createit) if node: node = self.policy_dom_get(node, name, createit) if node and len(node.childNodes) > 0: return node.childNodes[0].nodeValue 
return default def get_header_fields_map(self): header = { 'policyname' : self.policy_dom_get_hdr_item("PolicyName"), 'policyurl' : self.policy_dom_get_hdr_item("PolicyUrl"), 'reference' : self.policy_dom_get_hdr_item("Reference"), 'date' : self.policy_dom_get_hdr_item("Date"), 'namespaceurl' : self.policy_dom_get_hdr_item("NameSpaceUrl"), 'version' : self.policy_dom_get_hdr_item("Version") } return header def set_frompolicy_name(self, name): """ For tools to adapt the header of the policy """ node = self.dom_get_node("PolicyHeader/FromPolicy/PolicyName", createit=True) node.childNodes[0].nodeValue = name def set_frompolicy_version(self, version): """ For tools to adapt the header of the policy """ node = self.dom_get_node("PolicyHeader/FromPolicy/Version", createit=True) node.childNodes[0].nodeValue = version def set_policy_name(self, name): """ For tools to adapt the header of the policy """ node = self.dom_get_node("PolicyHeader/PolicyName") node.childNodes[0].nodeValue = name def set_policy_version(self, version): """ For tools to adapt the header of the policy """ node = self.dom_get_node("PolicyHeader/Version") node.childNodes[0].nodeValue = version def update_frompolicy(self, curpol): self.set_frompolicy_name(curpol.policy_dom_get_hdr_item("PolicyName")) version = curpol.policy_dom_get_hdr_item("Version") self.set_frompolicy_version(version) (maj, minor) = self.__convVersionToTuple(version) self.set_policy_version("%s.%s" % (maj, minor+1)) # # Get all types that are part of a node # def policy_get_types(self, node): strings = [] i = 0 while i < len(node.childNodes): if node.childNodes[i].nodeName == "Type" and \ len(node.childNodes[i].childNodes) > 0: strings.append(node.childNodes[i].childNodes[0].nodeValue) i += 1 return strings # # Simple Type Enforcement-related functions # def policy_get_stetypes_node(self): node = self.dom_get_node("SimpleTypeEnforcement/SimpleTypeEnforcementTypes") return node def policy_get_stetypes_types(self): strings = [] node = self.policy_get_stetypes_node() if node: strings = self.policy_get_types(node) return strings # # Chinese Wall Type-related functions # def policy_get_chwall_types(self): strings = [] node = self.dom_get_node("ChineseWall/ChineseWallTypes") if node: strings = self.policy_get_types(node) return strings def policy_get_chwall_cfses(self): cfs = [] node = self.dom_get_node("ChineseWall/ConflictSets") if node: i = 0 while i < len(node.childNodes): _cfs = {} if node.childNodes[i].nodeName == "Conflict": _cfs['name'] = node.childNodes[i].getAttribute('name') _cfs['chws'] = self.policy_get_types(node.childNodes[i]) cfs.append(_cfs) i += 1 return cfs def policy_get_chwall_cfses_names_sorted(self): """ Return the list of all conflict set names in alphabetical order. """ cfs_names = [] node = self.dom_get_node("ChineseWall/ConflictSets") if node: i = 0 while i < len(node.childNodes): if node.childNodes[i].nodeName == "Conflict": n = node.childNodes[i].getAttribute('name') #it better have a name! 
if n: cfs_names.append(n) i += 1 cfs_names.sort() return cfs_names # # Subject Label-related functions # def policy_get_bootstrap_vmlabel(self): node = self.dom_get_node("SecurityLabelTemplate/SubjectLabels") if node: vmlabel = node.getAttribute("bootstrap") return vmlabel # Get the names of all virtual machine labels; returns an array def policy_get_virtualmachinelabel_names(self): strings = [] node = self.dom_get_node("SecurityLabelTemplate/SubjectLabels") if node: i = 0 while i < len(node.childNodes): if node.childNodes[i].nodeName == "VirtualMachineLabel": name = self.policy_dom_get(node.childNodes[i], "Name") if len(name.childNodes) > 0: strings.append(name.childNodes[0].nodeValue) i += 1 return strings def policy_sort_virtualmachinelabel_names(self, vmnames): bootstrap = self.policy_get_bootstrap_vmlabel() if bootstrap not in vmnames: raise SecurityError(-xsconstants.XSERR_POLICY_INCONSISTENT) vmnames.remove(bootstrap) vmnames.sort() vmnames.insert(0, bootstrap) if ACM_LABEL_UNLABELED in vmnames: vmnames.remove(ACM_LABEL_UNLABELED) vmnames.insert(0, ACM_LABEL_UNLABELED) return vmnames def policy_get_virtualmachinelabel_names_sorted(self): """ Get a sorted list of VMlabel names. The bootstrap VM's label will be the first one in that list, followed by an alphabetically sorted list of VM label names """ vmnames = self.policy_get_virtualmachinelabel_names() res = self.policy_sort_virtualmachinelabel_names(vmnames) if res[0] != ACM_LABEL_UNLABELED: res.insert(0, ACM_LABEL_UNLABELED) return res def policy_get_virtualmachinelabels(self): """ Get a list of all virtual machine labels in this policy """ res = [] node = self.dom_get_node("SecurityLabelTemplate/SubjectLabels") if node: i = 0 while i < len(node.childNodes): if node.childNodes[i].nodeName == "VirtualMachineLabel": name = self.policy_dom_get(node.childNodes[i], "Name") if len(name.childNodes) > 0: _res = {} _res['type'] = xsconstants.ACM_LABEL_VM _res['name'] = name.childNodes[0].nodeValue stes = self.policy_dom_get(node.childNodes[i], "SimpleTypeEnforcementTypes") if stes: _res['stes'] = self.policy_get_types(stes) else: _res['stes'] = [] chws = self.policy_dom_get(node.childNodes[i], "ChineseWallTypes") if chws: _res['chws'] = self.policy_get_types(chws) else: _res['chws'] = [] res.append(_res) i += 1 return res def policy_get_stes_of_vmlabel(self, vmlabel): """ Get a list of all STEs of a given VMlabel """ return self.__policy_get_stes_of_labeltype(vmlabel, "/SubjectLabels", "VirtualMachineLabel") def policy_get_stes_of_resource(self, reslabel): """ Get a list of all resources of a given VMlabel """ return self.__policy_get_stes_of_labeltype(reslabel, "/ObjectLabels", "ResourceLabel") def __policy_get_stes_of_labeltype(self, label, path, labeltype): node = self.dom_get_node("SecurityLabelTemplate" + path) if node: i = 0 while i < len(node.childNodes): if node.childNodes[i].nodeName == labeltype: name = self.policy_dom_get(node.childNodes[i], "Name") if len(name.childNodes) > 0 and \ name.childNodes[0].nodeValue == label: stes = self.policy_dom_get(node.childNodes[i], "SimpleTypeEnforcementTypes") if not stes: return [] return self.policy_get_types(stes) i += 1 return [] def policy_check_vmlabel_against_reslabels(self, vmlabel, resources): """ Check whether the given vmlabel is compatible with the given resource labels. Do this by getting all the STEs of the vmlabel and the STEs of the resources. Any STE type of the VM label must match an STE type of the resource. 
""" vm_stes = self.policy_get_stes_of_vmlabel(vmlabel) if len(vm_stes) == 0: return False for res in resources: res_stes = self.policy_get_stes_of_resource(res) if len(res_stes) == 0 or \ len( set(res_stes).intersection( set(vm_stes) ) ) == 0: return False return True def __policy_get_label_translation_map(self, path, labeltype): res = {} node = self.dom_get_node("SecurityLabelTemplate/" + path) if node: i = 0 while i < len(node.childNodes): if node.childNodes[i].nodeName == labeltype: name = self.policy_dom_get(node.childNodes[i], "Name") from_name = name.getAttribute("from") if from_name and len(name.childNodes) > 0: res.update({from_name : name.childNodes[0].nodeValue}) i += 1 return res def policy_get_vmlabel_translation_map(self): """ Get a dictionary of virtual machine mappings from their old VMlabel name to the new VMlabel name. """ return self.__policy_get_label_translation_map("SubjectLabels", "VirtualMachineLabel") def policy_get_reslabel_translation_map(self): """ Get a dictionary of resource mappings from their old resource label name to the new resource label name. """ return self.__policy_get_label_translation_map("ObjectLabels", "ResourceLabel") # # Object Label-related functions # def policy_get_resourcelabel_names(self): """ Get the names of all resource labels in an array but only those that actually have types """ strings = [] node = self.dom_get_node("SecurityLabelTemplate/ObjectLabels") if node: i = 0 while i < len(node.childNodes): if node.childNodes[i].nodeName == "ResourceLabel": name = self.policy_dom_get(node.childNodes[i], "Name") stes = self.policy_dom_get(node.childNodes[i], "SimpleTypeEnforcementTypes") if stes and len(name.childNodes) > 0: strings.append(name.childNodes[0].nodeValue) i += 1 return strings def policy_get_resourcelabels(self): """ Get all information about all resource labels of this policy. """ res = [] node = self.dom_get_node("SecurityLabelTemplate/ObjectLabels") if node: i = 0 while i < len(node.childNodes): if node.childNodes[i].nodeName == "ResourceLabel": name = self.policy_dom_get(node.childNodes[i], "Name") if len(name.childNodes) > 0: _res = {} _res['type'] = xsconstants.ACM_LABEL_RES _res['name'] = name.childNodes[0].nodeValue stes = self.policy_dom_get(node.childNodes[i], "SimpleTypeEnforcementTypes") if stes: _res['stes'] = self.policy_get_types(stes) else: _res['stes'] = [] _res['chws'] = [] res.append(_res) i += 1 return res def policy_find_reslabels_with_stetype(self, stetype): """ Find those resource labels that hold a given STE type. 
""" res = [] reslabels = self.policy_get_resourcelabels() for resl in reslabels: if stetype in resl['stes']: res.append(resl['name']) return res def toxml(self): dom = self.get_dom() if dom: return dom.toxml() return None def hash(self): """ Calculate a SHA1 hash of the XML policy """ return sha1(self.toxml()) def save(self): ### Save the XML policy into a file ### rc = -xsconstants.XSERR_FILE_ERROR name = self.get_name() if name: path = self.path_from_policy_name(name) if path: f = open(path, 'w') if f: try: try: f.write(self.toxml()) rc = 0 except: pass finally: f.close() return rc def __write_to_file(self, suffix, data): #write the data into a file with the given suffix f = open(self.get_filename(suffix),"w") if f: try: try: f.write(data) except Exception, e: log.error("Error writing file: %s" % str(e)) return -xsconstants.XSERR_FILE_ERROR finally: f.close() else: return -xsconstants.XSERR_FILE_ERROR return xsconstants.XSERR_SUCCESS def compile(self): rc = self.save() if rc == 0: rc, mapfile, bin_pol = self.policy_create_map_and_bin() if rc == 0: try: security.mapfile_lock() rc = self.__write_to_file(".map", mapfile) if rc != 0: log.error("Error writing map file") finally: security.mapfile_unlock() if rc == 0: rc = self.__write_to_file(".bin", bin_pol) if rc != 0: log.error("Error writing binary policy file") return rc def loadintohv(self): """ load this policy into the hypervisor if successful,the policy's flags will indicate that the policy is the one loaded into the hypervisor """ if not self.isloaded(): (ret, output) = commands.getstatusoutput( security.xensec_tool + " loadpolicy " + self.get_filename(".bin")) if ret != 0: return -xsconstants.XSERR_POLICY_LOAD_FAILED return xsconstants.XSERR_SUCCESS def isloaded(self): """ Determine whether this policy is the active one. """ if self.get_name() == security.get_active_policy_name(): return True return False def destroy(self): """ Destroy the policy including its binary, mapping and XML files. This only works if the policy is not the one that's loaded """ if self.isloaded(): return -xsconstants.XSERR_POLICY_LOADED files = [ self.get_filename(".map",""), self.get_filename(".bin","") ] for f in files: try: os.unlink(f) except: pass if self.xendacmpolicy: self.xendacmpolicy.destroy() XSPolicy.destroy(self) return xsconstants.XSERR_SUCCESS def policy_get_domain_label(self, domid): """ Given a domain's ID, retrieve the label it has using its ssidref for reverse calculation. """ try: mgmt_dom = security.get_ssid(domid) except: return "" return self.policy_get_domain_label_by_ssidref(int(mgmt_dom[3])) def policy_get_domain_label_by_ssidref(self, ssidref): """ Given an ssidref, find the corresponding VM label """ chwall_ref = ssidref & 0xffff try: allvmtypes = self.policy_get_virtualmachinelabel_names_sorted() except: return None return allvmtypes[chwall_ref] def policy_get_domain_label_formatted(self, domid): label = self.policy_get_domain_label(domid) if label == "": label = ACM_LABEL_UNLABELED return "%s:%s:%s" % (xsconstants.ACM_POLICY_ID, self.get_name(), label) def policy_get_domain_label_by_ssidref_formatted(self, ssidref): label = self.policy_get_domain_label_by_ssidref(ssidref) if label == "": return "" return "%s:%s:%s" % (xsconstants.ACM_POLICY_ID, self.get_name(), label) def policy_create_map_and_bin(self): """ Create the policy's map and binary files -- compile the policy. 
""" def roundup8(len): return ((len + 7) & ~7) rc = xsconstants.XSERR_SUCCESS mapfile = "" primpolcode = ACM_POLICY_UNDEFINED secpolcode = ACM_POLICY_UNDEFINED unknown_ste = set() unknown_chw = set() unlabeled_ste = "__NULL_LABEL__" unlabeled_chw = "__NULL_LABEL__" rc = self.validate() if rc: return rc, "", "" stes = self.policy_get_stetypes_types() if stes: stes.sort() chws = self.policy_get_chwall_types() if chws: chws.sort() vms = self.policy_get_virtualmachinelabels() bootstrap = self.policy_get_bootstrap_vmlabel() vmlabels = self.policy_get_virtualmachinelabel_names_sorted() if bootstrap not in vmlabels: log.error("Bootstrap label '%s' not found among VM labels '%s'." \ % (bootstrap, vmlabels)) return -xsconstants.XSERR_POLICY_INCONSISTENT, "", "" vms_with_chws = [] chws_by_vm = { ACM_LABEL_UNLABELED : [] } for v in vms: if v.has_key("chws"): vms_with_chws.append(v["name"]) chws_by_vm[v["name"]] = v["chws"] if bootstrap in vms_with_chws: vms_with_chws.remove(bootstrap) vms_with_chws.sort() vms_with_chws.insert(0, bootstrap) else: vms_with_chws.sort() if ACM_LABEL_UNLABELED in vms_with_chws: unlabeled_chw = ACM_LABEL_UNLABELED vms_with_chws.remove(ACM_LABEL_UNLABELED) ; # @1 vms_with_stes = [] stes_by_vm = { ACM_LABEL_UNLABELED : [] } for v in vms: if v.has_key("stes"): vms_with_stes.append(v["name"]) stes_by_vm[v["name"]] = v["stes"] if bootstrap in vms_with_stes: vms_with_stes.remove(bootstrap) vms_with_stes.sort() vms_with_stes.insert(0, bootstrap) else: vms_with_stes.sort() if ACM_LABEL_UNLABELED in vms_with_stes: unlabeled_ste = ACM_LABEL_UNLABELED vms_with_stes.remove(ACM_LABEL_UNLABELED) ; # @2 resnames = self.policy_get_resourcelabel_names() resnames.sort() stes_by_res = {} res = self.policy_get_resourcelabels() for r in res: if r.has_key("stes"): stes_by_res[r["name"]] = r["stes"] if ACM_LABEL_UNLABELED in resnames: resnames.remove(ACM_LABEL_UNLABELED) # check for duplicate labels if len(vmlabels) != len(set(vmlabels)) or \ len(resnames) != len(set(resnames)) or \ len(stes) != len(set(stes)) or \ len(chws) != len(set(chws)): return -xsconstants.XSERR_POLICY_HAS_DUPLICATES, "", "" max_chw_ssids = 1 + len(vms_with_chws) max_chw_types = 1 + len(vms_with_chws) max_ste_ssids = 1 + len(vms_with_stes) + len(resnames) max_ste_types = 1 + len(vms_with_stes) + len(resnames) mapfile = "POLICYREFERENCENAME %s\n" % self.get_name() mapfile += "MAGIC %08x\n" % ACM_MAGIC mapfile += "POLICFILE %s\n" % \ self.path_from_policy_name(self.get_name()) mapfile += "BINARYFILE %s\n" % self.get_filename(".bin") mapfile += "MAX-CHWALL-TYPES %08x\n" % len(chws) mapfile += "MAX-CHWALL-SSIDS %08x\n" % max_chw_ssids mapfile += "MAX-CHWALL-LABELS %08x\n" % max_chw_ssids mapfile += "MAX-STE-TYPES %08x\n" % len(stes) mapfile += "MAX-STE-SSIDS %08x\n" % max_ste_ssids mapfile += "MAX-STE-LABELS %08x\n" % max_ste_ssids mapfile += "\n" if chws: mapfile += \ "PRIMARY CHWALL\n" primpolcode = ACM_CHINESE_WALL_POLICY if stes: mapfile += \ "SECONDARY STE\n" else: mapfile += \ "SECONDARY NULL\n" secpolcode = ACM_SIMPLE_TYPE_ENFORCEMENT_POLICY else: if stes: mapfile += \ "PRIMARY STE\n" primpolcode = ACM_SIMPLE_TYPE_ENFORCEMENT_POLICY mapfile += \ "SECONDARY NULL\n" mapfile += "\n" if len(vms_with_chws) > 0: mapfile += \ "LABEL->SSID ANY CHWALL %-20s %x\n" % \ (unlabeled_chw, 0) i = 0 for v in vms_with_chws: mapfile += \ "LABEL->SSID VM CHWALL %-20s %x\n" % \ (v, i+1) i += 1 mapfile += "\n" if len(vms_with_stes) > 0 or len(resnames) > 0: mapfile += \ "LABEL->SSID ANY STE %-20s %08x\n" % \ (unlabeled_ste, 0) i = 0 for v 
in vms_with_stes: mapfile += \ "LABEL->SSID VM STE %-20s %x\n" % (v, i+1) i += 1 j = 0 for r in resnames: mapfile += \ "LABEL->SSID RES STE %-20s %x\n" % (r, j+i+1) j += 1 mapfile += "\n" if vms_with_chws: mapfile += \ "SSID->TYPE CHWALL %08x\n" % 0 i = 1 for v in vms_with_chws: mapfile += \ "SSID->TYPE CHWALL %08x" % i for c in chws_by_vm[v]: mapfile += " %s" % c mapfile += "\n" i += 1 mapfile += "\n" if len(vms_with_stes) > 0 or len(resnames) > 0: mapfile += \ "SSID->TYPE STE %08x\n" % 0 i = 1 for v in vms_with_stes: mapfile += \ "SSID->TYPE STE %08x" % i for s in stes_by_vm[v]: mapfile += " %s" % s mapfile += "\n" i += 1 for r in resnames: mapfile += \ "SSID->TYPE STE %08x" % i for s in stes_by_res[r]: mapfile += " %s" % s mapfile += "\n" i += 1 mapfile += "\n" if chws: i = 0 while i < len(chws): mapfile += \ "TYPE CHWALL %-20s %d\n" % (chws[i], i) i += 1 mapfile += "\n" if stes: i = 0 while i < len(stes): mapfile += \ "TYPE STE %-20s %d\n" % (stes[i], i) i += 1 mapfile += "\n" mapfile += "\n" # Build header with policy name length = roundup8(4 + len(self.get_name()) + 1) polname = self.get_name(); pr_bin = struct.pack("!i", len(polname)+1) pr_bin += polname; while len(pr_bin) < length: pr_bin += "\x00" # Build chinese wall part vms_with_chws.insert(0, ACM_LABEL_UNLABELED) cfses_names = self.policy_get_chwall_cfses_names_sorted() cfses = self.policy_get_chwall_cfses() chwformat = "!iiiiiiiii" max_chw_cfs = len(cfses) chw_ssid_offset = struct.calcsize(chwformat) chw_confset_offset = chw_ssid_offset + \ 2 * len(chws) * max_chw_types chw_running_types_offset = 0 chw_conf_agg_offset = 0 chw_bin = struct.pack(chwformat, ACM_CHWALL_VERSION, ACM_CHINESE_WALL_POLICY, len(chws), max_chw_ssids, max_chw_cfs, chw_ssid_offset, chw_confset_offset, chw_running_types_offset, chw_conf_agg_offset) chw_bin_body = "" # VMs that are listed and their chinese walls for v in vms_with_chws: for c in chws: unknown_chw |= (set(chws_by_vm[v]) - set(chws)) if c in chws_by_vm[v]: chw_bin_body += struct.pack("!h",1) else: chw_bin_body += struct.pack("!h",0) # Conflict sets -- they need to be processed in alphabetical order for cn in cfses_names: if cn == "" or cn is None: return -xsconstants.XSERR_BAD_CONFLICTSET, "", "" i = 0 while i < len(cfses): if cfses[i]['name'] == cn: conf = cfses[i]['chws'] break i += 1 for c in chws: if c in conf: chw_bin_body += struct.pack("!h",1) else: chw_bin_body += struct.pack("!h",0) del cfses[i] if len(cfses) != 0: return -xsconstants.XSERR_BAD_CONFLICTSET, "", "" chw_bin += chw_bin_body while len(chw_bin) < roundup8(len(chw_bin)): chw_bin += "\x00" # Build STE part vms_with_stes.insert(0, ACM_LABEL_UNLABELED) # Took out in @2 steformat="!iiiii" ste_bin = struct.pack(steformat, ACM_STE_VERSION, ACM_SIMPLE_TYPE_ENFORCEMENT_POLICY, len(stes), max_ste_types, struct.calcsize(steformat)) ste_bin_body = "" if stes: # VMs that are listed and their STE types for v in vms_with_stes: unknown_ste |= (set(stes_by_vm[v]) - set(stes)) for s in stes: if s in stes_by_vm[v]: ste_bin_body += struct.pack("!h",1) else: ste_bin_body += struct.pack("!h",0) for r in resnames: unknown_ste |= (set(stes_by_res[r]) - set(stes)) for s in stes: if s in stes_by_res[r]: ste_bin_body += struct.pack("!h",1) else: ste_bin_body += struct.pack("!h",0) ste_bin += ste_bin_body; while len(ste_bin) < roundup8(len(ste_bin)): ste_bin += "\x00" #Write binary header: headerformat="!iiiiiiiiii20s" totallen_bin = struct.calcsize(headerformat) + \ len(pr_bin) + len(chw_bin) + len(ste_bin) polref_offset = 
struct.calcsize(headerformat) primpoloffset = polref_offset + len(pr_bin) if primpolcode == ACM_CHINESE_WALL_POLICY: secpoloffset = primpoloffset + len(chw_bin) elif primpolcode == ACM_SIMPLE_TYPE_ENFORCEMENT_POLICY: secpoloffset = primpoloffset + len(ste_bin) else: secpoloffset = primpoloffset (major, minor) = self.getVersionTuple() hdr_bin = struct.pack(headerformat, ACM_MAGIC, ACM_POLICY_VERSION, totallen_bin, polref_offset, primpolcode, primpoloffset, secpolcode, secpoloffset, major, minor, self.hash().digest()) all_bin = array.array('B') for s in [ hdr_bin, pr_bin, chw_bin, ste_bin ]: for c in s: all_bin.append(ord(c)) log.info("Compiled policy: rc = %s" % hex(rc)) if len(unknown_ste) > 0: log.info("The following STEs in VM/res labels were unknown:" \ " %s" % list(unknown_ste)) rc = -xsconstants.XSERR_BAD_LABEL if len(unknown_chw) > 0: log.info("The following Ch. Wall types in labels were unknown:" \ " %s" % list(unknown_chw)) rc = -xsconstants.XSERR_BAD_LABEL return rc, mapfile, all_bin.tostring() def validate_enforced_policy_hash(self): """ verify that the policy hash embedded in the binary policy that is currently enforce matches the one of the XML policy. """ if self.hash().digest() != self.get_enforced_policy_hash(): raise Exception('Policy hashes do not match') def get_enforced_policy_hash(self): binpol = self.get_enforced_binary() headerformat="!iiiiiiiiii20s" res = struct.unpack(headerformat, binpol[:60]) if len(res) >= 11: return res[10] return None def get_enforced_binary(self): rc, binpol = security.hv_get_policy() if rc != 0: raise SecurityError(-xsconstants.XSERR_HV_OP_FAILED) return binpol get_enforced_binary = classmethod(get_enforced_binary)
gpl-2.0
7,286,991,950,228,136,000
36.038224
186
0.534323
false
3.766284
false
false
false
FTSRG/mondo-collab-framework
integration/uk.ac.york.mondo.integration.eclipse.product/generate-p2inf.py
1
6558
#!/usr/bin/env python3 import argparse ENTRIES_NOGPL = ( ('fr.inria.atlanmod.mondo.integration.cloudatl.cli.feature.feature.group', '[0.0.0,2.0.0)'), ('org.eclipse.ecf.core.feature.feature.group', '[1.1.0,2.0.0)'), ('org.eclipse.ecf.core.ssl.feature.feature.group', '[1.0.0,2.0.0)'), ('org.eclipse.ecf.filetransfer.feature.feature.group', '[3.10.0,4.0.0)'), ('org.eclipse.ecf.filetransfer.httpclient4.feature.feature.group', '[3.10.0,4.0.0)'), ('org.eclipse.ecf.filetransfer.httpclient4.ssl.feature.feature.group', '[1.0.0,2.0.0)'), ('org.eclipse.ecf.filetransfer.ssl.feature.feature.group', '[1.0.0,2.0.0)'), ('org.eclipse.epsilon.core.dependencies.feature.feature.group', '[1.3.0,2.0.0)'), ('org.eclipse.epsilon.core.dt.feature.feature.group', '[1.3.0,2.0.0)'), ('org.eclipse.epsilon.core.feature.feature.group', '[1.3.0,2.0.0)'), ('org.eclipse.epsilon.emf.dt.dependencies.feature.feature.group', '[1.3.0,2.0.0)'), ('org.eclipse.epsilon.emf.dt.feature.feature.group', '[1.3.0,2.0.0)'), ('org.eclipse.epsilon.emf.feature.feature.group', '[1.3.0,2.0.0)'), ('org.eclipse.incquery.databinding.runtime.feature.feature.group', '[1.1.0,2.0.0)'), ('org.eclipse.incquery.databinding.runtime.feature.source.feature.group', '[1.1.0,2.0.0)'), ('org.eclipse.incquery.querybasedfeatures.runtime.feature.feature.group', '[1.1.0,2.0.0)'), ('org.eclipse.incquery.querybasedfeatures.runtime.feature.source.feature.group', '[1.1.0,2.0.0)'), ('org.eclipse.incquery.querybasedfeatures.tooling.feature.feature.group', '[1.1.0,2.0.0)'), ('org.eclipse.incquery.querybasedfeatures.tooling.feature.source.feature.group', '[1.1.0,2.0.0)'), ('org.eclipse.incquery.runtime.evm.transactions.feature.feature.group', '[1.1.0,2.0.0)'), ('org.eclipse.incquery.runtime.evm.transactions.feature.source.feature.group', '[1.1.0,2.0.0)'), ('org.eclipse.incquery.runtime.feature.feature.group', '[1.1.0,2.0.0)'), ('org.eclipse.incquery.runtime.feature.source.feature.group', '[1.1.0,2.0.0)'), ('org.eclipse.incquery.runtime.generic.feature.feature.group', '[1.1.0,2.0.0)'), ('org.eclipse.incquery.runtime.generic.feature.source.feature.group', '[1.1.0,2.0.0)'), ('org.eclipse.incquery.sdk.feature.feature.group', '[1.1.0,2.0.0)'), ('org.eclipse.incquery.sdk.feature.source.feature.group', '[1.1.0,2.0.0)'), ('org.eclipse.incquery.validation.runtime.feature.feature.group', '[1.1.0,2.0.0)'), ('org.eclipse.incquery.validation.runtime.feature.source.feature.group', '[1.1.0,2.0.0)'), ('org.eclipse.incquery.validation.tooling.feature.feature.group', '[1.1.0,2.0.0)'), ('org.eclipse.incquery.validation.tooling.feature.source.feature.group', '[1.1.0,2.0.0)'), ('org.eclipse.incquery.viewers.runtime.feature.feature.group', '[1.1.0,2.0.0)'), ('org.eclipse.incquery.viewers.runtime.feature.source.feature.group', '[1.1.0,2.0.0)'), ('org.eclipse.incquery.viewers.tooling.ui.feature.feature.group', '[1.1.0,2.0.0)'), ('org.eclipse.incquery.viewers.tooling.ui.feature.source.feature.group', '[1.1.0,2.0.0)'), ('org.emf.splitter.feature.feature.group', '[0.1.0,2.0.0)'), ('org.hawk.bpmn.feature.feature.group', '[1.0.0,2.0.0)'), ('org.hawk.core.feature.feature.group', '[1.0.0,2.0.0)'), ('org.hawk.emf.feature.feature.group', '[1.0.0,2.0.0)'), ('org.hawk.emfresource.feature.feature.group', '[1.0.0,2.0.0)'), ('org.hawk.epsilon.feature.feature.group', '[1.0.0,2.0.0)'), ('org.hawk.git.feature.feature.group', '[1.0.0,2.0.0)'), ('org.hawk.localfolder.feature.feature.group', '[1.0.0,2.0.0)'), ('org.hawk.modelio.exml.feature.feature.group', '[1.0.0,2.0.0)'), ('org.hawk.orientdb.feature.feature.group', 
'[1.0.0,2.0.0)'), ('org.hawk.osgiserver.feature.feature.group', '[1.0.0,2.0.0)'), ('org.hawk.svn.feature.feature.group', '[1.0.0,2.0.0)'), ('org.hawk.ui.feature.feature.group', '[1.0.0,2.0.0)'), ('org.hawk.workspace.feature.feature.group', '[1.0.0,2.0.0)'), ('org.mondo.editor.feature.feature.group', '[0.3.8,2.0.0)'), ('uk.ac.york.mondo.integration.api.feature.feature.group', '[1.0.0,2.0.0)'), ('uk.ac.york.mondo.integration.clients.cli.feature.feature.group', '[1.0.0,2.0.0)'), ('uk.ac.york.mondo.integration.hawk.cli.feature.feature.group', '[1.0.0,2.0.0)'), ('uk.ac.york.mondo.integration.hawk.emf.dt.feature.feature.group', '[1.0.0,2.0.0)'), ('uk.ac.york.mondo.integration.hawk.emf.feature.feature.group', '[1.0.0,2.0.0)'), ('uk.ac.york.mondo.integration.hawk.emf.emfsplitter.feature.feature.group', '[1.0.0,2.0.0)'), ('uk.ac.york.mondo.integration.hawk.remote.thrift.feature.feature.group', '[1.0.0,2.0.0)'), ('uk.ac.york.mondo.integration.server.users.cli.feature.feature.group', '[1.0.0,2.0.0)'), ('ReactiveATLFeature.feature.group', '[1.0.0,2.0.0)'), ('org.eclipse.viatra.cep.feature.feature.group', '[0.8.0,2.0.0)'), ('org.eclipse.viatra.dse.feature.feature.group', '[0.8.0,2.0.0)'), ('org.eclipse.viatra.dse.merge.feature.feature.group', '[1.0.0,2.0.0)'), ('org.eclipse.viatra.emf.mwe2integration.feature.feature.group', '[0.8.0,2.0.0)'), ('org.eclipse.viatra.emf.runtime.feature.feature.group', '[0.8.0,2.0.0)'), ('org.eclipse.viatra.modelobfuscator.feature.feature.group', '[0.7.0,2.0.0)'), ('org.mondo.collaboration.security.macl.feature.feature.group', '[1.0.0,2.0.0)'), ('org.mondo.collaboration.security.mpbl.feature.feature.group', '[1.0.0,2.0.0)'), # ('org.mondo.wt.cstudy.editor.feature.feature.group', '[1.0.0,2.0.0)'), ('org.mondo.wt.cstudy.merge.feature.feature.group', '[1.0.0,2.0.0)'), ('org.mondo.wt.cstudy.model.feature.feature.group', '[1.0.0,2.0.0)'), ) ENTRIES_GPL = ( ('org.hawk.neo4jv2.feature.feature.group', '[1.0.0,2.0.0)'), ('org.hawk.ifc.feature.feature.group', '[1.0.0,2.0.0)'), ('uk.ac.york.mondo.integration.hawk.emf.ifcexport.feature.feature.group', '[1.0.0,2.0.0)'), ('org.hawk.modelio.feature.feature.group', '[1.0.0,2.0.0)'), ) parser = argparse.ArgumentParser(description='Generates the p2.inf file for the MONDO Eclipse products.') parser.add_argument('--gpl', action='store_true', help='Include advice for GPL features') args = parser.parse_args() ENTRIES = ENTRIES_NOGPL + ENTRIES_GPL if args.gpl else ENTRIES_NOGPL n_requires = 0 for entry in ENTRIES: print("""requires.{n}.namespace = org.eclipse.equinox.p2.iu requires.{n}.name = {name} requires.{n}.range = {range} """.format(n=n_requires, name=entry[0], range=entry[1])) n_requires += 1
epl-1.0
707,290,959,912,996,600
65.242424
105
0.657823
false
2.394304
false
false
false
jyotiska/colorweave
setup.py
1
1640
#!/usr/bin/env python

from distutils.core import setup

import sys, os, multiprocessing
import colorweave

requires = []

py_version = sys.version_info[:2]

PY3 = py_version[0] == 3

if PY3:
    raise RuntimeError('colorweave runs only on Python 2.6 or Python 2.7')
else:
    if py_version < (2, 6):
        raise RuntimeError('On Python 2, colorweave requires Python 2.6 or better')
    if py_version > (2, 6):
        pass

def read(fname):
    return open(os.path.join(os.path.dirname(__file__), fname)).read()

setup(
    name='colorweave',
    version='0.1',
    description="Extract dominant colors from an image as a color palette",
    long_description=read('README.rst'),
    classifiers=[
        "Development Status :: 3 - Alpha",
        "Intended Audience :: Developers",
        "Intended Audience :: Information Technology",
        "Intended Audience :: Science/Research",
        "Intended Audience :: System Administrators",
        "Intended Audience :: Telecommunications Industry",
        "License :: Free for non-commercial use",
        "Programming Language :: Python :: 2.6",
        "Programming Language :: Python :: 2.7",
    ],
    install_requires=[
        'Pillow==1.7.8',
        'PIL>=1.1.6',
        'colormath==1.0.8',
        'numpy>=1.6.1',
        'webcolors>=1.4',
    ],
    keywords='color dominant palette colorweave kmeans css3 css21 name webcolors',
    author='Jyotiska NK',
    author_email='jyotiska123@gmail.com',
    url='http://github.com/jyotiska/colorweave',
    py_modules=['colorweave'],
    scripts=['colorweave.py'],
)
gpl-2.0
400,651,015,008,337,540
28.943396
83
0.604878
false
3.620309
false
false
false
jbergantine/django-blog
tests/test_settings.py
1
2101
import os

# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))

SECRET_KEY = 'fake-key'

DEBUG = True

ALLOWED_HOSTS = []

INSTALLED_APPS = [
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sites',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    'django_blog',
    'tests',
]

ROOT_URLCONF = 'tests.urls'

TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
            ],
        },
    },
]

WSGI_APPLICATION = 'tests.wsgi.application'

DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
    }
}

# Password validation
# https://docs.djangoproject.com/en/1.9/ref/settings/#auth-password-validators

AUTH_PASSWORD_VALIDATORS = [
    {
        'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
    },
]

# Internationalization
# https://docs.djangoproject.com/en/1.9/topics/i18n/

LANGUAGE_CODE = 'en-us'

TIME_ZONE = 'UTC'

USE_I18N = True

USE_L10N = True

USE_TZ = True

# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.9/howto/static-files/

STATIC_URL = '/static/'

SITE_ID = 1

FIXTURE_DIRS = [os.path.join(BASE_DIR, 'fixtures'), ]

BLOG_PAGE_SIZE = 15
mit
5,818,926,728,844,144,000
20.885417
83
0.623037
false
3.41626
false
true
false
liugangabc/ccs_web
protocol/proto_indexResponseCheckAgentInfo_pb2.py
1
4660
# Generated by the protocol buffer compiler. DO NOT EDIT! # source: proto_indexResponseCheckAgentInfo.proto from google.protobuf import descriptor as _descriptor from google.protobuf import message as _message from google.protobuf import reflection as _reflection from google.protobuf import descriptor_pb2 # @@protoc_insertion_point(imports) DESCRIPTOR = _descriptor.FileDescriptor( name='proto_indexResponseCheckAgentInfo.proto', package='', serialized_pb='\n\'proto_indexResponseCheckAgentInfo.proto\"\xb9\x01\n\x1bindexResponseCheckAgentInfo\x12?\n\x07rspInfo\x18\x01 \x03(\x0b\x32..indexResponseCheckAgentInfo.checkResponseInfo\x1aY\n\x11\x63heckResponseInfo\x12\x0b\n\x03sIP\x18\x01 \x02(\t\x12\x0c\n\x04\x66\x43pu\x18\x02 \x01(\x02\x12\x0c\n\x04\x66Mem\x18\x03 \x01(\x02\x12\r\n\x05\x66\x44isk\x18\x04 \x01(\x02\x12\x0c\n\x04iTfs\x18\x05 \x01(\x05') _INDEXRESPONSECHECKAGENTINFO_CHECKRESPONSEINFO = _descriptor.Descriptor( name='checkResponseInfo', full_name='indexResponseCheckAgentInfo.checkResponseInfo', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name='sIP', full_name='indexResponseCheckAgentInfo.checkResponseInfo.sIP', index=0, number=1, type=9, cpp_type=9, label=2, has_default_value=False, default_value=unicode("", "utf-8"), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='fCpu', full_name='indexResponseCheckAgentInfo.checkResponseInfo.fCpu', index=1, number=2, type=2, cpp_type=6, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='fMem', full_name='indexResponseCheckAgentInfo.checkResponseInfo.fMem', index=2, number=3, type=2, cpp_type=6, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='fDisk', full_name='indexResponseCheckAgentInfo.checkResponseInfo.fDisk', index=3, number=4, type=2, cpp_type=6, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='iTfs', full_name='indexResponseCheckAgentInfo.checkResponseInfo.iTfs', index=4, number=5, type=5, cpp_type=1, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), ], extensions=[ ], nested_types=[], enum_types=[ ], options=None, is_extendable=False, extension_ranges=[], serialized_start=140, serialized_end=229, ) _INDEXRESPONSECHECKAGENTINFO = _descriptor.Descriptor( name='indexResponseCheckAgentInfo', full_name='indexResponseCheckAgentInfo', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name='rspInfo', full_name='indexResponseCheckAgentInfo.rspInfo', index=0, number=1, type=11, cpp_type=10, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), ], extensions=[ ], nested_types=[_INDEXRESPONSECHECKAGENTINFO_CHECKRESPONSEINFO, ], enum_types=[ ], options=None, is_extendable=False, extension_ranges=[], serialized_start=44, serialized_end=229, ) 
_INDEXRESPONSECHECKAGENTINFO_CHECKRESPONSEINFO.containing_type = _INDEXRESPONSECHECKAGENTINFO;
_INDEXRESPONSECHECKAGENTINFO.fields_by_name['rspInfo'].message_type = _INDEXRESPONSECHECKAGENTINFO_CHECKRESPONSEINFO
DESCRIPTOR.message_types_by_name['indexResponseCheckAgentInfo'] = _INDEXRESPONSECHECKAGENTINFO

class indexResponseCheckAgentInfo(_message.Message):
  __metaclass__ = _reflection.GeneratedProtocolMessageType

  class checkResponseInfo(_message.Message):
    __metaclass__ = _reflection.GeneratedProtocolMessageType
    DESCRIPTOR = _INDEXRESPONSECHECKAGENTINFO_CHECKRESPONSEINFO

    # @@protoc_insertion_point(class_scope:indexResponseCheckAgentInfo.checkResponseInfo)

  DESCRIPTOR = _INDEXRESPONSECHECKAGENTINFO

  # @@protoc_insertion_point(class_scope:indexResponseCheckAgentInfo)


# @@protoc_insertion_point(module_scope)
apache-2.0
-1,883,150,462,296,093,200
37.833333
414
0.741845
false
3.369487
false
false
false
cvandeplas/plaso
plaso/formatters/skype.py
1
2395
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright 2013 The Plaso Project Authors.
# Please see the AUTHORS file for details on individual authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Formatter for the Skype Main database events."""

from plaso.formatters import interface


class SkypeAccountFormatter(interface.ConditionalEventFormatter):
  """Formatter for Skype Account information."""

  DATA_TYPE = 'skype:event:account'

  FORMAT_STRING_PIECES = [u'{username}', u'[{email}]', u'Country: {country}']

  SOURCE_LONG = 'Skype Account'
  SOURCE_SHORT = 'LOG'


class SkypeChatFormatter(interface.ConditionalEventFormatter):
  """Formatter for Skype chat events."""

  DATA_TYPE = 'skype:event:chat'

  FORMAT_STRING_PIECES = [
      u'From: {from_account}',
      u'To: {to_account}',
      u'[{title}]',
      u'Message: [{text}]']

  FORMAT_STRING_SHORT_PIECES = [u'From: {from_account}', u' To: {to_account}']

  SOURCE_LONG = 'Skype Chat MSG'
  SOURCE_SHORT = 'LOG'


class SkypeSMSFormatter(interface.ConditionalEventFormatter):
  """Formatter for Skype SMS."""

  DATA_TYPE = 'skype:event:sms'

  FORMAT_STRING_PIECES = [u'To: {number}', u'[{text}]']

  SOURCE_LONG = 'Skype SMS'
  SOURCE_SHORT = 'LOG'


class SkypeCallFormatter(interface.ConditionalEventFormatter):
  """Formatter for Skype calls."""

  DATA_TYPE = 'skype:event:call'

  FORMAT_STRING_PIECES = [
      u'From: {src_call}',
      u'To: {dst_call}',
      u'[{call_type}]']

  SOURCE_LONG = 'Skype Call'
  SOURCE_SHORT = 'LOG'


class SkypeTransferFileFormatter(interface.ConditionalEventFormatter):
  """Formatter for Skype transfer files"""

  DATA_TYPE = 'skype:event:transferfile'

  FORMAT_STRING_PIECES = [
      u'Source: {source}',
      u'Destination: {destination}',
      u'File: {transferred_filename}',
      u'[{action_type}]']

  SOURCE_LONG = 'Skype Transfer Files'
  SOURCE_SHORT = 'LOG'
apache-2.0
-3,638,039,270,999,130,000
26.215909
78
0.692276
false
3.486172
false
false
false
djkonro/client-python
kubernetes/client/models/v1_pod_condition.py
2
7465
# coding: utf-8 """ Kubernetes No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen) OpenAPI spec version: v1.7.4 Generated by: https://github.com/swagger-api/swagger-codegen.git """ from pprint import pformat from six import iteritems import re class V1PodCondition(object): """ NOTE: This class is auto generated by the swagger code generator program. Do not edit the class manually. """ def __init__(self, last_probe_time=None, last_transition_time=None, message=None, reason=None, status=None, type=None): """ V1PodCondition - a model defined in Swagger :param dict swaggerTypes: The key is attribute name and the value is attribute type. :param dict attributeMap: The key is attribute name and the value is json key in definition. """ self.swagger_types = { 'last_probe_time': 'datetime', 'last_transition_time': 'datetime', 'message': 'str', 'reason': 'str', 'status': 'str', 'type': 'str' } self.attribute_map = { 'last_probe_time': 'lastProbeTime', 'last_transition_time': 'lastTransitionTime', 'message': 'message', 'reason': 'reason', 'status': 'status', 'type': 'type' } self._last_probe_time = last_probe_time self._last_transition_time = last_transition_time self._message = message self._reason = reason self._status = status self._type = type @property def last_probe_time(self): """ Gets the last_probe_time of this V1PodCondition. Last time we probed the condition. :return: The last_probe_time of this V1PodCondition. :rtype: datetime """ return self._last_probe_time @last_probe_time.setter def last_probe_time(self, last_probe_time): """ Sets the last_probe_time of this V1PodCondition. Last time we probed the condition. :param last_probe_time: The last_probe_time of this V1PodCondition. :type: datetime """ self._last_probe_time = last_probe_time @property def last_transition_time(self): """ Gets the last_transition_time of this V1PodCondition. Last time the condition transitioned from one status to another. :return: The last_transition_time of this V1PodCondition. :rtype: datetime """ return self._last_transition_time @last_transition_time.setter def last_transition_time(self, last_transition_time): """ Sets the last_transition_time of this V1PodCondition. Last time the condition transitioned from one status to another. :param last_transition_time: The last_transition_time of this V1PodCondition. :type: datetime """ self._last_transition_time = last_transition_time @property def message(self): """ Gets the message of this V1PodCondition. Human-readable message indicating details about last transition. :return: The message of this V1PodCondition. :rtype: str """ return self._message @message.setter def message(self, message): """ Sets the message of this V1PodCondition. Human-readable message indicating details about last transition. :param message: The message of this V1PodCondition. :type: str """ self._message = message @property def reason(self): """ Gets the reason of this V1PodCondition. Unique, one-word, CamelCase reason for the condition's last transition. :return: The reason of this V1PodCondition. :rtype: str """ return self._reason @reason.setter def reason(self, reason): """ Sets the reason of this V1PodCondition. Unique, one-word, CamelCase reason for the condition's last transition. :param reason: The reason of this V1PodCondition. :type: str """ self._reason = reason @property def status(self): """ Gets the status of this V1PodCondition. Status is the status of the condition. Can be True, False, Unknown. 
More info: https://kubernetes.io/docs/concepts/workloads/pods/pod-lifecycle#pod-conditions :return: The status of this V1PodCondition. :rtype: str """ return self._status @status.setter def status(self, status): """ Sets the status of this V1PodCondition. Status is the status of the condition. Can be True, False, Unknown. More info: https://kubernetes.io/docs/concepts/workloads/pods/pod-lifecycle#pod-conditions :param status: The status of this V1PodCondition. :type: str """ if status is None: raise ValueError("Invalid value for `status`, must not be `None`") self._status = status @property def type(self): """ Gets the type of this V1PodCondition. Type is the type of the condition. Currently only Ready. More info: https://kubernetes.io/docs/concepts/workloads/pods/pod-lifecycle#pod-conditions :return: The type of this V1PodCondition. :rtype: str """ return self._type @type.setter def type(self, type): """ Sets the type of this V1PodCondition. Type is the type of the condition. Currently only Ready. More info: https://kubernetes.io/docs/concepts/workloads/pods/pod-lifecycle#pod-conditions :param type: The type of this V1PodCondition. :type: str """ if type is None: raise ValueError("Invalid value for `type`, must not be `None`") self._type = type def to_dict(self): """ Returns the model properties as a dict """ result = {} for attr, _ in iteritems(self.swagger_types): value = getattr(self, attr) if isinstance(value, list): result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) elif hasattr(value, "to_dict"): result[attr] = value.to_dict() elif isinstance(value, dict): result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) else: result[attr] = value return result def to_str(self): """ Returns the string representation of the model """ return pformat(self.to_dict()) def __repr__(self): """ For `print` and `pprint` """ return self.to_str() def __eq__(self, other): """ Returns true if both objects are equal """ if not isinstance(other, V1PodCondition): return False return self.__dict__ == other.__dict__ def __ne__(self, other): """ Returns true if both objects are not equal """ return not self == other
apache-2.0
5,466,332,796,939,748,000
28.741036
166
0.576825
false
4.312536
false
false
false
code-sauce/tensorflow
scripts/sr_classify_image.py
1
11313
# Copyright 2015 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Simple image classification with Inception. Run image classification with Inception trained on ImageNet 2012 Challenge data set. This program creates a graph from a saved GraphDef protocol buffer, and runs inference on an input JPEG image. It outputs human readable strings of the top 5 predictions along with their probabilities. Change the --image_file argument to any jpg image to compute a classification of that image. Please see the tutorial and website for a detailed description of how to use this script to perform image recognition. https://tensorflow.org/tutorials/image_recognition/ """ # -*- coding: utf-8 -*- from __future__ import absolute_import from __future__ import division from __future__ import print_function import solr import os.path import re import requests import csv import sys import tarfile import boto.dynamodb import boto import sqlite3 as lite import numpy as np import logging from six.moves import urllib import tensorflow as tf import argparse from datetime import datetime FLAGS = tf.app.flags.FLAGS BATCH_SIZE = 1000 COMMIT_BATCH_SIZE = 100 MODULE_PATH = os.path.abspath(os.path.split(__file__)[0]) PROJECT_PATH = "/".join(MODULE_PATH.split("/")[:-1]) NUM_TOP_PREDICTIONS = 5 PREDICTION_SQLITE_FILE_S3_LOCATION = 'net.shoprunner.prd.jarvis/tensorflow_recommendations' LABEL_MATCH_THRESHOLD = 0.5 # classify_image_graph_def.pb: # Binary representation of the GraphDef protocol buffer. # imagenet_synset_to_human_label_map.txt: # Map from synset ID to a human readable string. # imagenet_2012_challenge_label_map_proto.pbtxt: # Text representation of a protocol buffer mapping a label to synset ID. tf.app.flags.DEFINE_integer('num_top_predictions', 5, """Display this many predictions.""") DATA_DIR = None def _get_labels_from_file(filepath): """ The labels in the labels file generated while model creation is in a specific order. """ labels = '' with open(filepath) as f: labels = f.read() return filter(lambda x: x, labels.split('\n')) def get_best_sr_image_url(image_urls): for image in image_urls: if image.startswith('180x180|'): return image[8:] return None def create_graph(data_dir): """Creates a graph from saved GraphDef file and returns a saver.""" # Creates graph from saved graph_def.pb. 
# with tf.gfile.FastGFile(os.path.join( # PROJECT_PATH, 'output_graph.pb'), 'rb') as f: with tf.gfile.FastGFile(os.path.join( data_dir, 'output', 'output_graph.pb'), 'rb') as f: graph_def = tf.GraphDef() graph_def.ParseFromString(f.read()) del(graph_def.node[1].attr["dct_method"]) _ = tf.import_graph_def(graph_def, name='') def _get_sqlite_db_name_from_file_path(file_path): return file_path.split('/')[-1] def _sync_sqlite_file_to_s3(file_path): if datetime.now().minute == 0: # every hour # sync the sqlite file to a S3 bucket for Feed Loader to use s3_file_suffix = datetime.now().strftime("%Y/%m/%d/%H") s3_path = "%s/%s" % (PREDICTION_SQLITE_FILE_S3_LOCATION, s3_file_suffix) conn = boto.connect_s3() bucket = conn.create_bucket(s3_path) with open(file_path, 'r') as f: data = f.read() key = bucket.new_key(_get_sqlite_db_name_from_file_path(file_path)) key.set_contents_from_string(data) def run_inference_on_images(sess, image, doc_id, name, description, partner_code, table=None, cursor=None, file_path=None, labels_to_find=None, threshold=LABEL_MATCH_THRESHOLD, labels=None): """Runs inference on an image. Args: sess: tf session image: Image file name. doc_id: document id Returns: Nothing """ try: image_data = requests.get(image).content except Exception as ex: print(ex) return # Some useful tensors: # 'softmax:0': A tensor containing the normalized prediction across # 1000 labels. # 'pool_3:0': A tensor containing the next-to-last layer containing 2048 # float description of the image. # 'DecodeJpeg/contents:0': A tensor containing a string providing JPEG # encoding of the image. # Runs the softmax tensor by feeding the image_data as input to the graph. softmax_tensor = sess.graph.get_tensor_by_name('final_result:0') feature_tensor = sess.graph.get_tensor_by_name('pool_3:0') # ADDED predictions = sess.run(softmax_tensor, {'DecodeJpeg/contents:0': image_data}) predictions = np.squeeze(predictions) feature_set = sess.run(feature_tensor, {'DecodeJpeg/contents:0': image_data}) # ADDED feature_set = np.squeeze(feature_set) # ADDED # print(feature_set) # ADDED top_k = predictions.argsort()[-NUM_TOP_PREDICTIONS:][::-1] for node_id in top_k: human_string = labels[node_id] score = predictions[node_id] filter_output = True if 'dresses' in labels_to_find: filter_output = filter_output and dress_filter_outs(name, description) if ( human_string in labels_to_find and filter_output ): if score > threshold: print(doc_id, name, image, human_string, score) cursor.execute( "INSERT INTO %s VALUES ('%s', '%s', '%s', %s, CURRENT_TIMESTAMP)" % (table, doc_id, partner_code, human_string, score) ) def extract_features_and_files(image_data, sess): pool3 = sess.graph.get_tensor_by_name('incept/pool_3:0') features = [] files = [] for fname, data in image_data.iteritems(): try: pool3_features = sess.run(pool3, {'incept/DecodeJpeg/contents:0': data}) features.append(np.squeeze(pool3_features)) files.append(fname) except: logging.error("error while processing fname {}".format(fname)) return features, files def get_batch(batch, solr_query): """ Returns: a list of tuples [(docid, imageurl), ....] 
""" s = solr.SolrConnection('http://solr-prod.s-9.us:8983/solr/shoprunner') images = [] results = s.query(solr_query, fields=['image_url', 'id', 'name', 'description', 'partner_code'], rows=BATCH_SIZE, start=batch*BATCH_SIZE).results image_sets = [(x['image_url'], x['id'], x['name'], x['description'], x['partner_code']) for x in results] print('products: %s to %s' % ((batch*BATCH_SIZE), (batch+1)*BATCH_SIZE)) count = 0 for image_set, doc_id, name, description, partner_code in image_sets: count += 1 # has all resolutions. we pick the biggest one for best match (hopefully?) best_match_image_url = None for image in image_set: if image.startswith('180x180|'): best_match_image_url = image[8:] break if not best_match_image_url: continue images.append((best_match_image_url, doc_id, name, description, partner_code)) return images def dress_filter_outs(name, description): if ( ('dress' in name.lower() or 'dress' in description.lower()) and 'skirt' not in name.lower() and 'top' not in name.lower() ): return True return False def tops_filter_outs(name, description): if 'top' in name.lower(): return True return False def main(): parser = argparse.ArgumentParser() parser.add_argument('--data-dir', help='Location to the data directory') parser.add_argument('--sqlite-file', help='Sqlite file location where the results of categorization are written to and used later') parser.add_argument('--table-name', help='Sqlite table name') parser.add_argument('--label-file', help='Path to the file where labels are generated') parser.add_argument('--labels-to-find', help='Label(s) to look for and classify. CSV separated if multiple labels') parser.add_argument('--solr-query', help='SOLR query phrase to look for the corpus and match the images eg: name_search:iPhone') parser.add_argument('--threshold', help='Minimum threshold to match a label') parser.add_argument('--sync-s3', help='Sync the classification results to S3') args = parser.parse_args() if not args.data_dir: raise Exception('Location to data directory is required.') if not args.label_file: raise Exception('Location to locate the output_labels.txt file') DATA_DIR = args.data_dir with tf.Session() as sess: create_graph(DATA_DIR) table = args.table_name or "ProductCategory" file_path = args.sqlite_file or 'suggested_dresses.db' label_file = args.label_file labels = _get_labels_from_file(label_file) labels_to_find = args.labels_to_find.split(",") if args.labels_to_find else labels solr_query = args.solr_query or '*:*' threshold = float(args.threshold or LABEL_MATCH_THRESHOLD) sync_s3 = args.sync_s3 or False conn = lite.connect(file_path) cur = conn.cursor() cur.execute("DROP TABLE IF EXISTS %s" % table) cur.execute( """ CREATE TABLE %s(doc_id VARCHAR(50), partner_code VARCHAR(50), category VARCHAR(50), confidence DECIMAL, created_date TIMESTAMP DEFAULT CURRENT_TIMESTAMP, PRIMARY KEY(doc_id, partner_code))""" % table ) cur.execute("CREATE INDEX IF NOT EXISTS idx_partner_code ON %s (partner_code)" % table) cur.execute("CREATE INDEX IF NOT EXISTS idx_created_date ON %s (partner_code)" % table) batch = 0 #with conn: image_tuples = get_batch(batch, solr_query) while image_tuples: for image_url, doc_id, name, description, partner_code in image_tuples: try: run_inference_on_images( sess, image_url, doc_id, name, description, partner_code, table=table,cursor=cur, file_path=file_path, labels_to_find=labels_to_find, threshold=threshold, labels=labels ) conn.commit() except Exception as ex: logging.exception("Error running inference on image: %s" % doc_id) batch+=1 
image_tuples = get_batch(batch, solr_query) if sync_s3: _sync_sqlite_file_to_s3(file_path) if __name__ == '__main__': main()
apache-2.0
300,159,668,755,227,650
37.090909
149
0.632458
false
3.753484
false
false
false
changbindu/rufeng-finance
src/stock.py
1
8402
# coding=utf-8
__author__ = 'Du, Changbin <changbin.du@gmail.com>'

import json
import datetime
import logging
from collections import MutableMapping

import numpy as np
from pandas import DataFrame


class StockBase(object):
    def __init__(self, code=None, name=None):
        # Basic info
        self.code = code  # stock code
        self.name = name  # stock name

        # data and update info
        self.last_update = None
        self.hist_data = None  # DataFrame

    def __str__(self):
        ''' convert to string '''
        return json.dumps({"code": self.code,
                           "name": self.name,
                           }, ensure_ascii=False)

    def __getitem__(self, key):
        if key not in self.__dict__:
            raise KeyError
        return self.__getattribute__(key)

    def __setitem__(self, key, value):
        if key not in self.__dict__:
            raise KeyError('key %s is invalid' % key)
        return self.__setattr__(key, value)

    def __delitem__(self, key):
        raise NotImplementedError

    def __len__(self):
        return len(self.__dict__)

    def __iter__(self):
        return self.__dict__.__iter__()

    def sanitize(self):
        if self.hist_data is not None:
            # self.hist_data.index = self.hist_data.index.map(np.datetime64)
            self.hist_data.index = self.hist_data.index.map(str)
            self.hist_data.sort_index(ascending=False, inplace=True)

    def check(self):
        pass

    @property
    def hist_len(self):
        return self.hist_data is None and 0 or self.hist_data.index.size

    @property
    def hist_max(self):
        return self.hist_data.high.max()

    @property
    def hist_min(self):
        return self.hist_data.low.min()

    @property
    def hist_start_date(self):
        return np.datetime64(self.hist_data.index[-1], 'D')

    @property
    def hist_last_date(self):
        return np.datetime64(self.hist_data.index[0], 'D')

    @property
    def ma30(self):
        return self._get_ma(30)

    @property
    def ma60(self):
        return self._get_ma(60)

    @property
    def ma120(self):
        return self._get_ma(120)

    @property
    def ma240(self):
        return self._get_ma(240)

    def _get_ma(self, window):
        df = self.hist_data[['open', 'close', 'low', 'high',
                             'volume', 'turnover']].sort_index(ascending=True)
        r = df.rolling(window=window)
        df = r.mean()
        df.sort_index(ascending=False, inplace=True)
        return df

    def get_hist_date(self, loc):
        return self.hist_data.index[loc]

    def get_turnover_avg(self, days):
        return self.hist_data.turnover[0:days].mean()


class Stock(StockBase):
    st_prefix = ('*ST', 'ST', 'S*ST', 'SST')

    ''' stock class'''
    def __init__(self, code=None, name=None):
        super(Stock, self).__init__(code=code, name=name)

        # Basics
        self.industry = None            # industry
        self.area = None                # region
        self.pe = None                  # price/earnings ratio
        self.outstanding = None         # outstanding (tradable) shares
        self.nmc = None                 # tradable market cap (10k CNY)
        self.totals = None              # total shares (10k)
        self.mktcap = None              # total market cap (10k CNY)
        self.totalAssets = None         # total assets (10k)
        self.liquidAssets = None        # current assets
        self.fixedAssets = None         # fixed assets
        self.reserved = None            # capital reserve
        self.reservedPerShare = None    # capital reserve per share
        self.eps = None                 # earnings per share
        self.eps_yoy = None             # earnings per share, year-on-year (%)
        self.bvps = None                # net assets per share
        self.pb = None                  # price/book ratio
        self.timeToMarket = None        # listing date
        self.roe = None                 # return on equity (%)
        self.epcf = None                # cash flow per share (CNY)
        self.net_profits = None         # net profit (10k CNY)
        self.profits_yoy = None         # net profit, year-on-year (%)
        self.net_profit_ratio = None    # net profit margin (%)
        self.gross_profit_rate = None   # gross profit margin (%)
        self.business_income = None     # operating revenue (million CNY)
        self.bips = None                # main business revenue per share (CNY)
        self.distrib = None             # distribution plan
        self.report_date = None         # report release date
        self.arturnover = None          # accounts receivable turnover (times)
        self.arturndays = None          # accounts receivable turnover (days)
        self.inventory_turnover = None  # inventory turnover (times)
        self.inventory_days = None      # inventory turnover (days)
        self.currentasset_turnover = None  # current asset turnover (times)
        self.currentasset_days = None   # current asset turnover (days)
        self.mbrg = None                # main business revenue growth rate (%)
        self.nprg = None                # net profit growth rate (%)
        self.nav = None                 # net asset growth rate
        self.targ = None                # total asset growth rate
        self.epsg = None                # earnings per share growth rate
        self.seg = None                 # shareholders' equity growth rate
        self.currentratio = None        # current ratio
        self.quickratio = None          # quick ratio
        self.cashratio = None           # cash ratio
        self.icratio = None             # interest coverage ratio
        self.sheqratio = None           # shareholders' equity ratio
        self.adratio = None             # shareholders' equity growth rate
        self.cf_sales = None            # operating cash flow to sales revenue ratio
        self.rateofreturn = None        # operating cash flow return on assets
        self.cf_nm = None               # operating cash flow to net profit ratio
        self.cf_liabilities = None      # operating cash flow to liabilities ratio
        self.cashflowratio = None       # cash flow ratio

        self.price = float('NaN')

    def sanitize(self):
        super(Stock, self).sanitize()

    @property
    def qfq_data(self):
        """warning: calculate qfq data is expensive"""
        max_factor = self.hist_data.factor[0]
        df = self.hist_data[['open', 'close', 'low', 'high']]
        df = df.div(max_factor/self.hist_data.factor, axis='index')
        df = df.join(self.hist_data[['volume', 'turnover', 'factor']])
        df.sort_index(ascending=False, inplace=True)
        assert df.index.size == self.hist_data.index.size
        return df

    def get_hist_value(self, column, date):
        return self.hist_data[column][date]

    def check(self):
        if self.hist_data.size == 0:
            logging.warning('no data in hist_data')
            return False
        df_nan = self.hist_data[self.hist_data.isnull().any(axis=1)]
        if df_nan.index.size > 0:
            logging.warning('found nan in hist_data\n%s' % df_nan)
            return False
        if self.hist_data.index.has_duplicates:
            logging.warning('found duplicates\n%s' %
                            self.hist_data.index.get_duplicates())
            return False
        return True


class Index(StockBase):
    # index code -> (symbol, index name)
    index_name_map = {'000001': ('sh', '上证指数'),
                      '399001': ('sz', '深圳成指'),
                      '000300': ('hs300', '沪深300指数'),
                      '000016': ('sz50', '上证50'),
                      '399101': ('zxb', '中小板'),
                      '399005': ('cyb', '创业板')
                      }

    def __init__(self, code=None, name=None, symbol=None):
        super(Index, self).__init__(code=code, name=name)
        self.symbol = symbol


class StockCalendar(object):
    def __init__(self):
        pass

    def is_trading_day(self, date=datetime.date.today()):
        # quick check: weekday() is Monday=0 .. Sunday=6, so 5 and 6 are the weekend
        if date.weekday() >= 5:
            return False
        # check from history
        #if str(date) not in self.sz_index.hist_date.index:
        #    return False
        return True

    def is_trading_now(self):
        if not self.is_trading_day():
            return False
        now = datetime.datetime.now()
        t1 = datetime.datetime(now.year, now.month, now.day, 9, 30)
        t2 = datetime.datetime(now.year, now.month, now.day, 11, 30)
        t3 = datetime.datetime(now.year, now.month, now.day, 13, 0)
        t4 = datetime.datetime(now.year, now.month, now.day, 15, 0)

        def time_in(t, t1, t2):
            return (t-t1).total_seconds() >= 0 and (t2-t).total_seconds() > 0

        return time_in(now, t1, t2) or time_in(now, t3, t4)

    def last_completed_trade_day(self):
        today = datetime.date.today()
        if self.is_trading_day() and datetime.datetime.now().hour > 13:
            return today
        for i in range(1, 7):
            date = today-datetime.timedelta(days=i)
            if self.is_trading_day(date):
                return date
lgpl-3.0
-4,006,361,539,967,852,000
30.40081
110
0.557891
false
2.835832
false
false
false
thomas-hinterecker/SQUARELAND2.0
Miscellaneous/OpenSesame/squareland2/squareland2.py
1
2893
# Import the required modules.
import shutil
import subprocess
from os import chdir
from os import listdir
from os.path import isfile, join

import pygame
import psychopy
from libopensesame import debug
from libopensesame.item import item
from libqtopensesame.items.qtautoplugin import qtautoplugin


class squareland2(item):

    description = u'Plug-in description'

    def __init__(self, name, experiment, script=None):
        """
        Constructor.

        Arguments:
        name       -- The name of the plug-in.
        experiment -- The experiment object.

        Keyword arguments:
        script     -- A definition script. (default=None)
        """
        # Call the parent constructor.
        item.__init__(self, name, experiment, script)

    def prepare(self):
        # Call parent functions.
        item.prepare(self)

        # Prepare your plug-in here.
        #content = ""
        #with open(self.experiment.pool_folder + '\\' + self.get('settings_file'), 'r') as content_file:
        #    content = content_file.read()
        #
        #target = open("plugins\\squareland2\\SQUARELAND2.0\\Assets\\Settings.xml", 'w')
        #target.truncate()
        #target.write(content)
        #target.close()
        #
        #content = ""
        #with open(self.experiment.pool_folder + '\\' + self.get('procedure_file'), 'r') as content_file:
        #    content = content_file.read()
        #
        #target = open("plugins\\squareland2\\SQUARELAND2.0\\Assets\\Procedure.xml", 'w')
        #target.truncate()
        #target.write(content)
        #target.close()

    def run(self):
        # Record the timestamp of the plug-in execution.
        self.set_item_onset()

        # Run your plug-in here.
        win = self.experiment.window
        if self.experiment.canvas_backend == 'psycho':
            win.winHandle.minimize()
            win.fullscr = False
            win.flip()
        else:
            pygame.display.iconify()

        if True == True:
            try:
                chdir(self.get('path'))
                subprocess.call([self.get('path') + "\\SQUARELAND2.0.exe"])  #launch external program
                #print self.get('path') + "\\SQUARELAND2.0.exe"
                path = self.get('path') + "\\LogFiles\\"
                for f in listdir(path):
                    f_split = f.split('.')
                    file = join(path, f)
                    if isfile(file) and f_split.pop() == "txt":
                        dst = self.get('path') + "\\..\\%s_" % self.get('subject_nr')
                        if isfile(dst + f) == True:
                            count = 1
                            dst2 = dst + '%s_' % count
                            while isfile(dst2 + f) == True:
                                count += 1
                                dst2 = dst + '%s_' % count
                            dst = dst2
                        shutil.copyfile(file, dst + f)
            except WindowsError:
                print "SQUARELAND2.0 application couldn't be launched!"

        if self.experiment.canvas_backend == 'psycho':
            win.winHandle.activate()
            win.winHandle.maximize()
            win.fullscr = True
            win.flip()
        else:
            pygame.display.set_mode()


class qtsquareland2(squareland2, qtautoplugin):

    def __init__(self, name, experiment, script=None):
        # Call parent constructors.
        squareland2.__init__(self, name, experiment, script)
        qtautoplugin.__init__(self, __file__)
gpl-2.0
-7,096,678,303,757,643,000
24.610619
99
0.646733
false
3.035677
false
false
false
HerdOfBears/Learning_Machine_Learning
Reinforcement Learning/cart_pole_bins.py
1
6641
import pandas as pd
import gym
import numpy as np
import matplotlib.pyplot as plt

GAMMA = 0.9
ALL_POSSIBLE_ACTIONS = [0,1]


def random_action(a, epsilon):
    p = np.random.rand()
    if p < epsilon:
        action = np.random.choice(ALL_POSSIBLE_ACTIONS)
    else:
        action = a
    return action


def max_dict(dictionary, s):
    # dictionary --> dictionary of tuples (s,x)
    # s --> element
    # This function will find argmax[x]{ dictionary(s,x) }
    max_x = None
    max_val = float("-inf")
    """
    for tup in dictionary:
        if s==tup[0]:
            if dictionary[tup] >= max_val:
                max_val = dictionary[tup]
                max_x = tup[1]
    """
    if dictionary[(s,0)] > dictionary[(s,1)]:
        max_x = 0
    else:
        max_x = 1
    return max_x


def epsilon_greedy_on(Q, s, epsilon):
    max_A = max_dict(Q, s)
    action = random_action(max_A, epsilon)
    return action


def cut_into_interals(observation, minim, maxim, number_of_intervals, obs_idx):
    # minim --> minimum box boundary, but we check to infinity on either side.
    # maxim --> maximum box boundary, ""
    # This will cut the continuous valued observation into intervals, then
    # return which interval the observation is in.
    # Ex: -1.5
    # (-inf -2.4], (-2.4,-1.2], (-1.2,0], (0, 1.2], (1.2,2.4], [2.4,+inf)
    #      1            2           3        4          5          6
    # -1.5 is in the 2nd interval
    # return 2
    delta = (maxim - (-1)*(maxim))/number_of_intervals
    v_delta = np.arange(minim+delta, maxim, delta)
    v_delta_shape = v_delta.shape[0]
    v_delta = v_delta.reshape(1, v_delta_shape)
    #print(obs_idx, " : ",v_delta)
    for i in range(v_delta_shape-1):
        if observation[0][obs_idx] <= v_delta[0][0]:
            s1 = 1
            break
        if (v_delta[0][i+1] >= observation[0][obs_idx]) and (observation[0][obs_idx] > v_delta[0][i]):
            s1 = i+1+1
            break
    if observation[0][obs_idx] >= v_delta[0][-1]:
        s1 = number_of_intervals
    return s1


def get_state(observation):
    # observation --> vector of observations
    # This function uses the observations and the binning to get the state
    # in a 4D box.
    # Things we know: the episode ends if the angle of the pole is >= 15 degrees
    # the episode ends if the position of the cart is >= 2.4
    # observations = [position, velocity, angle, rotation rate]

    # Position interval: (-2.4, 2.4)
    pos1 = cut_into_interals(observation, -2.4, 2.4, 10, 0)
    # velocity interval
    vel1 = cut_into_interals(observation, -2, 2, 10, 1)
    # angle interval
    ang1 = cut_into_interals(observation, -0.4, 0.4, 10, 2)
    # rotation rate interval
    rot1 = cut_into_interals(observation, -3.5, 3.5, 10, 3)

    state = int( str(pos1)+str(vel1)+str(ang1)+str(rot1) )
    return state


def play_episode(env, Q, episode_idx):
    done = False
    #env = gym.make('CartPole-v1')

    # Chance of exploration
    #epsilon = 0.1/((episode_idx/5000)+1)
    # when /10, and when /1, ~1000 episodes is when improvement started to wane
    # multiplying episode_idx by 4.5 resulted in a nice learning curve
    epsilon = 1.0/((np.sqrt((episode_idx) + 1))**(4/3))

    # Start position
    observation = env.reset()
    observation = observation.reshape(1,4)
    s = get_state(observation)

    # Starting action
    a = epsilon_greedy_on(Q, s, epsilon)

    num = 0
    tot_r = 1
    alpha = 0.1
    while not done:
        num += 1
        observation, r, done, _ = env.step(a)
        observation = observation.reshape(1,4)
        s_prime = get_state(observation)
        a_prime = epsilon_greedy_on(Q, s, epsilon)

        if done and num < 199:
            r = -400

        #max_A = max_dict(Q,s_prime)
        #max_Q = Q[(s_prime, max_A)]
        if Q[(s_prime,0)] >= Q[(s_prime,1)]:
            max_Q = Q[(s_prime,0)]
        else:
            max_Q = Q[(s_prime,1)]

        Q[(s,a)] = Q[(s,a)] + (alpha)*(r + GAMMA*max_Q - Q[(s,a)])

        a = a_prime
        s = s_prime
        tot_r += r

    return Q, num+1
    # reaching 200 reward is solving the game
    # Rather than using each time step as a +1 reward, we redefine
    # the reward as -[large number] upon failing
    # if tot_r < 200:
    #     r = -500


def main(N=100):
    # Initialize the action-value function Q, and an arbitrary policy
    # There are 10**4=10,000 possible states
    # There are 2 possible actions
    # So a total of 20,000 possible (s,a) pairs
    Q = {}
    policy = {}
    for i in range(1,11,1):
        for j in range(1,11,1):
            for k in range(1,11,1):
                for l in range(1,11,1):
                    st = int( str(i)+str(j)+str(k)+str(l) )
                    Q[(st,0)] = np.random.uniform(-1,1)
                    Q[(st,1)] = np.random.uniform(-1,1)
                    policy[st] = 0

    # Practice/Train
    env = gym.make("CartPole-v0")
    tota = 0
    y_vals = []
    x_vals = []
    for i_episode in range(N):
        if (i_episode%100 == 0) and (i_episode!=0):
            x_vals.append(i_episode)
            y_vals.append(tota/100)
            print("episode = ", i_episode)
            print(tota/100)
            tota = 0
        Q, totR = play_episode(env, Q, i_episode)
        tota += totR

    plt.plot(x_vals, y_vals)
    plt.show()

    ans = input("continue to the test?")
    if ans.lower() in ["y","yes"]:
        pass
    else:
        raise ValueError

    # Make optimal policy
    for i in range(1,11,1):
        for j in range(1,11,1):
            for k in range(1,11,1):
                for l in range(1,11,1):
                    st = int( str(i)+str(j)+str(k)+str(l) )
                    if Q[(st,0)] >= Q[(st,1)]:
                        policy[st] = 0
                    else:
                        policy[st] = 1

    # Test
    tot_r = 0
    for i in range(100):
        env = gym.make("CartPole-v0")
        done = False

        # Start position
        observation = env.reset()
        observation = observation.reshape(1,4)
        s = get_state(observation)

        # Starting action
        a = policy[s]

        num = 0
        while not done:
            if i == 1:
                env.render()
            num += 1
            observation, r, done, _ = env.step(a)
            observation = observation.reshape(1,4)
            s_prime = get_state(observation)
            a_prime = policy[s_prime]
            a = a_prime
            s = s_prime
            tot_r += r
        #tot_r = (1/10)*tot_r
        if i == 1:
            env.close()

    print("tot reward = ", tot_r/100)

    if (0):  #tot_r/100 > 195:
        df = pd.DataFrame()
        df = df.from_dict(policy, orient="index").reset_index()
        df.to_csv("C:/Users/Jyler/Documents/ProgrammingProjects/reinforcement/cart_pole_bins_solved.csv", index=False)
        print("Saved")
    #return tot_r


def is_learning():
    df = pd.read_csv("C:/Users/Jyler/Documents/ProgrammingProjects/reinforcement/cart_pole_bins_solved.csv")
    policy = df.set_index("index").T.to_dict("list")
    #return policy
    #print(df.head())

    env = gym.make("CartPole-v1")
    done = False

    # Start position
    observation = env.reset()
    observation = observation.reshape(1,4)
    s = get_state(observation)

    # Starting action
    a = policy[s][0]

    tot_r = 0
    num = 0
    while not done:
        env.render()
        num += 1
        observation, r, done, _ = env.step(a)
        observation = observation.reshape(1,4)
        s_prime = get_state(observation)
        a_prime = policy[s_prime][0]
        a = a_prime
        s = s_prime
        tot_r += r
    env.close()
    print(tot_r)
mit
7,232,982,705,181,853,000
22.633452
111
0.622045
false
2.54835
false
false
false
RickyCook/DockCI
dockci/views/project.py
2
2165
""" Views related to project management """
from flask import abort, redirect, render_template, request
from flask_security import current_user

from dockci.api.job import filter_jobs_by_request
from dockci.models.project import Project
from dockci.server import APP
from dockci.util import str2bool


def shields_io_sanitize(text):
    """ Replace chars in shields.io fields """
    return text.replace('-', '--').replace('_', '__').replace(' ', '_')


@APP.route('/project/<slug>.<extension>', methods=('GET',))
def project_shield_view(slug, extension):
    """ View to give shields for each project """
    project = Project.query.filter_by(slug=slug).first_or_404()
    if not (project.public or current_user.is_authenticated()):
        abort(404)

    try:
        query = '?style=%s' % request.args['style']
    except KeyError:
        query = ''

    return redirect(
        'https://img.shields.io/badge/'
        '{name}-{shield_status}-{shield_color}.{extension}{query}'.format(
            name=shields_io_sanitize(project.name),
            shield_status=shields_io_sanitize(project.shield_text),
            shield_color=shields_io_sanitize(project.shield_color),
            extension=extension,
            query=query,
        )
    )


@APP.route('/projects/<slug>', methods=('GET',))
def project_view(slug):
    """ View to display a project """
    project = Project.query.filter_by(slug=slug).first_or_404()
    if not (project.public or current_user.is_authenticated()):
        abort(404)

    page_size = int(request.args.get('page_size', 20))
    page = int(request.args.get('page', 1))
    jobs = filter_jobs_by_request(project).paginate(page, page_size)

    # Copied from filter_jobs_by_request :(
    try:
        versioned = request.values['versioned']
        if versioned == '':  # Acting as a switch
            versioned = True
        else:
            versioned = str2bool(versioned)
    except KeyError:
        versioned = False

    return render_template(
        'project.html',
        project=project,
        jobs=jobs,
        versioned=versioned,
        branch=request.values.get('branch', None),
    )
isc
-5,097,623,252,212,684,000
27.486842
74
0.625866
false
3.81162
false
false
false
Triv90/Heat
heat/tests/test_api_openstack_v1.py
1
71334
# vim: tabstop=4 shiftwidth=4 softtabstop=4 # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import json import unittest import mox from nose.plugins.attrib import attr from oslo.config import cfg import webob.exc from heat.common import context from heat.common import identifier from heat.openstack.common import rpc import heat.openstack.common.rpc.common as rpc_common from heat.common.wsgi import Request from heat.common import urlfetch import heat.api.openstack.v1 as api_v1 import heat.api.openstack.v1.stacks as stacks import heat.api.openstack.v1.resources as resources import heat.api.openstack.v1.events as events @attr(tag=['unit', 'api-openstack-v1']) @attr(speed='fast') class InstantiationDataTest(unittest.TestCase): def setUp(self): self.m = mox.Mox() def tearDown(self): self.m.UnsetStubs() def test_format_parse(self): data = {"key1": ["val1[0]", "val1[1]"], "key2": "val2"} json_repr = '{ "key1": [ "val1[0]", "val1[1]" ], "key2": "val2" }' parsed = stacks.InstantiationData.format_parse(json_repr, 'foo') self.assertEqual(parsed, data) def test_format_parse_invalid(self): self.assertRaises(webob.exc.HTTPBadRequest, stacks.InstantiationData.format_parse, '!@#$%^&not json', 'Garbage') def test_stack_name(self): body = {'stack_name': 'wibble'} data = stacks.InstantiationData(body) self.assertEqual(data.stack_name(), 'wibble') def test_stack_name_missing(self): body = {'not the stack_name': 'wibble'} data = stacks.InstantiationData(body) self.assertRaises(webob.exc.HTTPBadRequest, data.stack_name) def test_template_inline(self): template = {'foo': 'bar', 'blarg': 'wibble'} body = {'template': template} data = stacks.InstantiationData(body) self.assertEqual(data.template(), template) def test_template_string_json(self): template = '{"foo": "bar", "blarg": "wibble"}' body = {'template': template} data = stacks.InstantiationData(body) self.assertEqual(data.template(), json.loads(template)) def test_template_string_yaml(self): template = '''foo: bar blarg: wibble ''' parsed = {u'HeatTemplateFormatVersion': u'2012-12-12', u'Mappings': {}, u'Outputs': {}, u'Parameters': {}, u'Resources': {}, u'blarg': u'wibble', u'foo': u'bar'} body = {'template': template} data = stacks.InstantiationData(body) self.assertEqual(data.template(), parsed) def test_template_url(self): template = {'foo': 'bar', 'blarg': 'wibble'} url = 'http://example.com/template' body = {'template_url': url} data = stacks.InstantiationData(body) self.m.StubOutWithMock(urlfetch, 'get') urlfetch.get(url).AndReturn(json.dumps(template)) self.m.ReplayAll() self.assertEqual(data.template(), template) self.m.VerifyAll() def test_template_priority(self): template = {'foo': 'bar', 'blarg': 'wibble'} url = 'http://example.com/template' body = {'template': template, 'template_url': url} data = stacks.InstantiationData(body) self.m.StubOutWithMock(urlfetch, 'get') self.m.ReplayAll() self.assertEqual(data.template(), template) self.m.VerifyAll() def test_template_missing(self): template = {'foo': 'bar', 'blarg': 'wibble'} body = {'not the template': template} 
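        # neither 'template' nor 'template_url' is supplied here, so data.template() should raise HTTPBadRequest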
data = stacks.InstantiationData(body) self.assertRaises(webob.exc.HTTPBadRequest, data.template) def test_user_params(self): params = {'foo': 'bar', 'blarg': 'wibble'} body = {'parameters': params} data = stacks.InstantiationData(body) self.assertEqual(data.user_params(), params) def test_user_params_missing(self): params = {'foo': 'bar', 'blarg': 'wibble'} body = {'not the parameters': params} data = stacks.InstantiationData(body) self.assertEqual(data.user_params(), {}) def test_args(self): body = { 'parameters': {}, 'stack_name': 'foo', 'template': {}, 'template_url': 'http://example.com/', 'timeout_mins': 60, } data = stacks.InstantiationData(body) self.assertEqual(data.args(), {'timeout_mins': 60}) class ControllerTest(object): """ Common utilities for testing API Controllers. """ def __init__(self, *args, **kwargs): super(ControllerTest, self).__init__(*args, **kwargs) self.maxDiff = None self.m = mox.Mox() cfg.CONF.set_default('engine_topic', 'engine') cfg.CONF.set_default('host', 'host') self.topic = '%s.%s' % (cfg.CONF.engine_topic, cfg.CONF.host) self.api_version = '1.0' self.tenant = 't' def tearDown(self): self.m.UnsetStubs() def _create_context(self, user='api_test_user'): ctx = context.get_admin_context() self.m.StubOutWithMock(ctx, 'username') ctx.username = user self.m.StubOutWithMock(ctx, 'tenant_id') ctx.tenant_id = self.tenant return ctx def _environ(self, path): return { 'SERVER_NAME': 'heat.example.com', 'SERVER_PORT': 8004, 'SCRIPT_NAME': '/v1', 'PATH_INFO': '/%s' % self.tenant + path, 'wsgi.url_scheme': 'http', } def _simple_request(self, path, method='GET'): environ = self._environ(path) environ['REQUEST_METHOD'] = method req = Request(environ) req.context = self._create_context() return req def _get(self, path): return self._simple_request(path) def _delete(self, path): return self._simple_request(path, method='DELETE') def _data_request(self, path, data, content_type='application/json', method='POST'): environ = self._environ(path) environ['REQUEST_METHOD'] = method req = Request(environ) req.context = self._create_context() req.body = data return req def _post(self, path, data, content_type='application/json'): return self._data_request(path, data, content_type) def _put(self, path, data, content_type='application/json'): return self._data_request(path, data, content_type, method='PUT') def _url(self, id): host = 'heat.example.com:8004' path = '/v1/%(tenant)s/stacks/%(stack_name)s/%(stack_id)s%(path)s' % id return 'http://%s%s' % (host, path) @attr(tag=['unit', 'api-openstack-v1', 'StackController']) @attr(speed='fast') class StackControllerTest(ControllerTest, unittest.TestCase): ''' Tests the API class which acts as the WSGI controller, the endpoint processing API requests after they are routed ''' def setUp(self): # Create WSGI controller instance class DummyConfig(): bind_port = 8004 cfgopts = DummyConfig() self.controller = stacks.StackController(options=cfgopts) def test_index(self): req = self._get('/stacks') identity = identifier.HeatIdentifier(self.tenant, 'wordpress', '1') engine_resp = [ { u'stack_identity': dict(identity), u'updated_time': u'2012-07-09T09:13:11Z', u'template_description': u'blah', u'description': u'blah', u'stack_status_reason': u'Stack successfully created', u'creation_time': u'2012-07-09T09:12:45Z', u'stack_name': identity.stack_name, u'stack_status': u'CREATE_COMPLETE', u'parameters': {}, u'outputs': [], u'notification_topics': [], u'capabilities': [], u'disable_rollback': True, u'timeout_mins': 60, } ] self.m.StubOutWithMock(rpc, 
'call') rpc.call(req.context, self.topic, {'method': 'list_stacks', 'args': {}, 'version': self.api_version}, None).AndReturn(engine_resp) self.m.ReplayAll() result = self.controller.index(req, tenant_id=identity.tenant) expected = { 'stacks': [ { 'links': [{"href": self._url(identity), "rel": "self"}], 'id': '1', u'updated_time': u'2012-07-09T09:13:11Z', u'description': u'blah', u'stack_status_reason': u'Stack successfully created', u'creation_time': u'2012-07-09T09:12:45Z', u'stack_name': u'wordpress', u'stack_status': u'CREATE_COMPLETE' } ] } self.assertEqual(result, expected) self.m.VerifyAll() def test_index_rmt_aterr(self): req = self._get('/stacks') self.m.StubOutWithMock(rpc, 'call') rpc.call(req.context, self.topic, {'method': 'list_stacks', 'args': {}, 'version': self.api_version}, None).AndRaise(rpc_common.RemoteError("AttributeError")) self.m.ReplayAll() self.assertRaises(webob.exc.HTTPBadRequest, self.controller.index, req, tenant_id=self.tenant) self.m.VerifyAll() def test_index_rmt_interr(self): req = self._get('/stacks') self.m.StubOutWithMock(rpc, 'call') rpc.call(req.context, self.topic, {'method': 'list_stacks', 'args': {}, 'version': self.api_version}, None).AndRaise(rpc_common.RemoteError("Exception")) self.m.ReplayAll() self.assertRaises(webob.exc.HTTPInternalServerError, self.controller.index, req, tenant_id=self.tenant) self.m.VerifyAll() def test_create(self): identity = identifier.HeatIdentifier(self.tenant, 'wordpress', '1') template = {u'Foo': u'bar'} json_template = json.dumps(template) parameters = {u'InstanceType': u'm1.xlarge'} body = {'template': template, 'stack_name': identity.stack_name, 'parameters': parameters, 'timeout_mins': 30} req = self._post('/stacks', json.dumps(body)) self.m.StubOutWithMock(rpc, 'call') rpc.call(req.context, self.topic, {'method': 'create_stack', 'args': {'stack_name': identity.stack_name, 'template': template, 'params': parameters, 'args': {'timeout_mins': 30}}, 'version': self.api_version}, None).AndReturn(dict(identity)) self.m.ReplayAll() try: response = self.controller.create(req, tenant_id=identity.tenant, body=body) except webob.exc.HTTPCreated as created: self.assertEqual(created.location, self._url(identity)) else: self.fail('HTTPCreated not raised') self.m.VerifyAll() def test_create_err_rpcerr(self): stack_name = "wordpress" template = {u'Foo': u'bar'} parameters = {u'InstanceType': u'm1.xlarge'} json_template = json.dumps(template) body = {'template': template, 'stack_name': stack_name, 'parameters': parameters, 'timeout_mins': 30} req = self._post('/stacks', json.dumps(body)) self.m.StubOutWithMock(rpc, 'call') rpc.call(req.context, self.topic, {'method': 'create_stack', 'args': {'stack_name': stack_name, 'template': template, 'params': parameters, 'args': {'timeout_mins': 30}}, 'version': self.api_version}, None).AndRaise(rpc_common.RemoteError("AttributeError")) self.m.ReplayAll() self.assertRaises(webob.exc.HTTPBadRequest, self.controller.create, req, tenant_id=self.tenant, body=body) self.m.VerifyAll() def test_create_err_existing(self): stack_name = "wordpress" template = {u'Foo': u'bar'} parameters = {u'InstanceType': u'm1.xlarge'} json_template = json.dumps(template) body = {'template': template, 'stack_name': stack_name, 'parameters': parameters, 'timeout_mins': 30} req = self._post('/stacks', json.dumps(body)) self.m.StubOutWithMock(rpc, 'call') rpc.call(req.context, self.topic, {'method': 'create_stack', 'args': {'stack_name': stack_name, 'template': template, 'params': parameters, 'args': 
{'timeout_mins': 30}}, 'version': self.api_version}, None).AndRaise(rpc_common.RemoteError("StackExists")) self.m.ReplayAll() self.assertRaises(webob.exc.HTTPConflict, self.controller.create, req, tenant_id=self.tenant, body=body) self.m.VerifyAll() def test_create_err_engine(self): stack_name = "wordpress" template = {u'Foo': u'bar'} parameters = {u'InstanceType': u'm1.xlarge'} json_template = json.dumps(template) body = {'template': template, 'stack_name': stack_name, 'parameters': parameters, 'timeout_mins': 30} req = self._post('/stacks', json.dumps(body)) self.m.StubOutWithMock(rpc, 'call') rpc.call(req.context, self.topic, {'method': 'create_stack', 'args': {'stack_name': stack_name, 'template': template, 'params': parameters, 'args': {'timeout_mins': 30}}, 'version': self.api_version}, None).AndRaise(rpc_common.RemoteError( 'StackValidationFailed', 'Something went wrong')) self.m.ReplayAll() self.assertRaises(webob.exc.HTTPBadRequest, self.controller.create, req, tenant_id=self.tenant, body=body) self.m.VerifyAll() def test_lookup(self): identity = identifier.HeatIdentifier(self.tenant, 'wordpress', '1') req = self._get('/stacks/%(stack_name)s' % identity) self.m.StubOutWithMock(rpc, 'call') rpc.call(req.context, self.topic, {'method': 'identify_stack', 'args': {'stack_name': identity.stack_name}, 'version': self.api_version}, None).AndReturn(identity) self.m.ReplayAll() try: result = self.controller.lookup(req, tenant_id=identity.tenant, stack_name=identity.stack_name) except webob.exc.HTTPFound as found: self.assertEqual(found.location, self._url(identity)) else: self.fail('No redirect generated') self.m.VerifyAll() def test_lookup_arn(self): identity = identifier.HeatIdentifier(self.tenant, 'wordpress', '1') req = self._get('/stacks%s' % identity.arn_url_path()) self.m.ReplayAll() try: result = self.controller.lookup(req, tenant_id=identity.tenant, stack_name=identity.arn()) except webob.exc.HTTPFound as found: self.assertEqual(found.location, self._url(identity)) else: self.fail('No redirect generated') self.m.VerifyAll() def test_lookup_nonexistant(self): stack_name = 'wibble' req = self._get('/stacks/%(stack_name)s' % locals()) self.m.StubOutWithMock(rpc, 'call') rpc.call(req.context, self.topic, {'method': 'identify_stack', 'args': {'stack_name': stack_name}, 'version': self.api_version}, None).AndRaise(rpc_common.RemoteError("StackNotFound")) self.m.ReplayAll() self.assertRaises(webob.exc.HTTPNotFound, self.controller.lookup, req, tenant_id=self.tenant, stack_name=stack_name) self.m.VerifyAll() def test_lookup_resource(self): identity = identifier.HeatIdentifier(self.tenant, 'wordpress', '1') req = self._get('/stacks/%(stack_name)s/resources' % identity) self.m.StubOutWithMock(rpc, 'call') rpc.call(req.context, self.topic, {'method': 'identify_stack', 'args': {'stack_name': identity.stack_name}, 'version': self.api_version}, None).AndReturn(identity) self.m.ReplayAll() try: result = self.controller.lookup(req, tenant_id=identity.tenant, stack_name=identity.stack_name, path='resources') except webob.exc.HTTPFound as found: self.assertEqual(found.location, self._url(identity) + '/resources') else: self.fail('No redirect generated') self.m.VerifyAll() def test_lookup_resource_nonexistant(self): stack_name = 'wibble' req = self._get('/stacks/%(stack_name)s/resources' % locals()) self.m.StubOutWithMock(rpc, 'call') rpc.call(req.context, self.topic, {'method': 'identify_stack', 'args': {'stack_name': stack_name}, 'version': self.api_version}, 
None).AndRaise(rpc_common.RemoteError("StackNotFound")) self.m.ReplayAll() self.assertRaises(webob.exc.HTTPNotFound, self.controller.lookup, req, tenant_id=self.tenant, stack_name=stack_name, path='resources') self.m.VerifyAll() def test_show(self): identity = identifier.HeatIdentifier(self.tenant, 'wordpress', '6') req = self._get('/stacks/%(stack_name)s/%(stack_id)s' % identity) parameters = {u'DBUsername': u'admin', u'LinuxDistribution': u'F17', u'InstanceType': u'm1.large', u'DBRootPassword': u'admin', u'DBPassword': u'admin', u'DBName': u'wordpress'} outputs = [{u'output_key': u'WebsiteURL', u'description': u'URL for Wordpress wiki', u'output_value': u'http://10.0.0.8/wordpress'}] engine_resp = [ { u'stack_identity': dict(identity), u'updated_time': u'2012-07-09T09:13:11Z', u'parameters': parameters, u'outputs': outputs, u'stack_status_reason': u'Stack successfully created', u'creation_time': u'2012-07-09T09:12:45Z', u'stack_name': identity.stack_name, u'notification_topics': [], u'stack_status': u'CREATE_COMPLETE', u'description': u'blah', u'disable_rollback': True, u'timeout_mins':60, u'capabilities': [], } ] self.m.StubOutWithMock(rpc, 'call') rpc.call(req.context, self.topic, {'method': 'show_stack', 'args': {'stack_identity': dict(identity)}, 'version': self.api_version}, None).AndReturn(engine_resp) self.m.ReplayAll() response = self.controller.show(req, tenant_id=identity.tenant, stack_name=identity.stack_name, stack_id=identity.stack_id) expected = { 'stack': { 'links': [{"href": self._url(identity), "rel": "self"}], 'id': '6', u'updated_time': u'2012-07-09T09:13:11Z', u'parameters': parameters, u'outputs': outputs, u'description': u'blah', u'stack_status_reason': u'Stack successfully created', u'creation_time': u'2012-07-09T09:12:45Z', u'stack_name': identity.stack_name, u'stack_status': u'CREATE_COMPLETE', u'capabilities': [], u'notification_topics': [], u'disable_rollback': True, u'timeout_mins': 60, } } self.assertEqual(response, expected) self.m.VerifyAll() def test_show_notfound(self): identity = identifier.HeatIdentifier(self.tenant, 'wibble', '6') req = self._get('/stacks/%(stack_name)s/%(stack_id)s' % identity) self.m.StubOutWithMock(rpc, 'call') rpc.call(req.context, self.topic, {'method': 'show_stack', 'args': {'stack_identity': dict(identity)}, 'version': self.api_version}, None).AndRaise(rpc_common.RemoteError("StackNotFound")) self.m.ReplayAll() self.assertRaises(webob.exc.HTTPNotFound, self.controller.show, req, tenant_id=identity.tenant, stack_name=identity.stack_name, stack_id=identity.stack_id) self.m.VerifyAll() def test_show_invalidtenant(self): identity = identifier.HeatIdentifier('wibble', 'wordpress', '6') req = self._get('/stacks/%(stack_name)s/%(stack_id)s' % identity) self.m.StubOutWithMock(rpc, 'call') rpc.call(req.context, self.topic, {'method': 'show_stack', 'args': {'stack_identity': dict(identity)}, 'version': self.api_version}, None).AndRaise(rpc_common.RemoteError("InvalidTenant")) self.m.ReplayAll() self.assertRaises(webob.exc.HTTPForbidden, self.controller.show, req, tenant_id=identity.tenant, stack_name=identity.stack_name, stack_id=identity.stack_id) self.m.VerifyAll() def test_get_template(self): identity = identifier.HeatIdentifier(self.tenant, 'wordpress', '6') req = self._get('/stacks/%(stack_name)s/%(stack_id)s' % identity) template = {u'Foo': u'bar'} self.m.StubOutWithMock(rpc, 'call') rpc.call(req.context, self.topic, {'method': 'get_template', 'args': {'stack_identity': dict(identity)}, 'version': self.api_version}, 
None).AndReturn(template) self.m.ReplayAll() response = self.controller.template(req, tenant_id=identity.tenant, stack_name=identity.stack_name, stack_id=identity.stack_id) self.assertEqual(response, template) self.m.VerifyAll() def test_get_template_err_notfound(self): identity = identifier.HeatIdentifier(self.tenant, 'wordpress', '6') req = self._get('/stacks/%(stack_name)s/%(stack_id)s' % identity) template = {u'Foo': u'bar'} self.m.StubOutWithMock(rpc, 'call') rpc.call(req.context, self.topic, {'method': 'get_template', 'args': {'stack_identity': dict(identity)}, 'version': self.api_version}, None).AndRaise(rpc_common.RemoteError("StackNotFound")) self.m.ReplayAll() self.assertRaises(webob.exc.HTTPNotFound, self.controller.template, req, tenant_id=identity.tenant, stack_name=identity.stack_name, stack_id=identity.stack_id) self.m.VerifyAll() def test_update(self): identity = identifier.HeatIdentifier(self.tenant, 'wordpress', '6') stack_name = u'wordpress' stack_id = u'6' template = {u'Foo': u'bar'} json_template = json.dumps(template) parameters = {u'InstanceType': u'm1.xlarge'} body = {'template': template, 'parameters': parameters, 'timeout_mins': 30} req = self._put('/stacks/%(stack_name)s/%(stack_id)s' % identity, json.dumps(body)) self.m.StubOutWithMock(rpc, 'call') rpc.call(req.context, self.topic, {'method': 'update_stack', 'args': {'stack_identity': dict(identity), 'template': template, 'params': parameters, 'args': {'timeout_mins': 30}}, 'version': self.api_version}, None).AndReturn(dict(identity)) self.m.ReplayAll() self.assertRaises(webob.exc.HTTPAccepted, self.controller.update, req, tenant_id=identity.tenant, stack_name=identity.stack_name, stack_id=identity.stack_id, body=body) self.m.VerifyAll() def test_update_bad_name(self): identity = identifier.HeatIdentifier(self.tenant, 'wibble', '6') template = {u'Foo': u'bar'} json_template = json.dumps(template) parameters = {u'InstanceType': u'm1.xlarge'} body = {'template': template, 'parameters': parameters, 'timeout_mins': 30} req = self._put('/stacks/%(stack_name)s/%(stack_id)s' % identity, json.dumps(body)) self.m.StubOutWithMock(rpc, 'call') rpc.call(req.context, self.topic, {'method': 'update_stack', 'args': {'stack_identity': dict(identity), 'template': template, 'params': parameters, 'args': {'timeout_mins': 30}}, 'version': self.api_version}, None).AndRaise(rpc_common.RemoteError("StackNotFound")) self.m.ReplayAll() self.assertRaises(webob.exc.HTTPNotFound, self.controller.update, req, tenant_id=identity.tenant, stack_name=identity.stack_name, stack_id=identity.stack_id, body=body) self.m.VerifyAll() def test_delete(self): identity = identifier.HeatIdentifier(self.tenant, 'wordpress', '6') template = {u'Foo': u'bar'} json_template = json.dumps(template) parameters = {u'InstanceType': u'm1.xlarge'} body = {'template': template, 'parameters': parameters, 'timeout_mins': 30} req = self._delete('/stacks/%(stack_name)s/%(stack_id)s' % identity) self.m.StubOutWithMock(rpc, 'call') # Engine returns None when delete successful rpc.call(req.context, self.topic, {'method': 'delete_stack', 'args': {'stack_identity': dict(identity)}, 'version': self.api_version}, None).AndReturn(None) self.m.ReplayAll() self.assertRaises(webob.exc.HTTPNoContent, self.controller.delete, req, tenant_id=identity.tenant, stack_name=identity.stack_name, stack_id=identity.stack_id) self.m.VerifyAll() def test_delete_bad_name(self): identity = identifier.HeatIdentifier(self.tenant, 'wibble', '6') template = {u'Foo': u'bar'} json_template = 
json.dumps(template) parameters = {u'InstanceType': u'm1.xlarge'} body = {'template': template, 'parameters': parameters, 'timeout_mins': 30} req = self._delete('/stacks/%(stack_name)s/%(stack_id)s' % identity) self.m.StubOutWithMock(rpc, 'call') # Engine returns None when delete successful rpc.call(req.context, self.topic, {'method': 'delete_stack', 'args': {'stack_identity': dict(identity)}, 'version': self.api_version}, None).AndRaise(rpc_common.RemoteError("StackNotFound")) self.m.ReplayAll() self.assertRaises(webob.exc.HTTPNotFound, self.controller.delete, req, tenant_id=identity.tenant, stack_name=identity.stack_name, stack_id=identity.stack_id) self.m.VerifyAll() def test_validate_template(self): template = {u'Foo': u'bar'} json_template = json.dumps(template) body = {'template': template} req = self._post('/validate', json.dumps(body)) engine_response = { u'Description': u'blah', u'Parameters': [ { u'NoEcho': u'false', u'ParameterKey': u'InstanceType', u'Description': u'Instance type' } ] } self.m.StubOutWithMock(rpc, 'call') rpc.call(req.context, self.topic, {'method': 'validate_template', 'args': {'template': template}, 'version': self.api_version}, None).AndReturn(engine_response) self.m.ReplayAll() response = self.controller.validate_template(req, tenant_id=self.tenant, body=body) self.assertEqual(response, engine_response) self.m.VerifyAll() def test_validate_template_error(self): template = {u'Foo': u'bar'} json_template = json.dumps(template) body = {'template': template} req = self._post('/validate', json.dumps(body)) self.m.StubOutWithMock(rpc, 'call') rpc.call(req.context, self.topic, {'method': 'validate_template', 'args': {'template': template}, 'version': self.api_version}, None).AndReturn({'Error': 'fubar'}) self.m.ReplayAll() self.assertRaises(webob.exc.HTTPBadRequest, self.controller.validate_template, req, tenant_id=self.tenant, body=body) self.m.VerifyAll() def test_list_resource_types(self): req = self._get('/resource_types') engine_response = ['AWS::EC2::Instance', 'AWS::EC2::EIP', 'AWS::EC2::EIPAssociation'] self.m.StubOutWithMock(rpc, 'call') rpc.call(req.context, self.topic, {'method': 'list_resource_types', 'args': {}, 'version': self.api_version}, None).AndReturn(engine_response) self.m.ReplayAll() response = self.controller.list_resource_types(req, tenant_id=self.tenant) self.assertEqual(response, {'resource_types': engine_response}) self.m.VerifyAll() def test_list_resource_types_error(self): req = self._get('/resource_types') engine_response = ['AWS::EC2::Instance', 'AWS::EC2::EIP', 'AWS::EC2::EIPAssociation'] self.m.StubOutWithMock(rpc, 'call') rpc.call(req.context, self.topic, {'method': 'list_resource_types', 'args': {}, 'version': self.api_version}, None).AndRaise(rpc_common.RemoteError("ValueError")) self.m.ReplayAll() self.assertRaises(webob.exc.HTTPInternalServerError, self.controller.list_resource_types, req, tenant_id=self.tenant) self.m.VerifyAll() @attr(tag=['unit', 'api-openstack-v1', 'ResourceController']) @attr(speed='fast') class ResourceControllerTest(ControllerTest, unittest.TestCase): ''' Tests the API class which acts as the WSGI controller, the endpoint processing API requests after they are routed ''' def setUp(self): # Create WSGI controller instance class DummyConfig(): bind_port = 8004 cfgopts = DummyConfig() self.controller = resources.ResourceController(options=cfgopts) def test_index(self): res_name = 'WikiDatabase' stack_identity = identifier.HeatIdentifier(self.tenant, 'wordpress', '1') res_identity = 
identifier.ResourceIdentifier(resource_name=res_name, **stack_identity) req = self._get(stack_identity._tenant_path() + '/resources') engine_resp = [ { u'resource_identity': dict(res_identity), u'stack_name': stack_identity.stack_name, u'logical_resource_id': res_name, u'resource_status_reason': None, u'updated_time': u'2012-07-23T13:06:00Z', u'stack_identity': stack_identity, u'resource_status': u'CREATE_COMPLETE', u'physical_resource_id': u'a3455d8c-9f88-404d-a85b-5315293e67de', u'resource_type': u'AWS::EC2::Instance', } ] self.m.StubOutWithMock(rpc, 'call') rpc.call(req.context, self.topic, {'method': 'list_stack_resources', 'args': {'stack_identity': stack_identity}, 'version': self.api_version}, None).AndReturn(engine_resp) self.m.ReplayAll() result = self.controller.index(req, tenant_id=self.tenant, stack_name=stack_identity.stack_name, stack_id=stack_identity.stack_id) expected = { 'resources': [{'links': [{'href': self._url(res_identity), 'rel': 'self'}, {'href': self._url(stack_identity), 'rel': 'stack'}], u'logical_resource_id': res_name, u'resource_status_reason': None, u'updated_time': u'2012-07-23T13:06:00Z', u'resource_status': u'CREATE_COMPLETE', u'physical_resource_id': u'a3455d8c-9f88-404d-a85b-5315293e67de', u'resource_type': u'AWS::EC2::Instance'}]} self.assertEqual(result, expected) self.m.VerifyAll() def test_index_nonexist(self): stack_identity = identifier.HeatIdentifier(self.tenant, 'rubbish', '1') req = self._get(stack_identity._tenant_path() + '/resources') self.m.StubOutWithMock(rpc, 'call') rpc.call(req.context, self.topic, {'method': 'list_stack_resources', 'args': {'stack_identity': stack_identity}, 'version': self.api_version}, None).AndRaise(rpc_common.RemoteError("StackNotFound")) self.m.ReplayAll() self.assertRaises(webob.exc.HTTPNotFound, self.controller.index, req, tenant_id=self.tenant, stack_name=stack_identity.stack_name, stack_id=stack_identity.stack_id) self.m.VerifyAll() def test_show(self): res_name = 'WikiDatabase' stack_identity = identifier.HeatIdentifier(self.tenant, 'wordpress', '6') res_identity = identifier.ResourceIdentifier(resource_name=res_name, **stack_identity) req = self._get(stack_identity._tenant_path()) engine_resp = { u'description': u'', u'resource_identity': dict(res_identity), u'stack_name': stack_identity.stack_name, u'logical_resource_id': res_name, u'resource_status_reason': None, u'updated_time': u'2012-07-23T13:06:00Z', u'stack_identity': dict(stack_identity), u'resource_status': u'CREATE_COMPLETE', u'physical_resource_id': u'a3455d8c-9f88-404d-a85b-5315293e67de', u'resource_type': u'AWS::EC2::Instance', u'metadata': {u'ensureRunning': u'true'} } self.m.StubOutWithMock(rpc, 'call') rpc.call(req.context, self.topic, {'method': 'describe_stack_resource', 'args': {'stack_identity': stack_identity, 'resource_name': res_name}, 'version': self.api_version}, None).AndReturn(engine_resp) self.m.ReplayAll() result = self.controller.show(req, tenant_id=self.tenant, stack_name=stack_identity.stack_name, stack_id=stack_identity.stack_id, resource_name=res_name) expected = { 'resource': { 'links': [ {'href': self._url(res_identity), 'rel': 'self'}, {'href': self._url(stack_identity), 'rel': 'stack'}, ], u'description': u'', u'logical_resource_id': res_name, u'resource_status_reason': None, u'updated_time': u'2012-07-23T13:06:00Z', u'resource_status': u'CREATE_COMPLETE', u'physical_resource_id': u'a3455d8c-9f88-404d-a85b-5315293e67de', u'resource_type': u'AWS::EC2::Instance', } } self.assertEqual(result, expected) self.m.VerifyAll() 
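    # the nonexistent stack/resource cases below check that engine RemoteErrors are mapped to HTTP 404 responses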
def test_show_nonexist(self): res_name = 'WikiDatabase' stack_identity = identifier.HeatIdentifier(self.tenant, 'rubbish', '1') res_identity = identifier.ResourceIdentifier(resource_name=res_name, **stack_identity) req = self._get(res_identity._tenant_path()) self.m.StubOutWithMock(rpc, 'call') rpc.call(req.context, self.topic, {'method': 'describe_stack_resource', 'args': {'stack_identity': stack_identity, 'resource_name': res_name}, 'version': self.api_version}, None).AndRaise(rpc_common.RemoteError("StackNotFound")) self.m.ReplayAll() self.assertRaises(webob.exc.HTTPNotFound, self.controller.show, req, tenant_id=self.tenant, stack_name=stack_identity.stack_name, stack_id=stack_identity.stack_id, resource_name=res_name) self.m.VerifyAll() def test_show_nonexist_resource(self): res_name = 'Wibble' stack_identity = identifier.HeatIdentifier(self.tenant, 'wordpress', '1') res_identity = identifier.ResourceIdentifier(resource_name=res_name, **stack_identity) req = self._get(res_identity._tenant_path()) self.m.StubOutWithMock(rpc, 'call') rpc.call(req.context, self.topic, {'method': 'describe_stack_resource', 'args': {'stack_identity': stack_identity, 'resource_name': res_name}, 'version': self.api_version}, None).AndRaise(rpc_common.RemoteError("ResourceNotFound")) self.m.ReplayAll() self.assertRaises(webob.exc.HTTPNotFound, self.controller.show, req, tenant_id=self.tenant, stack_name=stack_identity.stack_name, stack_id=stack_identity.stack_id, resource_name=res_name) self.m.VerifyAll() def test_show_uncreated_resource(self): res_name = 'WikiDatabase' stack_identity = identifier.HeatIdentifier(self.tenant, 'wordpress', '1') res_identity = identifier.ResourceIdentifier(resource_name=res_name, **stack_identity) req = self._get(res_identity._tenant_path()) self.m.StubOutWithMock(rpc, 'call') rpc.call(req.context, self.topic, {'method': 'describe_stack_resource', 'args': {'stack_identity': stack_identity, 'resource_name': res_name}, 'version': self.api_version}, None).AndRaise(rpc_common.RemoteError("ResourceNotAvailable")) self.m.ReplayAll() self.assertRaises(webob.exc.HTTPNotFound, self.controller.show, req, tenant_id=self.tenant, stack_name=stack_identity.stack_name, stack_id=stack_identity.stack_id, resource_name=res_name) self.m.VerifyAll() def test_metadata_show(self): res_name = 'WikiDatabase' stack_identity = identifier.HeatIdentifier(self.tenant, 'wordpress', '6') res_identity = identifier.ResourceIdentifier(resource_name=res_name, **stack_identity) req = self._get(stack_identity._tenant_path()) engine_resp = { u'description': u'', u'resource_identity': dict(res_identity), u'stack_name': stack_identity.stack_name, u'logical_resource_id': res_name, u'resource_status_reason': None, u'updated_time': u'2012-07-23T13:06:00Z', u'stack_identity': dict(stack_identity), u'resource_status': u'CREATE_COMPLETE', u'physical_resource_id': u'a3455d8c-9f88-404d-a85b-5315293e67de', u'resource_type': u'AWS::EC2::Instance', u'metadata': {u'ensureRunning': u'true'} } self.m.StubOutWithMock(rpc, 'call') rpc.call(req.context, self.topic, {'method': 'describe_stack_resource', 'args': {'stack_identity': stack_identity, 'resource_name': res_name}, 'version': self.api_version}, None).AndReturn(engine_resp) self.m.ReplayAll() result = self.controller.metadata(req, tenant_id=self.tenant, stack_name=stack_identity.stack_name, stack_id=stack_identity.stack_id, resource_name=res_name) expected = {'metadata': {u'ensureRunning': u'true'}} self.assertEqual(result, expected) self.m.VerifyAll() def 
test_metadata_show_nonexist(self): res_name = 'WikiDatabase' stack_identity = identifier.HeatIdentifier(self.tenant, 'rubbish', '1') res_identity = identifier.ResourceIdentifier(resource_name=res_name, **stack_identity) req = self._get(res_identity._tenant_path() + '/metadata') self.m.StubOutWithMock(rpc, 'call') rpc.call(req.context, self.topic, {'method': 'describe_stack_resource', 'args': {'stack_identity': stack_identity, 'resource_name': res_name}, 'version': self.api_version}, None).AndRaise(rpc_common.RemoteError("StackNotFound")) self.m.ReplayAll() self.assertRaises(webob.exc.HTTPNotFound, self.controller.metadata, req, tenant_id=self.tenant, stack_name=stack_identity.stack_name, stack_id=stack_identity.stack_id, resource_name=res_name) self.m.VerifyAll() def test_metadata_show_nonexist_resource(self): res_name = 'wibble' stack_identity = identifier.HeatIdentifier(self.tenant, 'wordpress', '1') res_identity = identifier.ResourceIdentifier(resource_name=res_name, **stack_identity) req = self._get(res_identity._tenant_path() + '/metadata') self.m.StubOutWithMock(rpc, 'call') rpc.call(req.context, self.topic, {'method': 'describe_stack_resource', 'args': {'stack_identity': stack_identity, 'resource_name': res_name}, 'version': self.api_version}, None).AndRaise(rpc_common.RemoteError("ResourceNotFound")) self.m.ReplayAll() self.assertRaises(webob.exc.HTTPNotFound, self.controller.metadata, req, tenant_id=self.tenant, stack_name=stack_identity.stack_name, stack_id=stack_identity.stack_id, resource_name=res_name) self.m.VerifyAll() @attr(tag=['unit', 'api-openstack-v1', 'EventController']) @attr(speed='fast') class EventControllerTest(ControllerTest, unittest.TestCase): ''' Tests the API class which acts as the WSGI controller, the endpoint processing API requests after they are routed ''' def setUp(self): # Create WSGI controller instance class DummyConfig(): bind_port = 8004 cfgopts = DummyConfig() self.controller = events.EventController(options=cfgopts) def test_resource_index(self): event_id = '42' res_name = 'WikiDatabase' stack_identity = identifier.HeatIdentifier(self.tenant, 'wordpress', '6') res_identity = identifier.ResourceIdentifier(resource_name=res_name, **stack_identity) ev_identity = identifier.EventIdentifier(event_id=event_id, **res_identity) req = self._get(stack_identity._tenant_path() + '/resources/' + res_name + '/events') engine_resp = [ { u'stack_name': u'wordpress', u'event_time': u'2012-07-23T13:05:39Z', u'stack_identity': dict(stack_identity), u'logical_resource_id': res_name, u'resource_status_reason': u'state changed', u'event_identity': dict(ev_identity), u'resource_status': u'IN_PROGRESS', u'physical_resource_id': None, u'resource_properties': {u'UserData': u'blah'}, u'resource_type': u'AWS::EC2::Instance', }, { u'stack_name': u'wordpress', u'event_time': u'2012-07-23T13:05:39Z', u'stack_identity': dict(stack_identity), u'logical_resource_id': 'SomeOtherResource', u'resource_status_reason': u'state changed', u'event_identity': dict(ev_identity), u'resource_status': u'IN_PROGRESS', u'physical_resource_id': None, u'resource_properties': {u'UserData': u'blah'}, u'resource_type': u'AWS::EC2::Instance', } ] self.m.StubOutWithMock(rpc, 'call') rpc.call(req.context, self.topic, {'method': 'list_events', 'args': {'stack_identity': stack_identity}, 'version': self.api_version}, None).AndReturn(engine_resp) self.m.ReplayAll() result = self.controller.index(req, tenant_id=self.tenant, stack_name=stack_identity.stack_name, stack_id=stack_identity.stack_id, 
resource_name=res_name) expected = { 'events': [ { 'id': event_id, 'links': [ {'href': self._url(ev_identity), 'rel': 'self'}, {'href': self._url(res_identity), 'rel': 'resource'}, {'href': self._url(stack_identity), 'rel': 'stack'}, ], u'logical_resource_id': res_name, u'resource_status_reason': u'state changed', u'event_time': u'2012-07-23T13:05:39Z', u'resource_status': u'IN_PROGRESS', u'physical_resource_id': None, } ] } self.assertEqual(result, expected) self.m.VerifyAll() def test_stack_index(self): event_id = '42' res_name = 'WikiDatabase' stack_identity = identifier.HeatIdentifier(self.tenant, 'wordpress', '6') res_identity = identifier.ResourceIdentifier(resource_name=res_name, **stack_identity) ev_identity = identifier.EventIdentifier(event_id=event_id, **res_identity) req = self._get(stack_identity._tenant_path() + '/events') engine_resp = [ { u'stack_name': u'wordpress', u'event_time': u'2012-07-23T13:05:39Z', u'stack_identity': dict(stack_identity), u'logical_resource_id': res_name, u'resource_status_reason': u'state changed', u'event_identity': dict(ev_identity), u'resource_status': u'IN_PROGRESS', u'physical_resource_id': None, u'resource_properties': {u'UserData': u'blah'}, u'resource_type': u'AWS::EC2::Instance', } ] self.m.StubOutWithMock(rpc, 'call') rpc.call(req.context, self.topic, {'method': 'list_events', 'args': {'stack_identity': stack_identity}, 'version': self.api_version}, None).AndReturn(engine_resp) self.m.ReplayAll() result = self.controller.index(req, tenant_id=self.tenant, stack_name=stack_identity.stack_name, stack_id=stack_identity.stack_id) expected = { 'events': [ { 'id': event_id, 'links': [ {'href': self._url(ev_identity), 'rel': 'self'}, {'href': self._url(res_identity), 'rel': 'resource'}, {'href': self._url(stack_identity), 'rel': 'stack'}, ], u'logical_resource_id': res_name, u'resource_status_reason': u'state changed', u'event_time': u'2012-07-23T13:05:39Z', u'resource_status': u'IN_PROGRESS', u'physical_resource_id': None, } ] } self.assertEqual(result, expected) self.m.VerifyAll() def test_index_stack_nonexist(self): stack_identity = identifier.HeatIdentifier(self.tenant, 'wibble', '6') req = self._get(stack_identity._tenant_path() + '/events') self.m.StubOutWithMock(rpc, 'call') rpc.call(req.context, self.topic, {'method': 'list_events', 'args': {'stack_identity': stack_identity}, 'version': self.api_version}, None).AndRaise(rpc_common.RemoteError("StackNotFound")) self.m.ReplayAll() self.assertRaises(webob.exc.HTTPNotFound, self.controller.index, req, tenant_id=self.tenant, stack_name=stack_identity.stack_name, stack_id=stack_identity.stack_id) self.m.VerifyAll() def test_index_resource_nonexist(self): event_id = '42' res_name = 'WikiDatabase' stack_identity = identifier.HeatIdentifier(self.tenant, 'wordpress', '6') res_identity = identifier.ResourceIdentifier(resource_name=res_name, **stack_identity) ev_identity = identifier.EventIdentifier(event_id=event_id, **res_identity) req = self._get(stack_identity._tenant_path() + '/resources/' + res_name + '/events') engine_resp = [ { u'stack_name': u'wordpress', u'event_time': u'2012-07-23T13:05:39Z', u'stack_identity': dict(stack_identity), u'logical_resource_id': 'SomeOtherResource', u'resource_status_reason': u'state changed', u'event_identity': dict(ev_identity), u'resource_status': u'IN_PROGRESS', u'physical_resource_id': None, u'resource_properties': {u'UserData': u'blah'}, u'resource_type': u'AWS::EC2::Instance', } ] self.m.StubOutWithMock(rpc, 'call') rpc.call(req.context, self.topic, 
{'method': 'list_events', 'args': {'stack_identity': stack_identity}, 'version': self.api_version}, None).AndReturn(engine_resp) self.m.ReplayAll() self.assertRaises(webob.exc.HTTPNotFound, self.controller.index, req, tenant_id=self.tenant, stack_name=stack_identity.stack_name, stack_id=stack_identity.stack_id, resource_name=res_name) self.m.VerifyAll() def test_show(self): event_id = '42' res_name = 'WikiDatabase' stack_identity = identifier.HeatIdentifier(self.tenant, 'wordpress', '6') res_identity = identifier.ResourceIdentifier(resource_name=res_name, **stack_identity) ev1_identity = identifier.EventIdentifier(event_id='41', **res_identity) ev_identity = identifier.EventIdentifier(event_id=event_id, **res_identity) req = self._get(stack_identity._tenant_path() + '/resources/' + res_name + '/events/' + event_id) engine_resp = [ { u'stack_name': u'wordpress', u'event_time': u'2012-07-23T13:05:39Z', u'stack_identity': dict(stack_identity), u'logical_resource_id': res_name, u'resource_status_reason': u'state changed', u'event_identity': dict(ev1_identity), u'resource_status': u'IN_PROGRESS', u'physical_resource_id': None, u'resource_properties': {u'UserData': u'blah'}, u'resource_type': u'AWS::EC2::Instance', }, { u'stack_name': u'wordpress', u'event_time': u'2012-07-23T13:06:00Z', u'stack_identity': dict(stack_identity), u'logical_resource_id': res_name, u'resource_status_reason': u'state changed', u'event_identity': dict(ev_identity), u'resource_status': u'CREATE_COMPLETE', u'physical_resource_id': u'a3455d8c-9f88-404d-a85b-5315293e67de', u'resource_properties': {u'UserData': u'blah'}, u'resource_type': u'AWS::EC2::Instance', } ] self.m.StubOutWithMock(rpc, 'call') rpc.call(req.context, self.topic, {'method': 'list_events', 'args': {'stack_identity': stack_identity}, 'version': self.api_version}, None).AndReturn(engine_resp) self.m.ReplayAll() result = self.controller.show(req, tenant_id=self.tenant, stack_name=stack_identity.stack_name, stack_id=stack_identity.stack_id, resource_name=res_name, event_id=event_id) expected = { 'event': { 'id': event_id, 'links': [ {'href': self._url(ev_identity), 'rel': 'self'}, {'href': self._url(res_identity), 'rel': 'resource'}, {'href': self._url(stack_identity), 'rel': 'stack'}, ], u'logical_resource_id': res_name, u'resource_status_reason': u'state changed', u'event_time': u'2012-07-23T13:06:00Z', u'resource_status': u'CREATE_COMPLETE', u'physical_resource_id': u'a3455d8c-9f88-404d-a85b-5315293e67de', u'resource_type': u'AWS::EC2::Instance', u'resource_properties': {u'UserData': u'blah'}, } } self.assertEqual(result, expected) self.m.VerifyAll() def test_show_nonexist(self): event_id = '42' res_name = 'WikiDatabase' stack_identity = identifier.HeatIdentifier(self.tenant, 'wordpress', '6') res_identity = identifier.ResourceIdentifier(resource_name=res_name, **stack_identity) ev_identity = identifier.EventIdentifier(event_id='41', **res_identity) req = self._get(stack_identity._tenant_path() + '/resources/' + res_name + '/events/' + event_id) engine_resp = [ { u'stack_name': u'wordpress', u'event_time': u'2012-07-23T13:05:39Z', u'stack_identity': dict(stack_identity), u'logical_resource_id': res_name, u'resource_status_reason': u'state changed', u'event_identity': dict(ev_identity), u'resource_status': u'IN_PROGRESS', u'physical_resource_id': None, u'resource_properties': {u'UserData': u'blah'}, u'resource_type': u'AWS::EC2::Instance', } ] self.m.StubOutWithMock(rpc, 'call') rpc.call(req.context, self.topic, {'method': 'list_events', 'args': 
{'stack_identity': stack_identity}, 'version': self.api_version}, None).AndReturn(engine_resp) self.m.ReplayAll() self.assertRaises(webob.exc.HTTPNotFound, self.controller.show, req, tenant_id=self.tenant, stack_name=stack_identity.stack_name, stack_id=stack_identity.stack_id, resource_name=res_name, event_id=event_id) self.m.VerifyAll() def test_show_bad_resource(self): event_id = '42' res_name = 'WikiDatabase' stack_identity = identifier.HeatIdentifier(self.tenant, 'wordpress', '6') res_identity = identifier.ResourceIdentifier(resource_name=res_name, **stack_identity) ev_identity = identifier.EventIdentifier(event_id='41', **res_identity) req = self._get(stack_identity._tenant_path() + '/resources/' + res_name + '/events/' + event_id) engine_resp = [ { u'stack_name': u'wordpress', u'event_time': u'2012-07-23T13:05:39Z', u'stack_identity': dict(stack_identity), u'logical_resource_id': 'SomeOtherResourceName', u'resource_status_reason': u'state changed', u'event_identity': dict(ev_identity), u'resource_status': u'IN_PROGRESS', u'physical_resource_id': None, u'resource_properties': {u'UserData': u'blah'}, u'resource_type': u'AWS::EC2::Instance', } ] self.m.StubOutWithMock(rpc, 'call') rpc.call(req.context, self.topic, {'method': 'list_events', 'args': {'stack_identity': stack_identity}, 'version': self.api_version}, None).AndReturn(engine_resp) self.m.ReplayAll() self.assertRaises(webob.exc.HTTPNotFound, self.controller.show, req, tenant_id=self.tenant, stack_name=stack_identity.stack_name, stack_id=stack_identity.stack_id, resource_name=res_name, event_id=event_id) self.m.VerifyAll() def test_show_stack_nonexist(self): event_id = '42' res_name = 'WikiDatabase' stack_identity = identifier.HeatIdentifier(self.tenant, 'wibble', '6') req = self._get(stack_identity._tenant_path() + '/resources/' + res_name + '/events/' + event_id) self.m.StubOutWithMock(rpc, 'call') rpc.call(req.context, self.topic, {'method': 'list_events', 'args': {'stack_identity': stack_identity}, 'version': self.api_version}, None).AndRaise(rpc_common.RemoteError("StackNotFound")) self.m.ReplayAll() self.assertRaises(webob.exc.HTTPNotFound, self.controller.show, req, tenant_id=self.tenant, stack_name=stack_identity.stack_name, stack_id=stack_identity.stack_id, resource_name=res_name, event_id=event_id) self.m.VerifyAll() class RoutesTest(unittest.TestCase): def assertRoute(self, mapper, path, method, action, controller, params={}): route = mapper.match(path, {'REQUEST_METHOD': method}) self.assertIsNotNone(route) self.assertEqual(action, route['action']) self.assertEqual( controller, route['controller'].controller.__class__.__name__) del(route['action']) del(route['controller']) self.assertEqual(params, route) def setUp(self): self.m = api_v1.API({}).map def test_template_handling(self): self.assertRoute( self.m, '/aaaa/resource_types', 'GET', 'list_resource_types', 'StackController', { 'tenant_id': 'aaaa' }) self.assertRoute( self.m, '/aaaa/validate', 'POST', 'validate_template', 'StackController', { 'tenant_id': 'aaaa' }) def test_stack_collection(self): self.assertRoute( self.m, '/aaaa/stacks', 'GET', 'index', 'StackController', { 'tenant_id': 'aaaa' }) self.assertRoute( self.m, '/aaaa/stacks', 'POST', 'create', 'StackController', { 'tenant_id': 'aaaa' }) def test_stack_data(self): self.assertRoute( self.m, '/aaaa/stacks/teststack', 'GET', 'lookup', 'StackController', { 'tenant_id': 'aaaa', 'stack_name': 'teststack' }) self.assertRoute( self.m, '/aaaa/stacks/arn:openstack:heat::6548ab64fbda49deb188851a3b7d8c8b' 
':stacks/stack-1411-06/1c5d9bb2-3464-45e2-a728-26dfa4e1d34a', 'GET', 'lookup', 'StackController', { 'tenant_id': 'aaaa', 'stack_name': 'arn:openstack:heat:' ':6548ab64fbda49deb188851a3b7d8c8b:stacks/stack-1411-06/' '1c5d9bb2-3464-45e2-a728-26dfa4e1d34a' }) self.assertRoute( self.m, '/aaaa/stacks/teststack/resources', 'GET', 'lookup', 'StackController', { 'tenant_id': 'aaaa', 'stack_name': 'teststack', 'path': 'resources' }) self.assertRoute( self.m, '/aaaa/stacks/teststack/events', 'GET', 'lookup', 'StackController', { 'tenant_id': 'aaaa', 'stack_name': 'teststack', 'path': 'events' }) self.assertRoute( self.m, '/aaaa/stacks/teststack/bbbb', 'GET', 'show', 'StackController', { 'tenant_id': 'aaaa', 'stack_name': 'teststack', 'stack_id': 'bbbb', }) self.assertRoute( self.m, '/aaaa/stacks/teststack/bbbb/template', 'GET', 'template', 'StackController', { 'tenant_id': 'aaaa', 'stack_name': 'teststack', 'stack_id': 'bbbb', }) def test_stack_update_delete(self): self.assertRoute( self.m, '/aaaa/stacks/teststack/bbbb', 'PUT', 'update', 'StackController', { 'tenant_id': 'aaaa', 'stack_name': 'teststack', 'stack_id': 'bbbb', }) self.assertRoute( self.m, '/aaaa/stacks/teststack/bbbb', 'DELETE', 'delete', 'StackController', { 'tenant_id': 'aaaa', 'stack_name': 'teststack', 'stack_id': 'bbbb', }) def test_resources(self): self.assertRoute( self.m, '/aaaa/stacks/teststack/bbbb/resources', 'GET', 'index', 'ResourceController', { 'tenant_id': 'aaaa', 'stack_name': 'teststack', 'stack_id': 'bbbb' }) self.assertRoute( self.m, '/aaaa/stacks/teststack/bbbb/resources/cccc', 'GET', 'show', 'ResourceController', { 'tenant_id': 'aaaa', 'stack_name': 'teststack', 'stack_id': 'bbbb', 'resource_name': 'cccc' }) self.assertRoute( self.m, '/aaaa/stacks/teststack/bbbb/resources/cccc/metadata', 'GET', 'metadata', 'ResourceController', { 'tenant_id': 'aaaa', 'stack_name': 'teststack', 'stack_id': 'bbbb', 'resource_name': 'cccc' }) def test_events(self): self.assertRoute( self.m, '/aaaa/stacks/teststack/bbbb/events', 'GET', 'index', 'EventController', { 'tenant_id': 'aaaa', 'stack_name': 'teststack', 'stack_id': 'bbbb' }) self.assertRoute( self.m, '/aaaa/stacks/teststack/bbbb/resources/cccc/events', 'GET', 'index', 'EventController', { 'tenant_id': 'aaaa', 'stack_name': 'teststack', 'stack_id': 'bbbb', 'resource_name': 'cccc' }) self.assertRoute( self.m, '/aaaa/stacks/teststack/bbbb/resources/cccc/events/dddd', 'GET', 'show', 'EventController', { 'tenant_id': 'aaaa', 'stack_name': 'teststack', 'stack_id': 'bbbb', 'resource_name': 'cccc', 'event_id': 'dddd' })
apache-2.0
6,338,218,931,821,207,000
37.831791
79
0.496664
false
4.293349
true
false
false
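The event-controller and routing tests in the record above all follow the same record/replay/verify cycle: rpc.call is stubbed out, the expected call is recorded with AndReturn or AndRaise, ReplayAll switches to replay mode, and VerifyAll asserts that every recorded call actually happened. A minimal sketch of that cycle, assuming self.m in those tests is a mox.Mox() instance (the Engine class and fetch function below are illustrative stand-ins, not part of Heat):

import mox


class Engine(object):
    # Hypothetical collaborator whose method gets stubbed out.
    def list_events(self, stack_id):
        raise RuntimeError('would normally hit the real engine')


def fetch(engine, stack_id):
    return engine.list_events(stack_id)


m = mox.Mox()
engine = Engine()

# Record phase: declare the exact call we expect and its canned return value.
m.StubOutWithMock(engine, 'list_events')
engine.list_events('stack-1').AndReturn([{'id': '42'}])

# Replay phase: from here on, real calls are matched against the recording.
m.ReplayAll()
assert fetch(engine, 'stack-1') == [{'id': '42'}]

# Verify phase: fails if any recorded call was never made.
m.VerifyAll()
m.UnsetStubs()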
gcblue/gcblue
scripts/PointDefense.py
1
2779
from UnitCommands import * from GroupCommands import * import math import random def GetPointDefenseLaunchers(UI): # build list of all launcher info launcher_list = [] nLaunchers = UI.GetLauncherCount() for n in range(0, nLaunchers): launcher_info = UI.GetLauncherInfo(n) if ((launcher_info.Status == 0) and (launcher_info.TargetFlags & 0x0008) and (launcher_info.MinRange_km < 1.0)): launcher_list.append(launcher_info) return launcher_list def GetPointDefenseTargets(UI): class_mask = 0x0060 # missiles and aircraft range_km = 8.0 affiliation = 3 # hostile track_list = UI.GetTrackList(class_mask, range_km, affiliation) targets = [] current_time = UI.GetTime() max_engaged_count = 10 nTracks = track_list.Size() for n in range(0, nTracks): track_info = track_list.GetTrack(n) track_id = track_info.ID staleness = current_time - track_info.Time is_destroyed = track_info.IsDestroyed() bearing_only = track_info.IsBearingOnly() engaged_count = track_info.GetEngagedCount() is_air_target = track_info.IsAir() or track_info.IsMissile() if ((engaged_count < max_engaged_count) and (staleness < 15.0) and (not bearing_only) and (not is_destroyed) and is_air_target): targets.append(track_info) return targets # script to augment EngageAll for better point defense reaction def PointDefense(UI): target_list = GetPointDefenseTargets(UI) nTargets = len(target_list) if (nTargets == 0): return launcher_list = GetPointDefenseLaunchers(UI) for k in range(0, len(launcher_list)): launcher_info = launcher_list[k] launcher_idx = launcher_info.Launcher # engage first target in random target order target_order = random.sample(range(0, nTargets), nTargets) selected_target = -1 for n in target_order: track_info = target_list[n] target_range = UI.GetRangeToTrack(track_info) launch_range = launcher_info.Range_km # reference max range, use for launch decision #UI.DisplayMessage('Best launcher %d' % launcher_idx) if (launcher_idx != -1): target_range = UI.GetRangeToTarget() launch_range = launcher_info.Range_km # reference max range, use for launch decision if ((selected_target == -1) and (target_range < launch_range)): selected_target = track_info.ID break if (selected_target != -1): UI.SendTargetToLauncher(selected_target, launcher_idx) UI.Launch(launcher_idx, 1)
bsd-3-clause
-7,219,447,971,578,728,000
34.177215
136
0.618928
false
3.705333
false
false
false
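Stripped of the game-engine calls, the launcher loop in PointDefense.py makes one decision per launcher: visit the hostile tracks in a randomized order and commit to the first one that falls inside the launcher's reference range. A standalone sketch of that selection logic, with namedtuples standing in for the gcblue launcher and track objects (the field names mirror the script, but the objects themselves are illustrative):

import random
from collections import namedtuple

Track = namedtuple('Track', ['ID', 'range_km'])
Launcher = namedtuple('Launcher', ['Launcher', 'Range_km'])


def select_target(launcher_info, targets):
    # Randomize the visiting order so successive launchers spread their
    # fire across the raid instead of all engaging the same track first.
    order = random.sample(range(len(targets)), len(targets))
    for n in order:
        track = targets[n]
        if track.range_km < launcher_info.Range_km:
            return track.ID  # first in-range target wins
    return -1  # nothing engageable


targets = [Track(ID=101, range_km=9.5), Track(ID=102, range_km=3.2)]
launcher = Launcher(Launcher=0, Range_km=8.0)
print(select_target(launcher, targets))  # only track 102 is inside 8 km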
mmlab/eice
EiCGraphAlgo/core/search.py
1
13717
import time,gc,logging,pickle,os,sys,math from urllib.parse import urlparse from core.worker_pool import Worker from core.pathfinder_async import PathFinder from core import randompath, graph from core import config_search logger = logging.getLogger('pathFinder') query_log = logging.getLogger('query') blacklist = config_search.blacklist class Searcher: def __init__(self): self.logger = logging.getLogger('pathFinder') self.query_log = logging.getLogger('query') def search(self, start,dest,search_blacklist=blacklist,givenP=None,additionalRes=set(),k = 20,user_context=False,kp=75): """Searches a path between two resources start and dest **Parameters** start : uri resource to start pathfinding destination : uri destination resource for pathfinding search_blacklist : list list of resources to exclude in search givenP : Pathfinder a given pathfinder state for complex search queries k : integer number of iterations when to break off search user_context : uri a third resource to compute the score of the path in the context of the third resource. **Returns** response : dictionary contains execution time, path if found, hash """ #print ('starting search') #START start_time = time.clock() #Initialization if givenP == None: p = PathFinder(start,dest) p.iterateMatrix(search_blacklist,kp=kp) else: p = givenP p.iterateMatrix(blacklist=search_blacklist,additionalRes=additionalRes,kp=kp) #Iteration 1 paths = p.findPath() #Following iterations while True: if not paths == None: if len(paths) > 0: break self.logger.info ('=== %s-- ===' % str(p.iteration)) gc.collect() m = p.iterateMatrix(blacklist=search_blacklist,kp=kp) halt_path = time.clock() paths = p.findPath() self.logger.info ('Looking for path: %s' % str(time.clock()-halt_path)) if p.iteration == k: break resolvedPaths = list() #FINISH if paths: for path in paths: # logger.debug(path) resolvedPath = graph.resolvePath(path,p.getResources()) resolvedLinks = graph.resolveLinks(resolvedPath, p.getResourcesByParent()) formattedPath = list() for step in resolvedPath: formattedPath.append(step[1:-1]) fullPath = dict() fullPath['vertices'] = formattedPath fullPath['edges'] = resolvedLinks resolvedPaths.append(fullPath) else: return {'path':False,'source':start,'destination':dest,'execution_time':int(round((time.clock()-start_time) * 1000))} # graph.visualize(p, path=path) finish = int(round((time.clock()-start_time) * 1000)) r = dict() r['execution_time'] = finish r['paths'] = resolvedPaths r['source'] = start r['destination'] = dest r['checked_resources'] = p.checked_resources r['hash'] = 'h%s' % hash('{0}{1}{2}'.format(start_time,dest,time.time())) r['path'] = graph.listPath(resolvedPath,p.getResourcesByParent()) l = 0 c = 0 refcount = 0 usercount = 0 u = 0 for step in r['path']: if l > 2 and l % 2 == 1: c+=1 m = urlparse(r['path'][l]['uri']) m_p = urlparse(r['path'][l-2]['uri']) if m.netloc not in r['path'][l-2]['uri']: refcount += 1/2 refcount += p.jaccard_distance(m.path, m_p.path)/2 l+=1 if user_context and l % 2 == 0: u += 1 step = r['path'][l]['uri'] user_path = self.search(user_context,step,search_blacklist=search_blacklist,givenP=givenP,additionalRes=additionalRes,k = 6) if user_path['path']: usercount += 1 / (math.floor(len(user_path['path'])-1)/2) else: usercount += 0 if l > 0: r['novelty'] = 0 if c > 0: r['novelty'] = refcount / c if u > 0: r['personal_context'] = usercount / u try: path = os.path.dirname(os.path.abspath(__file__)) file = r['hash'] file_path = "{0}/stored_paths/{1}.dump".format(path,file) f = open(file_path,"wb") pickle.dump(r,f) 
f.close() except: self.logger.warning('could not log and store path between {0} and {1}'.format(start,dest)) self.logger.error(sys.exc_info()) self.query_log.info(r) self.logger.debug(r) result = dict() result['path'] = r['path'] result['hash'] = r['hash'] result['execution_time'] = r['execution_time'] result['source'] = r['source'] result['destination'] = r['destination'] if 'novelty' in r: result['novelty'] = r['novelty'] if 'personal_context' in r: result['user_context'] = r['personal_context'] return result class DeepSearcher: def __init__(self): self.searcher = Searcher() def searchAllPaths(self, start,dest,search_blacklist=blacklist): #START start_time = time.clock() #RUN paths = list() prevLenBlacklist = set(search_blacklist) path = self.searcher.search(start,dest,prevLenBlacklist) new_blacklist = self.generateBlackList(prevLenBlacklist,path) paths.append(path) while len(new_blacklist) > len (prevLenBlacklist): path = self.searcher.search(start,dest,new_blacklist) prevLenBlacklist = set(new_blacklist) new_blacklist = self.generateBlackList(new_blacklist,path) if not path['path'] == False: paths.append(path) result=dict() result['paths']=paths result['num_found']=len(paths) finish = int(round((time.clock()-start_time) * 1000)) result['execution_time']=finish return result def generateBlackList(self, blacklist,response): """Expands a given blacklist with a found response""" new_blacklist = set(blacklist) if not response['path'] == False: l = int(len(response['path'])/2) for step in response['path'][l-1:l+1]: if step['type'] == 'link': #print (step['uri']) new_blacklist.add('<%s>' % step['uri']) return new_blacklist def flattenSearchResults(self, response): flattened_path = list() if not response['path'] == False: for step in response['path']: if step['type'] == 'node': #print (step['uri']) flattened_path.append('<%s>' % step['uri']) return flattened_path def searchDeep(self, start,dest,search_blacklist=blacklist,k=5,s=3,user_context=False): """Searches a path between two resources start and dest **Parameters** same as regular search s: integer strength of deepness, how many nodes to trigger for deep search """ #START start_time = time.clock() p = PathFinder(start,dest) result = self.searcher.search(start,dest,search_blacklist=search_blacklist,givenP=p,k=k,user_context=user_context) if not result['path']: logger.debug (p.resources) deep_roots = p.iterateOptimizedNetwork(s) logger.debug (deep_roots) print (deep_roots) additionalResources = set() for st in deep_roots['start']: for dt in deep_roots['dest']: logger.debug ("extra path between %s and %s" % (st,dt)) print ("extra path between %s and %s" % (st,dt)) additionalResources = additionalResources.union(set(self.flattenSearchResults(self.searcher.search(st,dt,k=3*k)))) result=self.searcher.search(start,dest,search_blacklist=search_blacklist,givenP=p,additionalRes=additionalResources,k = k,user_context=user_context) finish = int(round((time.clock()-start_time) * 1000)) result['execution_time'] = finish return result class FallbackSearcher: def __init__(self, worker=Worker(),searcher=Searcher()): self.worker =worker self.searcher=searcher def searchFallback(self,source,destination): resp = dict() logger.info('Using fallback using random hubs, because no path directly found') path_between_hubs = False while not path_between_hubs: start = time.clock() worker_output = dict() hubs = randompath.randomSourceAndDestination() self.worker.startQueue(self.searchF, 3) 
self.worker.queueFunction(self.searchF,[hubs['source'],hubs['destination'],worker_output,'path_between_hubs']) self.worker.queueFunction(self.searchF,[source,hubs['source'],worker_output,'path_to_hub_source']) self.worker.queueFunction(self.searchF,[hubs['destination'],destination,worker_output,'path_to_hub_destination']) self.worker.waitforFunctionsFinish(self.searchF) path_between_hubs = worker_output['path_between_hubs'] path_to_hub_source = worker_output['path_to_hub_source'] path_to_hub_destination = worker_output['path_to_hub_destination'] if path_to_hub_source['path'] == False or path_to_hub_destination['path'] == False: path_between_hubs = False gc.collect() time.sleep(1) resp['execution_time'] = str(int(round((time.clock()-start) * 1000))) resp['source'] = source resp['destination'] = destination resp['path'] = list() resp['path'].extend(path_to_hub_source['path'][:-1]) resp['path'].extend(path_between_hubs['path']) resp['path'].extend(path_to_hub_destination['path'][1:]) resp['hash'] = False return resp def searchF(self, source, destination, target, index): try: target[index] = self.searcher.search(source,destination) except: target[index] = dict() target[index]['path'] = False logger.error(sys.exc_info()) logger.error('path between {0} and {1} not found.'.format(source, destination)) #r = search(start,dest) # #p = r['path'] #time = r['execution_time'] # #print (str(time)+' ms') #print (p) # #if paths: # graph.visualize(p, path=path) #else: # graph.visualize(p) #print (searchFallback('http://dbpedia.org/resource/Brussels','http://dbpedia.org/resource/Belgium')) #path = search('http://dbpedia.org/resource/Brussels','http://dbpedia.org/resource/Belgium',blacklist) #print (len(blacklist)) #print (len(new_blacklist)) #print (new_blacklist) #path = search('http://dbpedia.org/resource/Brussels','http://dbpedia.org/resource/Belgium',new_blacklist) ##print (len(new_blacklist)) #print (DeepSearcher().searchDeep('http://dbpedia.org/resource/Ireland','http://dbpedia.org/resource/Brussels',blacklist)) #print("search")python profiling like webgrind #searcher = Searcher() #print (searcher.search('http://dblp.l3s.de/d2r/resource/authors/Tok_Wang_Ling','http://dblp.l3s.de/d2r/resource/publications/conf/cikm/LiL05a',blacklist)) #print (searcher.search('http://dbpedia.org/resource/Brussels','http://dbpedia.org/resource/Gorillaz',blacklist)) #print (searcher.search('http://dbpedia.org/resource/New_York','http://dbpedia.org/resource/Ireland',blacklist)) #print (searcher.search('http://dbpedia.org/resource/Ohio','http://dbpedia.org/resource/Japan',blacklist)) #print (searcher.search('http://dbpedia.org/resource/Japan','http://dbpedia.org/resource/Tokyo',blacklist)) #print (searcher.search('http://dbpedia.org/resource/Ohio','http://dbpedia.org/resource/Tokyo',blacklist)) #print (searcher.search('http://dbpedia.org/resource/Paris','http://dbpedia.org/resource/Barack_Obama',blacklist)) #print (searcher.search('http://dbpedia.org/resource/Belgium','http://dbpedia.org/resource/Republic_Of_Congo',blacklist)) #print (DeepSearcher().searchAllPaths('http://dbpedia.org/resource/Belgium','http://dbpedia.org/resource/Ireland',blacklist)) #print (searcher.search('http://localhost/selvers','http://localhost/welf',blacklist)) #print (DeepSearcher().searchAllPaths('http://dbpedia.org/resource/Belgium','http://dbpedia.org/resource/Ireland',blacklist)) #print (searcher.search('http://dbpedia.org/resource/Brussels','http://dblp.l3s.de/d2r/resource/authors/Tok_Wang_Ling',blacklist)) #print 
(DeepSearcher().searchDeep('http://dblp.l3s.de/d2r/resource/authors/Tok_Wang_Ling','http://dbpedia.org/resource/Brussels',blacklist)) #print (searcher.search('http://dblp.l3s.de/d2r/resource/authors/Tok_Wang_Ling','http://dblp.l3s.de/d2r/resource/publications/conf/cikm/LiL05a',blacklist)) #print (search('http://dblp.l3s.de/d2r/resource/authors/Changqing_Li','http://dblp.l3s.de/d2r/resource/authors/Tok_Wang_Ling',blacklist))
agpl-3.0
-3,783,267,506,170,892,000
41.735202
160
0.5908
false
3.753968
false
false
false
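DeepSearcher.searchAllPaths in the record above enumerates alternative paths by searching, blacklisting a link in the middle of each path it finds, and searching again until the blacklist stops growing. The same loop is easier to see on an ordinary adjacency-list graph; the sketch below uses a plain BFS in place of the SPARQL-backed PathFinder, so the graph and helper names are illustrative only:

from collections import deque


def bfs_path(graph, start, dest, blocked):
    # Plain BFS that refuses to traverse blacklisted edges.
    queue, seen = deque([[start]]), {start}
    while queue:
        path = queue.popleft()
        node = path[-1]
        if node == dest:
            return path
        for nxt in graph.get(node, []):
            if (node, nxt) in blocked or nxt in seen:
                continue
            seen.add(nxt)
            queue.append(path + [nxt])
    return None


def all_paths(graph, start, dest):
    blocked, paths = set(), []
    while True:
        path = bfs_path(graph, start, dest, blocked)
        if path is None:
            return paths
        paths.append(path)
        # Blacklist an edge at the middle of the path so the next search
        # has to detour around it, mirroring generateBlackList().
        mid = len(path) // 2
        blocked.add((path[mid - 1], path[mid]))


graph = {'a': ['b', 'c'], 'b': ['d'], 'c': ['d'], 'd': []}
print(all_paths(graph, 'a', 'd'))  # [['a', 'b', 'd'], ['a', 'c', 'd']]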
stvstnfrd/edx-platform
openedx/tests/xblock_integration/test_done.py
5
3815
''' Tests for the DoneXBlock. This is nice as a simple example of the edX XBlock test framework. ''' from openedx.tests.xblock_integration.xblock_testcase import XBlockTestCase # pylint: disable=abstract-method class TestDone(XBlockTestCase): """ Simple tests for the completion XBlock. We set up a page with two of the block, make sure the page renders, toggle them a few times, make sure they've toggled, and reconfirm the page renders. """ olx_scenarios = { # Currently not used "two_done_block_test_case": """<vertical> <done urlname="done0"/> <done urlname="done1"/> </vertical>""" } # This is a stop-gap until we can load OLX and/or OLX from # normal workbench scenarios test_configuration = [ { "urlname": "two_done_block_test_case_0", #"olx": self.olx_scenarios[0], "xblocks": [ # Stopgap until we handle OLX { 'blocktype': 'done', 'urlname': 'done_0' } ] }, { "urlname": "two_done_block_test_case_1", #"olx": self.olx_scenarios[0], "xblocks": [ # Stopgap until we handle OLX { 'blocktype': 'done', 'urlname': 'done_1' } ] } ] def toggle_button(self, block, data, desired_state): """ Make an AJAX call to the XBlock, and assert the state is as desired. """ resp = self.ajax('toggle_button', block, data) assert resp.status_code == 200 # pylint: disable=comparison-with-callable assert resp.data == {'state': desired_state} # pylint: disable=comparison-with-callable # pylint: disable=unused-argument def check_response(self, block_urlname, rendering): """ Confirm that we have a 200 response code (no server error) In the future, visual diff test the response. """ response = self.render_block(block_urlname) assert response.status_code == 200 # pylint: disable=comparison-with-callable # To do: Below method needs to be implemented #self.assertXBlockScreenshot(block_urlname, rendering) def test_done(self): """ Walk through a few toggles. Make sure the blocks don't mix up state between them, initial state is correct, and final state is correct. """ # We confirm we don't have errors rendering the student view self.check_response('done_0', 'done-unmarked') self.check_response('done_1', 'done-unmarked') # We confirm the block is initially false self.toggle_button('done_0', {}, False) self.reset_published_events() self.toggle_button('done_1', {}, False) self.assert_no_events_published("edx.done.toggled") # We confirm we can toggle state both ways self.reset_published_events() self.toggle_button('done_0', {'done': True}, True) self.assert_event_published('edx.done.toggled', event_fields={"done": True}) self.reset_published_events() self.toggle_button('done_1', {'done': False}, False) self.assert_event_published('edx.done.toggled', event_fields={"done": False}) self.toggle_button('done_0', {'done': False}, False) self.assert_grade(0) self.toggle_button('done_1', {'done': True}, True) self.assert_grade(1) # We confirm state sticks around self.toggle_button('done_0', {}, False) self.toggle_button('done_1', {}, True) # And confirm we render correctly self.check_response('done_0', 'done-unmarked') self.check_response('done_1', 'done-marked')
agpl-3.0
2,928,645,477,135,331,300
34.990566
96
0.585059
false
3.949275
true
false
false
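Read together, the assertions in test_done pin down a small behavioural contract for the block's toggle_button handler: an empty payload reports the current state, a payload carrying 'done' overwrites it, and the state persists across calls, independently for each block instance. A toy model of that contract, written without the XBlock machinery (this is not the real DoneXBlock implementation, only the behaviour the test encodes):

class DoneLike(object):
    # Minimal stand-in reproducing the contract asserted by TestDone.
    def __init__(self):
        self.done = False

    def toggle_button(self, data):
        if 'done' in data:
            self.done = bool(data['done'])
        return {'state': self.done}


block = DoneLike()
assert block.toggle_button({}) == {'state': False}             # initial state
assert block.toggle_button({'done': True}) == {'state': True}  # toggled on
assert block.toggle_button({}) == {'state': True}              # state sticks around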
shawnhermans/cyborg-identity-manager
cyborg_identity/location/apps.py
1
1555
from django.apps import AppConfig from django_graph.registry import registry import sys from cyborg_identity import router current_module = sys.modules[__name__] class CyborgIdentityLocationConfig(AppConfig): name = 'cyborg_identity.location' def ready(self): Geohash = self.get_model('GeoHash') DeliveryPointBarcode = self.get_model('DeliveryPointBarcode') Address = self.get_model('Address') ResidesAt = self.get_model('ResidesAt') BornAt = self.get_model('BornAt') LivedNear = self.get_model('LivedNear') registry.register_model_with_api( current_module, Geohash, router, 'location/geohash' ) registry.register_model_with_api( current_module, DeliveryPointBarcode, router, 'location/delivery-point-barcode' ) registry.register_model_with_api( current_module, Address, router, 'location/address', register_admin=True ) registry.register_model_with_api( current_module, ResidesAt, router, 'location/resides-at' ) registry.register_model_with_api( current_module, BornAt, router, 'location/born-at' ) registry.register_model_with_api( current_module, LivedNear, router, 'location/lived-near' )
mit
5,853,332,208,678,880,000
24.916667
69
0.554984
false
4.146667
false
false
false
allenai/allennlp
allennlp/training/metrics/auc.py
1
5313
from typing import Optional from overrides import overrides import torch import torch.distributed as dist from sklearn import metrics from allennlp.common.util import is_distributed from allennlp.common.checks import ConfigurationError from allennlp.training.metrics.metric import Metric @Metric.register("auc") class Auc(Metric): """ The AUC Metric measures the area under the receiver-operating characteristic (ROC) curve for binary classification problems. """ def __init__(self, positive_label=1): super().__init__() self._positive_label = positive_label self._all_predictions = torch.FloatTensor() self._all_gold_labels = torch.LongTensor() def __call__( self, predictions: torch.Tensor, gold_labels: torch.Tensor, mask: Optional[torch.BoolTensor] = None, ): """ # Parameters predictions : `torch.Tensor`, required. A one-dimensional tensor of prediction scores of shape (batch_size). gold_labels : `torch.Tensor`, required. A one-dimensional label tensor of shape (batch_size), with {1, 0} entries for positive and negative class. If it's not binary, `positive_label` should be passed in the initialization. mask : `torch.BoolTensor`, optional (default = `None`). A one-dimensional label tensor of shape (batch_size). """ predictions, gold_labels, mask = self.detach_tensors(predictions, gold_labels, mask) # Sanity checks. if gold_labels.dim() != 1: raise ConfigurationError( "gold_labels must be one-dimensional, " "but found tensor of shape: {}".format(gold_labels.size()) ) if predictions.dim() != 1: raise ConfigurationError( "predictions must be one-dimensional, " "but found tensor of shape: {}".format(predictions.size()) ) unique_gold_labels = torch.unique(gold_labels) if unique_gold_labels.numel() > 2: raise ConfigurationError( "AUC can be used for binary tasks only. gold_labels has {} unique labels, " "expected at maximum 2.".format(unique_gold_labels.numel()) ) gold_labels_is_binary = set(unique_gold_labels.tolist()) <= {0, 1} if not gold_labels_is_binary and self._positive_label not in unique_gold_labels: raise ConfigurationError( "gold_labels should be binary with 0 and 1 or initialized positive_label " "{} should be present in gold_labels".format(self._positive_label) ) if mask is None: batch_size = gold_labels.shape[0] mask = torch.ones(batch_size, device=gold_labels.device).bool() self._all_predictions = self._all_predictions.to(predictions.device) self._all_gold_labels = self._all_gold_labels.to(gold_labels.device) self._all_predictions = torch.cat( [self._all_predictions, torch.masked_select(predictions, mask).float()], dim=0 ) self._all_gold_labels = torch.cat( [self._all_gold_labels, torch.masked_select(gold_labels, mask).long()], dim=0 ) if is_distributed(): world_size = dist.get_world_size() device = gold_labels.device # Check if batch lengths are equal. _all_batch_lengths = [torch.tensor(0) for i in range(world_size)] dist.all_gather( _all_batch_lengths, torch.tensor(len(self._all_predictions), device=device) ) _all_batch_lengths = [batch_length.item() for batch_length in _all_batch_lengths] if len(set(_all_batch_lengths)) > 1: # Subsequent dist.all_gather() calls currently do not handle tensors of different length. raise RuntimeError( "Distributed aggregation for AUC is currently not supported for batches of unequal length." 
) _all_predictions = [ torch.zeros(self._all_predictions.shape, device=device) for i in range(world_size) ] _all_gold_labels = [ torch.zeros(self._all_gold_labels.shape, device=device, dtype=torch.long) for i in range(world_size) ] dist.all_gather(_all_predictions, self._all_predictions) dist.all_gather(_all_gold_labels, self._all_gold_labels) self._all_predictions = torch.cat(_all_predictions, dim=0) self._all_gold_labels = torch.cat(_all_gold_labels, dim=0) def get_metric(self, reset: bool = False): if self._all_gold_labels.shape[0] == 0: return 0.5 false_positive_rates, true_positive_rates, _ = metrics.roc_curve( self._all_gold_labels.cpu().numpy(), self._all_predictions.cpu().numpy(), pos_label=self._positive_label, ) auc = metrics.auc(false_positive_rates, true_positive_rates) if reset: self.reset() return auc @overrides def reset(self): self._all_predictions = torch.FloatTensor() self._all_gold_labels = torch.LongTensor()
apache-2.0
6,154,903,549,579,065,000
38.355556
111
0.599097
false
4.1218
false
false
false
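Once the predictions and gold labels have been accumulated (and, under distributed training, all-gathered), Auc.get_metric defers entirely to scikit-learn. The same two calls can be run standalone, which is a convenient sanity check on the metric:

import numpy as np
from sklearn import metrics

# Accumulated prediction scores and binary gold labels, as the metric stores them.
predictions = np.array([0.1, 0.4, 0.35, 0.8])
gold_labels = np.array([0, 0, 1, 1])

# The same two calls that get_metric() makes.
fpr, tpr, _ = metrics.roc_curve(gold_labels, predictions, pos_label=1)
print(metrics.auc(fpr, tpr))  # 0.75 for this toy example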
sony/nnabla
python/test/solver/test_amsbound.py
1
3174
# Copyright 2019,2020,2021 Sony Corporation. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import pytest import nnabla.solvers as S import numpy as np from solver_test_utils import solver_tester, RefSolver from nbla_test_utils import list_context ctxs = list_context('AMSBound') class RefAMSBound(RefSolver): def __init__(self, alpha, beta1, beta2, eps, final_lr, gamma, bias_correction): self.alpha = alpha self.init_alpha = alpha self.beta1 = beta1 self.beta2 = beta2 self.eps = eps self.final_lr = final_lr self.gamma = gamma self.bias_correction = bias_correction self.m = {} self.v = {} self.v_hat = {} self.t = {} def _set_state_impl(self, key, param): self.m[key] = np.zeros_like(param) self.v[key] = np.zeros_like(param) self.v_hat[key] = np.zeros_like(param) self.t[key] = 0 def _update_impl(self, key, p, g): self.t[key] = min(self.t[key] + 1, np.iinfo(np.int32).max) _update_amsbound(p, g, self.m[key], self.v[key], self.v_hat[key], self.t[key], self.alpha, self.init_alpha, self.beta1, self.beta2, self.eps, self.final_lr, self.gamma, self.bias_correction) def _update_amsbound(p, g, m, v, v_hat, t, alpha, init_alpha, beta1, beta2, eps, final_lr, gamma, bias_correction): if bias_correction: alpha_t = alpha * \ np.sqrt(1. - beta2 ** t) / (1. - beta1 ** t) else: alpha_t = alpha final_lr_ = final_lr * (alpha / init_alpha) m[...] = beta1 * m + (1 - beta1) * g v[...] = beta2 * v + (1 - beta2) * g * g v_hat[...] = np.maximum(v_hat, v) denom = np.sqrt(v_hat) + eps lb = final_lr_ * (1 - 1 / (gamma*t + 1)) ub = final_lr_ * (1 + 1 / (gamma*t)) eta = np.clip(alpha_t/denom, lb, ub) p[...] = p - eta * m @pytest.mark.parametrize("ctx, solver_name", ctxs) @pytest.mark.parametrize("decay", [1e-4]) @pytest.mark.parametrize("alpha", [1e-2, 1e-4]) @pytest.mark.parametrize("beta1, beta2", [(0.9, 0.999), (0.999, 0.9)]) @pytest.mark.parametrize("eps", [1e-8]) @pytest.mark.parametrize("final_lr", [0.1]) @pytest.mark.parametrize("gamma", [0.001]) @pytest.mark.parametrize("bias_correction", [True, False]) @pytest.mark.parametrize("seed", [313]) def test_amsbound(seed, alpha, beta1, beta2, eps, final_lr, gamma, bias_correction, decay, ctx, solver_name): rng = np.random.RandomState(seed) solver_tester( rng, S.AMSBound, RefAMSBound, [alpha, beta1, beta2, eps, final_lr, gamma, bias_correction], atol=1e-3, ctx=ctx, solver_name=solver_name)
apache-2.0
1,245,892,378,135,716,400
37.240964
136
0.619408
false
3.046065
true
false
false
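Written out, the update that RefAMSBound implements (and against which the AMSBound solver is checked) is, at step t with gradient g_t:

\begin{aligned}
m_t &= \beta_1 m_{t-1} + (1-\beta_1)\,g_t \\
v_t &= \beta_2 v_{t-1} + (1-\beta_2)\,g_t^2 \\
\hat v_t &= \max(\hat v_{t-1},\, v_t) \\
\eta_t &= \operatorname{clip}\!\left(\frac{\alpha_t}{\sqrt{\hat v_t}+\epsilon},\;
          \eta_\ell\Bigl(1-\tfrac{1}{\gamma t+1}\Bigr),\;
          \eta_\ell\Bigl(1+\tfrac{1}{\gamma t}\Bigr)\right) \\
\theta_t &= \theta_{t-1} - \eta_t\, m_t
\end{aligned}

where \eta_\ell is final_lr rescaled by \alpha/\alpha_0, and \alpha_t equals \alpha, or \alpha\sqrt{1-\beta_2^t}/(1-\beta_1^t) when bias_correction is enabled; this is exactly what _update_amsbound computes element-wise with numpy.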
hpparvi/PyTransit
pytransit/lpf/ttvlpf.py
1
6189
# PyTransit: fast and easy exoplanet transit modelling in Python. # Copyright (C) 2010-2019 Hannu Parviainen # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <https://www.gnu.org/licenses/>. import seaborn as sb from matplotlib.pyplot import subplots, setp from numpy import pi, sign, cos, sqrt, sin, array, arccos, inf, round, int, s_, percentile, concatenate, median, mean, \ arange, poly1d, polyfit from numba import njit, prange from .lpf import BaseLPF from ..models.transitmodel import TransitModel from ..param.parameter import ParameterSet, PParameter, GParameter from ..param.parameter import UniformPrior as U, NormalPrior as N, GammaPrior as GM from ..orbits.orbits_py import as_from_rhop with_seaborn = True @njit("f8[:](f8[:], f8, f8, f8, f8[:], i8[:])", cache=False, parallel=False) def z_circular_ttv(t, p, a, i, tc, tcid): cosph = cos(2*pi * (t - tc[tcid]) / p) z = sign(cosph) * a * sqrt(1.0 - cosph * cosph * sin(i) ** 2) return z def plot_estimates(x, p, ax, bwidth=0.8): ax.bar(x, p[4, :] - p[3, :], bwidth, p[3, :], alpha=0.25, fc='b') ax.bar(x, p[2, :] - p[1, :], bwidth, p[1, :], alpha=0.25, fc='b') [ax.plot((xx - 0.47 * bwidth, xx + 0.47 * bwidth), (pp[[0, 0]]), 'k') for xx, pp in zip(x, p.T)] class TTVLPF(BaseLPF): """Log posterior function for TTV estimation. A log posterior function for TTV estimation. Each light curve represents a single transit, and is given a separate free transit centre parameter. The average orbital period and (one) transit zero epoch are assumed as known. Notes: The number of parameters can grow large with Kepler short-period planets. """ def __init__(self, target: str, zero_epoch: float, period: float, tc_sigma: float, passbands: list, times: list = None, fluxes: list = None, pbids: list = None, tm: TransitModel = None, nsamples: int = 1, exptime: float = 0.020433598): self.zero_epoch = zero_epoch self.period = period self.tc_sigma = tc_sigma super().__init__(target, passbands, times, fluxes, pbids, tm, nsamples, exptime) def _init_p_orbit(self): """Orbit parameter initialisation for a TTV model. """ porbit = [GParameter('rho', 'stellar_density', 'g/cm^3', U(0.1, 25.0), (0, inf)), GParameter('b', 'impact_parameter', 'R_s', U(0.0, 1.0), (0, 1))] s = self.tc_sigma self.tnumber = round((array([t.mean() for t in self.times]) - self.zero_epoch) / self.period).astype(int) tcs = self.period * self.tnumber + self.zero_epoch for tc, tn in zip(tcs, self.tnumber): porbit.append(GParameter(f'tc_{tn:d}', f'transit_centre_{tn:d}', 'd', N(tc, s), (-inf, inf))) self.ps.add_global_block('orbit', porbit) self._start_tc = 2 self._sl_tc = s_[self._start_tc:self._start_tc + self.nlc] def optimize_times(self, window): times, fluxes, pbids = [], [], [] tcp = self.ps[self._sl_tc] for i in range(self.nlc): tc = tcp[i].prior.mean mask = abs(self.times[i] - tc) < 0.5*window/24. 
times.append(self.times[i][mask]) fluxes.append(self.fluxes[i][mask]) self._init_data(times, fluxes, self.pbids) def _compute_z(self, pv): a = as_from_rhop(pv[0], self.period) if a < 1.: return None else: i = arccos(pv[1] / a) tc = pv[self._sl_tc] return z_circular_ttv(self.timea, self.period, a, i, tc, self.lcida) def plot_light_curve(self, ax=None, figsize=None, time=False): fig, ax = (None, ax) if ax is not None else subplots(figsize=figsize) time = self.timea_orig if time else arange(self.timea_orig.size) ax.plot(time, concatenate(self.fluxes)) ax.plot(time, concatenate(self.flux_model(self.de.minimum_location))) fig.tight_layout() return ax def posterior_period(self, burn: int = 0, thin: int = 1) -> float: df = self.posterior_samples(burn, thin, derived_parameters=False) tccols = [c for c in df.columns if 'tc' in c] tcs = median(df[tccols], 0) return mean((tcs[1:] - tcs[0]) / (self.tnumber[1:] - self.tnumber[0])) def plot_ttvs(self, burn=0, thin=1, axs=None, figsize=None, bwidth=0.8, fmt='h', windows=None): assert fmt in ('d', 'h', 'min') multiplier = {'d': 1, 'h': 24, 'min': 1440} ncol = 1 if windows is None else len(windows) fig, axs = (None, axs) if axs is not None else subplots(1, ncol, figsize=figsize, sharey=True) df = self.posterior_samples(burn, thin) tccols = [c for c in df.columns if 'tc' in c] tcs = median(df[tccols], 0) lineph = poly1d(polyfit(self.tnumber, tcs, 1)) tc_linear = lineph(self.tnumber) p = multiplier[fmt] * percentile(df[tccols] - tc_linear, [50, 16, 84, 0.5, 99.5], 0) setp(axs, ylabel='Transit center - linear prediction [{}]'.format(fmt), xlabel='Transit number') if windows is None: plot_estimates(self.tnumber, p, axs, bwidth) if with_seaborn: sb.despine(ax=axs, offset=15) else: setp(axs[1:], ylabel='') for ax, w in zip(axs, windows): m = (self.tnumber > w[0]) & (self.tnumber < w[1]) plot_estimates(self.tnumber[m], p[:, m], ax, bwidth) setp(ax, xlim=w) if with_seaborn: sb.despine(ax=ax, offset=15) if fig: fig.tight_layout() return axs
gpl-2.0
8,891,468,421,198,459,000
44.844444
120
0.605267
false
3.091409
false
false
false
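The numba kernel z_circular_ttv at the top of this module evaluates, for every exposure, the projected star-planet separation of a circular orbit using the per-transit centre t_c of the light curve the exposure belongs to:

\phi = \frac{2\pi\,(t - t_c)}{P}, \qquad
z(t) = \operatorname{sgn}(\cos\phi)\; a\,\sqrt{1-\cos^2\!\phi\,\sin^2 i}
     = \operatorname{sgn}(\cos\phi)\; a\,\sqrt{\sin^2\!\phi+\cos^2\!\phi\,\cos^2 i}

so |z| is the usual sky-projected separation in stellar radii, and the sign distinguishes the near (transit) half of the orbit from the far (occultation) half. In _compute_z the scaled semi-major axis a comes from the stellar density via as_from_rhop(rho, period) and the inclination from i = arccos(b / a).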
nirmeshk/oh-mainline
mysite/profile/migrations/0036_make_person_nullable.py
17
12132
# This file is part of OpenHatch. # Copyright (C) 2009 OpenHatch, Inc. # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. from south.db import db from django.db import models from mysite.profile.models import * class Migration: def forwards(self, orm): # Adding field 'DataImportAttempt.person' db.add_column('profile_dataimportattempt', 'person', orm['profile.dataimportattempt:person']) # Adding field 'DataImportAttempt.person_wants_data' db.add_column('profile_dataimportattempt', 'person_wants_data', orm['profile.dataimportattempt:person_wants_data']) # Adding field 'DataImportAttempt.failed' db.add_column('profile_dataimportattempt', 'failed', orm['profile.dataimportattempt:failed']) # Adding field 'DataImportAttempt.query' db.add_column('profile_dataimportattempt', 'query', orm['profile.dataimportattempt:query']) # Deleting field 'Person.ohloh_grab_completed' db.delete_column('profile_person', 'ohloh_grab_completed') # Changing field 'ProjectExp.person' # (to signature: django.db.models.fields.related.ForeignKey(to=orm['profile.Person'], null=True)) db.alter_column('profile_projectexp', 'person_id', orm['profile.projectexp:person']) def backwards(self, orm): # Deleting field 'DataImportAttempt.person' db.delete_column('profile_dataimportattempt', 'person_id') # Deleting field 'DataImportAttempt.person_wants_data' db.delete_column('profile_dataimportattempt', 'person_wants_data') # Deleting field 'DataImportAttempt.failed' db.delete_column('profile_dataimportattempt', 'failed') # Deleting field 'DataImportAttempt.query' db.delete_column('profile_dataimportattempt', 'query') # Adding field 'Person.ohloh_grab_completed' db.add_column('profile_person', 'ohloh_grab_completed', orm['profile.person:ohloh_grab_completed']) # Changing field 'ProjectExp.person' # (to signature: django.db.models.fields.related.ForeignKey(to=orm['profile.Person'])) db.alter_column('profile_projectexp', 'person_id', orm['profile.projectexp:person']) models = { 'profile.person': { 'gotten_name_from_ohloh': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'interested_in_working_on': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '1024'}), 'last_polled': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(1970, 1, 1, 0, 0)'}), 'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'unique': 'True'}) }, 'profile.link_person_tag': { 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'person': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['profile.Person']"}), 'source': ('django.db.models.fields.CharField', [], {'max_length': '200'}), 'tag': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['profile.Tag']"}) }, 'profile.sourceforgeproject': { 'id': ('django.db.models.fields.AutoField', [], 
{'primary_key': 'True'}), 'unixname': ('django.db.models.fields.CharField', [], {'max_length': '200'}) }, 'profile.tag': { 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'tag_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['profile.TagType']"}), 'text': ('django.db.models.fields.CharField', [], {'max_length': '50'}) }, 'profile.link_projectexp_tag': { 'Meta': {'unique_together': "[('tag', 'project_exp', 'source')]"}, 'favorite': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'project_exp': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['profile.ProjectExp']"}), 'source': ('django.db.models.fields.CharField', [], {'max_length': '200'}), 'tag': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['profile.Tag']"}) }, 'profile.link_project_tag': { 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'project': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['search.Project']"}), 'source': ('django.db.models.fields.CharField', [], {'max_length': '200'}), 'tag': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['profile.Tag']"}) }, 'profile.sourceforgeperson': { 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'username': ('django.db.models.fields.CharField', [], {'max_length': '200'}) }, 'search.project': { 'icon_url': ('django.db.models.fields.URLField', [], {'max_length': '200'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'language': ('django.db.models.fields.CharField', [], {'max_length': '200'}), 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '200'}) }, 'profile.dataimportattempt': { 'completed': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}), 'failed': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'person': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['profile.Person']", 'null': 'True'}), 'person_wants_data': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}), 'query': ('django.db.models.fields.CharField', [], {'max_length': '200'}), 'source': ('django.db.models.fields.CharField', [], {'max_length': '2'}) }, 'auth.user': { 'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2009, 7, 22, 0, 59, 1, 182078)'}), 'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}), 'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}), 'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'blank': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'blank': 'True'}), 'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}), 'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}), 'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2009, 7, 22, 0, 59, 1, 181934)'}), 'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}), 'password': 
('django.db.models.fields.CharField', [], {'max_length': '128'}), 'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'blank': 'True'}), 'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'}) }, 'auth.permission': { 'Meta': {'unique_together': "(('content_type', 'codename'),)"}, 'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}), 'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}) }, 'contenttypes.contenttype': { 'Meta': {'unique_together': "(('app_label', 'model'),)", 'db_table': "'django_content_type'"}, 'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}) }, 'profile.link_sf_proj_dude_fm': { 'Meta': {'unique_together': "[('person', 'project')]"}, 'date_collected': ('django.db.models.fields.DateTimeField', [], {}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'is_admin': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}), 'person': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['profile.SourceForgePerson']"}), 'position': ('django.db.models.fields.CharField', [], {'max_length': '200'}), 'project': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['profile.SourceForgeProject']"}) }, 'profile.tagtype': { 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}), 'prefix': ('django.db.models.fields.CharField', [], {'max_length': '20'}) }, 'auth.group': { 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}), 'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'blank': 'True'}) }, 'profile.projectexp': { 'data_import_attempt': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['profile.DataImportAttempt']", 'null': 'True'}), 'description': ('django.db.models.fields.TextField', [], {}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'man_months': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True'}), 'person': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['profile.Person']", 'null': 'True'}), 'person_role': ('django.db.models.fields.CharField', [], {'max_length': '200'}), 'primary_language': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True'}), 'project': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['search.Project']"}), 'should_show_this': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}), 'source': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True'}), 'url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True'}) } } complete_apps = ['profile']
agpl-3.0
4,257,737,274,522,573,300
63.190476
146
0.572618
false
3.744444
false
false
false
Edraak/circleci-edx-platform
lms/djangoapps/edraak_forus/helpers.py
1
6397
import logging import hmac from urllib import urlencode from hashlib import sha256 from collections import defaultdict from datetime import datetime, timedelta from django_countries import countries from django.utils.translation import ugettext as _ from django.core.urlresolvers import reverse from django.conf import settings from django.http import HttpResponseRedirect from django.core.exceptions import ValidationError from django.contrib.auth.models import User from django.core.validators import validate_email from opaque_keys import InvalidKeyError from xmodule.modulestore.exceptions import ItemNotFoundError from opaque_keys.edx.locations import SlashSeparatedCourseKey from xmodule.modulestore.django import modulestore from student.models import UserProfile DATE_TIME_FORMAT = '%Y-%m-%dT%H:%M:%S' log = logging.getLogger(__name__) ordered_hmac_keys = ( 'course_id', 'email', 'name', 'enrollment_action', 'country', 'level_of_education', 'gender', 'year_of_birth', 'lang', 'time', ) def is_enabled_language(lang): return lang in dict(settings.LANGUAGES) def forus_error_redirect(*messages): message = '. '.join(messages) + '.' url = '{base_url}?{params}'.format( base_url=reverse('forus_v1_error'), params=urlencode({ 'message': message.encode('utf-8') }) ) return HttpResponseRedirect(url) def validate_forus_hmac(params): remote_hmac = params.get('forus_hmac') if not remote_hmac: log.warn('HMAC is missing for email=`%s`', params.get('email')) raise ValidationError({ "forus_hmac": [_("The security check has failed on the provided parameters")] }) params_pairs = [ u'{}={}'.format(key, params.get(key, '')) for key in ordered_hmac_keys ] msg_to_hash = u';'.join(params_pairs) secret_key = settings.FORUS_AUTH_SECRET_KEY dig = hmac.new(secret_key.encode('utf-8'), msg_to_hash.encode('utf-8'), digestmod=sha256) local_hmac = dig.hexdigest() if local_hmac != remote_hmac: log.warn( 'HMAC is not correct remote=`%s` != local=`%s`. 
msg_to_hash=`%s`', remote_hmac, local_hmac, msg_to_hash, ) raise ValidationError({ "forus_hmac": [_("The security check has failed on the provided parameters")] }) def validate_forus_params_values(params): errors = defaultdict(lambda: []) def mark_as_invalid(field, field_label): # Translators: This is for the ForUs API errors[field].append(_('Invalid {field_label} has been provided').format( field_label=field_label, )) try: validate_email(params.get('email')) try: user = User.objects.get(email=params.get('email')) if user.is_staff or user.is_superuser: errors['email'].append(_("ForUs profile cannot be created for admins and staff.")) except User.DoesNotExist: pass except ValidationError: # Translators: This is for the ForUs API errors['email'].append(_("The provided email format is invalid")) if params.get('gender') not in dict(UserProfile.GENDER_CHOICES): # Translators: This is for the ForUs API mark_as_invalid('gender', _('gender')) if not is_enabled_language(params.get('lang')): # Translators: This is for the ForUs API mark_as_invalid('lang', _('language')) if params.get('country') not in dict(countries): # Translators: This is for the ForUs API mark_as_invalid('lang', _('country')) if params.get('level_of_education') not in dict(UserProfile.LEVEL_OF_EDUCATION_CHOICES): # Translators: This is for the ForUs API mark_as_invalid('lang', _('level of education')) try: course_key = SlashSeparatedCourseKey.from_deprecated_string(params.get('course_id')) course = modulestore().get_course(course_key) if not course: raise ItemNotFoundError() if not course.is_self_paced(): if not course.enrollment_has_started(): # Translators: This is for the ForUs API errors['course_id'].append(_('The course has not yet been opened for enrollment')) if course.enrollment_has_ended(): # Translators: This is for the ForUs API errors['course_id'].append(_('Enrollment for this course has been closed')) except InvalidKeyError: log.warning( u"User {username} tried to {action} with invalid course id: {course_id}".format( username=params.get('username'), action=params.get('enrollment_action'), course_id=params.get('course_id'), ) ) mark_as_invalid('course_id', _('course id')) except ItemNotFoundError: # Translators: This is for the ForUs API errors['course_id'].append(_('The requested course does not exist')) try: if int(params['year_of_birth']) not in UserProfile.VALID_YEARS: # Translators: This is for the ForUs API mark_as_invalid('year_of_birth', _('birth year')) except ValueError: # Translators: This is for the ForUs API mark_as_invalid('year_of_birth', _('birth year')) try: time = datetime.strptime(params.get('time'), DATE_TIME_FORMAT) now = datetime.utcnow() if time > now: # Translators: This is for the ForUs API errors['time'].append(_('future date has been provided')) if time < (now - timedelta(days=1)): # Translators: This is for the ForUs API errors['time'].append(_('Request has expired')) except ValueError: # Translators: This is for the ForUs API mark_as_invalid('time', _('date format')) if len(errors): raise ValidationError(errors) def validate_forus_params(params): validate_forus_hmac(params) validate_forus_params_values(params) clean_params = { key: params[key] for key in ordered_hmac_keys } clean_params['forus_hmac'] = params['forus_hmac'] return clean_params def setfuncattr(name, value): def inner(func): setattr(func, name, value) return func return inner
agpl-3.0
50,745,825,195,102,190
28.344037
98
0.624355
false
4.098014
false
false
false
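The signing side of validate_forus_hmac, i.e. what a caller must send so the check passes, is simply an HMAC-SHA256 over the 'key=value' pairs in the fixed ordered_hmac_keys order, joined with ';'. A minimal sketch of producing that signature (the secret and parameter values are made up for illustration):

import hmac
from hashlib import sha256

ordered_hmac_keys = (
    'course_id', 'email', 'name', 'enrollment_action', 'country',
    'level_of_education', 'gender', 'year_of_birth', 'lang', 'time',
)


def sign_forus_params(params, secret_key):
    # Rebuild the exact message layout that validate_forus_hmac hashes:
    # ordered key=value pairs joined with ';', missing keys left empty.
    msg = ';'.join(u'{}={}'.format(key, params.get(key, '')) for key in ordered_hmac_keys)
    digest = hmac.new(secret_key.encode('utf-8'), msg.encode('utf-8'), digestmod=sha256)
    return digest.hexdigest()


params = {'course_id': 'org/course/run', 'email': 'learner@example.com', 'lang': 'ar'}
params['forus_hmac'] = sign_forus_params(params, 'not-the-real-secret')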
hbuyse/VBTournaments
accounts/migrations/0001_initial.py
1
1569
# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import models, migrations from django.conf import settings class Migration(migrations.Migration): dependencies = [ migrations.swappable_dependency(settings.AUTH_USER_MODEL), ] operations = [ migrations.CreateModel( name='VBUserProfile', fields=[ ('id', models.AutoField(serialize=False, verbose_name='ID', primary_key=True, auto_created=True)), ('_club', models.CharField(max_length=100, db_column='club', blank=True)), ('_level', models.CharField(max_length=14, db_column='level', choices=[('hobby', 'Loisir'), ('departmental', 'Départemental'), ('regional_1', 'Régional 1'), ('regional_2', 'Régional 2'), ('national_1', 'National 1'), ('national_2', 'National 2'), ('national_3', 'National 3'), ('professional_a', 'Professionel A'), ('professional_b', 'Professionel B'), ('kids', 'Enfant')])), ('_phone', models.CharField(max_length=100, db_column='phone', blank=True)), ('_share_mail', models.BooleanField(db_column='share_mail', default=True)), ('_share_phone', models.BooleanField(db_column='share_phone', default=False)), ('_facebook', models.CharField(max_length=100, db_column='facebook', blank=True)), ('_twitter', models.CharField(max_length=100, db_column='twitter', blank=True)), ('_user', models.OneToOneField(to=settings.AUTH_USER_MODEL)), ], ), ]
mit
7,488,885,610,189,463,000
53
391
0.604725
false
3.764423
false
false
false
dprog-philippe-docourt/django-qr-code
qr_code/qrcode/maker.py
1
3002
"""Tools for generating QR codes. This module depends on the Segno library.""" import io from typing import Mapping, Any from django.utils.html import escape from django.utils.safestring import mark_safe import segno from qr_code.qrcode.constants import DEFAULT_CACHE_ENABLED, \ DEFAULT_URL_SIGNATURE_ENABLED from qr_code.qrcode.serve import make_qr_code_url from qr_code.qrcode.utils import QRCodeOptions def make_qr(text: Any, qr_code_options: QRCodeOptions): """Creates a QR code :rtype: segno.QRCode """ # WARNING: For compatibility reasons, we still allow to pass __proxy__ class (lazy string). Moreover, it would be OK to pass anything that has __str__ # attribute (e. g. class instance that handles phone numbers). return segno.make(str(text), **qr_code_options.kw_make()) def make_qr_code_image(text: Any, qr_code_options: QRCodeOptions) -> bytes: """ Returns a bytes object representing a QR code image for the provided text. :param str text: The text to encode :param qr_code_options: Options to create and serialize the QR code. :rtype: bytes """ qr = make_qr(text, qr_code_options) out = io.BytesIO() qr.save(out, **qr_code_options.kw_save()) return out.getvalue() def make_embedded_qr_code(text: Any, qr_code_options: QRCodeOptions) -> str: """ Generates a <svg> or <img> tag representing the QR code for the given text. This tag can be embedded into an HTML document. """ qr = make_qr(text, qr_code_options) kw = qr_code_options.kw_save() # Pop the image format from the keywords since qr.png_data_uri / qr.svg_inline # set it automatically kw.pop('kind') if qr_code_options.image_format == 'png': return mark_safe('<img src="{0}" alt="{1}">' .format(qr.png_data_uri(**kw), escape(text))) return mark_safe(qr.svg_inline(**kw)) def make_qr_code_with_args(text: Any, qr_code_args: dict) -> str: options = _options_from_args(qr_code_args) return make_embedded_qr_code(text, options) def make_qr_code_url_with_args(text: Any, qr_code_args: dict) -> str: cache_enabled = qr_code_args.pop('cache_enabled', DEFAULT_CACHE_ENABLED) url_signature_enabled = qr_code_args.pop('url_signature_enabled', DEFAULT_URL_SIGNATURE_ENABLED) options = _options_from_args(qr_code_args) return make_qr_code_url(text, options, cache_enabled=cache_enabled, url_signature_enabled=url_signature_enabled) def _options_from_args(args: Mapping) -> QRCodeOptions: """Returns a QRCodeOptions instance from the provided arguments. """ options = args.get('options') if options: if not isinstance(options, QRCodeOptions): raise TypeError('The options argument must be of type QRCodeOptions.') else: # Convert the string "None" into None kw = {k: v if v != 'None' else None for k, v in args.items()} options = QRCodeOptions(**kw) return options
bsd-3-clause
-1,548,420,874,700,346,000
37.487179
154
0.673884
false
3.462514
false
false
false
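make_qr_code_image in the record above returns the serialized image bytes for any text. A minimal usage sketch, assuming QRCodeOptions accepts an image_format keyword (the attribute the code reads) and that the surrounding Django project is already configured:

from qr_code.qrcode.maker import make_qr_code_image
from qr_code.qrcode.utils import QRCodeOptions

# image_format drives the output kind passed to segno via kw_save()
options = QRCodeOptions(image_format='png')
png_bytes = make_qr_code_image('https://example.com', options)

with open('qr.png', 'wb') as fh:
    fh.write(png_bytes)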
jbarlow83/OCRmyPDF
misc/watcher.py
1
5818
# Copyright (C) 2019 Ian Alexander: https://github.com/ianalexander # Copyright (C) 2020 James R Barlow: https://github.com/jbarlow83 # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in all # copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. import json import logging import os import sys import time from datetime import datetime from pathlib import Path import pikepdf from watchdog.events import PatternMatchingEventHandler from watchdog.observers import Observer from watchdog.observers.polling import PollingObserver import ocrmypdf # pylint: disable=logging-format-interpolation INPUT_DIRECTORY = os.getenv('OCR_INPUT_DIRECTORY', '/input') OUTPUT_DIRECTORY = os.getenv('OCR_OUTPUT_DIRECTORY', '/output') OUTPUT_DIRECTORY_YEAR_MONTH = bool(os.getenv('OCR_OUTPUT_DIRECTORY_YEAR_MONTH', '')) ON_SUCCESS_DELETE = bool(os.getenv('OCR_ON_SUCCESS_DELETE', '')) DESKEW = bool(os.getenv('OCR_DESKEW', '')) OCR_JSON_SETTINGS = json.loads(os.getenv('OCR_JSON_SETTINGS', '{}')) POLL_NEW_FILE_SECONDS = int(os.getenv('OCR_POLL_NEW_FILE_SECONDS', '1')) USE_POLLING = bool(os.getenv('OCR_USE_POLLING', '')) LOGLEVEL = os.getenv('OCR_LOGLEVEL', 'INFO') PATTERNS = ['*.pdf', '*.PDF'] log = logging.getLogger('ocrmypdf-watcher') def get_output_dir(root, basename): if OUTPUT_DIRECTORY_YEAR_MONTH: today = datetime.today() output_directory_year_month = ( Path(root) / str(today.year) / f'{today.month:02d}' ) if not output_directory_year_month.exists(): output_directory_year_month.mkdir(parents=True, exist_ok=True) output_path = Path(output_directory_year_month) / basename else: output_path = Path(OUTPUT_DIRECTORY) / basename return output_path def wait_for_file_ready(file_path): # This loop waits to make sure that the file is completely loaded on # disk before attempting to read. Docker sometimes will publish the # watchdog event before the file is actually fully on disk, causing # pikepdf to fail. retries = 5 while retries: try: pdf = pikepdf.open(file_path) except (FileNotFoundError, pikepdf.PdfError) as e: log.info(f"File {file_path} is not ready yet") log.debug("Exception was", exc_info=e) time.sleep(POLL_NEW_FILE_SECONDS) retries -= 1 else: pdf.close() return True return False def execute_ocrmypdf(file_path): file_path = Path(file_path) output_path = get_output_dir(OUTPUT_DIRECTORY, file_path.name) log.info("-" * 20) log.info(f'New file: {file_path}. 
Waiting until fully loaded...') if not wait_for_file_ready(file_path): log.info(f"Gave up waiting for {file_path} to become ready") return log.info(f'Attempting to OCRmyPDF to: {output_path}') exit_code = ocrmypdf.ocr( input_file=file_path, output_file=output_path, deskew=DESKEW, **OCR_JSON_SETTINGS, ) if exit_code == 0 and ON_SUCCESS_DELETE: log.info(f'OCR is done. Deleting: {file_path}') file_path.unlink() else: log.info('OCR is done') class HandleObserverEvent(PatternMatchingEventHandler): def on_any_event(self, event): if event.event_type in ['created']: execute_ocrmypdf(event.src_path) def main(): ocrmypdf.configure_logging( verbosity=( ocrmypdf.Verbosity.default if LOGLEVEL != 'DEBUG' else ocrmypdf.Verbosity.debug ), manage_root_logger=True, ) log.setLevel(LOGLEVEL) log.info( f"Starting OCRmyPDF watcher with config:\n" f"Input Directory: {INPUT_DIRECTORY}\n" f"Output Directory: {OUTPUT_DIRECTORY}\n" f"Output Directory Year & Month: {OUTPUT_DIRECTORY_YEAR_MONTH}" ) log.debug( f"INPUT_DIRECTORY: {INPUT_DIRECTORY}\n" f"OUTPUT_DIRECTORY: {OUTPUT_DIRECTORY}\n" f"OUTPUT_DIRECTORY_YEAR_MONTH: {OUTPUT_DIRECTORY_YEAR_MONTH}\n" f"ON_SUCCESS_DELETE: {ON_SUCCESS_DELETE}\n" f"DESKEW: {DESKEW}\n" f"ARGS: {OCR_JSON_SETTINGS}\n" f"POLL_NEW_FILE_SECONDS: {POLL_NEW_FILE_SECONDS}\n" f"USE_POLLING: {USE_POLLING}\n" f"LOGLEVEL: {LOGLEVEL}" ) if 'input_file' in OCR_JSON_SETTINGS or 'output_file' in OCR_JSON_SETTINGS: log.error('OCR_JSON_SETTINGS should not specify input file or output file') sys.exit(1) handler = HandleObserverEvent(patterns=PATTERNS) if USE_POLLING: observer = PollingObserver() else: observer = Observer() observer.schedule(handler, INPUT_DIRECTORY, recursive=True) observer.start() try: while True: time.sleep(1) except KeyboardInterrupt: observer.stop() observer.join() if __name__ == "__main__": main()
gpl-3.0
687,988,929,171,311,200
34.048193
84
0.670677
false
3.600248
false
false
false
michele-mada/cv-eyetracking-project-2017
py_eyetracker_v1.0/utils/camera/capture.py
1
2196
from threading import Thread

import cv2


class WebcamVideoStream:
    def __init__(self, src=0, contrast=None, saturation=None, debug=False):
        # initialize the video camera stream and read the first frame
        # from the stream
        self.stream = cv2.VideoCapture(src)

        self.saved_contrast = None
        if contrast is not None:
            self.saved_contrast = self.stream.get(cv2.CAP_PROP_CONTRAST)
            self.stream.set(cv2.CAP_PROP_CONTRAST, contrast)
            if debug:
                print("setting camera contrast to", contrast)
        elif debug:
            print("camera contrast is", self.stream.get(cv2.CAP_PROP_CONTRAST))

        self.saved_saturation = None
        if saturation is not None:
            self.saved_saturation = self.stream.get(cv2.CAP_PROP_SATURATION)
            self.stream.set(cv2.CAP_PROP_SATURATION, saturation)
            if debug:
                print("setting camera saturation to", saturation)
        elif debug:
            print("camera saturation is", self.stream.get(cv2.CAP_PROP_SATURATION))

        (self.grabbed, self.frame) = self.stream.read()

        # initialize the variable used to indicate if the thread should
        # be stopped
        self.stopped = False

    def start(self):
        # start the thread to read frames from the video stream
        Thread(target=self.update, args=()).start()
        return self

    def update(self):
        # keep looping infinitely until the thread is stopped
        while True:
            # if the thread indicator variable is set, stop the thread
            if self.stopped:
                return
            # otherwise, read the next frame from the stream
            (self.grabbed, self.frame) = self.stream.read()

    def read(self):
        # return the frame most recently read
        return self.frame

    def stop(self):
        # indicate that the thread should be stopped
        self.stopped = True
        if self.saved_contrast is not None:
            self.stream.set(cv2.CAP_PROP_CONTRAST, self.saved_contrast)
        if self.saved_saturation is not None:
            self.stream.set(cv2.CAP_PROP_SATURATION, self.saved_saturation)
mit
-6,668,825,792,570,327,000
36.237288
83
0.622495
false
4.029358
false
false
false
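WebcamVideoStream above is a thin threaded wrapper around cv2.VideoCapture. A minimal usage sketch, assuming a camera at index 0 and that the file is importable along the recorded path (utils.camera.capture):

import time

from utils.camera.capture import WebcamVideoStream  # import path assumed from the file location above

# start the background capture thread; contrast/saturation overrides are optional
stream = WebcamVideoStream(src=0, debug=True).start()
time.sleep(1)                # give the camera a moment to warm up
frame = stream.read()        # returns the most recent frame without blocking
if frame is not None:
    print("frame shape:", frame.shape)
stream.stop()                # stops the loop and restores the saved camera settings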
recessburton/FTPScanner
IPRangeCalc.py
1
1526
#! /usr/bin/env python
# -*- coding: utf-8 -*-
'''
Copyright (C),2014-2015, YTC, www.bjfulinux.cn
Created on 2015-09-24 16:40

@author: ytc recessburton@gmail.com
@version: 1.0

This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.

This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with this program.  If not, see <http://www.gnu.org/licenses/>
'''

import IPInfo


def ip2num(ip):
    ip = [int(x) for x in ip.split('.')]
    return ip[0] << 24 | ip[1] << 16 | ip[2] << 8 | ip[3]


def num2ip(num):
    return '%s.%s.%s.%s' % ((num & 0xff000000) >> 24,
                            (num & 0x00ff0000) >> 16,
                            (num & 0x0000ff00) >> 8,
                            num & 0x000000ff)


def gen_ip(ip):
    start, end = [ip2num(x) for x in ip.split('-')]
    return [num2ip(num) for num in range(start, end + 1) if num & 0xff]


def auto_ip_get(device):
    ip = IPInfo.find_ip(device)
    mask = IPInfo.find_mask(device)
    start = num2ip(ip2num(ip) & ip2num(mask))
    end = num2ip((ip2num(ip) | ~ip2num(mask)) - 1)
    return start + '-' + end
gpl-3.0
-8,623,088,591,869,231,000
32.173913
69
0.633028
false
3.199161
false
false
false
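ip2num and num2ip above pack and unpack dotted-quad addresses as 32-bit integers, and gen_ip expands a "start-end" string while dropping any address whose last octet is 0. A small sketch of the expected results, assuming the file is importable as IPRangeCalc; the values are easy to verify by hand:

from IPRangeCalc import ip2num, num2ip, gen_ip

assert ip2num('192.168.1.1') == (192 << 24) | (168 << 16) | (1 << 8) | 1
assert num2ip(ip2num('10.0.0.42')) == '10.0.0.42'

# the range generator skips x.x.x.0 because of the `num & 0xff` filter
assert gen_ip('192.168.0.254-192.168.1.2') == [
    '192.168.0.254', '192.168.0.255', '192.168.1.1', '192.168.1.2']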
KatiRG/flyingpigeon
flyingpigeon/processes/wps_c4i_percentile_indice.py
1
11688
from pywps.Process import WPSProcess import icclim import icclim.util.callback as callback import dateutil.parser from datetime import datetime import os from os.path import expanduser #from mkdir_p import * from flyingpigeon.utils import make_dirs transfer_limit_Mb = 100 import logging logger = logging.getLogger() class ProcessPercentileIndice(WPSProcess): def __init__(self): WPSProcess.__init__(self, identifier = 'wps_percentile_indice', # only mandatory attribute = same file name title = 'c4i - Percentile Indices', abstract = 'Computes single input, percentile based indices of temperature: TG10p, TX10p, TN10p, TG90p, TX90p, TN90p, WSDI, CSDI; and of rainfall R75p, R95p, R99p, R75TOT, R95TOT, R99TOT. This processes is also available in Climate4Impact and uses ICCLIM.', version = "1.0", metadata = [ {"title": "ICCLIM" , "href": "http://icclim.readthedocs.io/en/latest/"}, {"title": "Climate4Impact", "href": "http://climate4impact.eu/impactportal/general/index.jsp"}, ], storeSupported = True, statusSupported = True, grassLocation =False) ## self.filesBasePeriodIn = self.addLiteralInput(identifier = 'filesBasePeriod', ## title = 'Input netCDF files list (base (reference) period)', ## abstract="application/netcdf", ## type=type("S"), ## minOccurs=0, ## maxOccurs=1024, ## default = 'http://opendap.knmi.nl/knmi/thredds/dodsC/IS-ENES/TESTSETS/tas_day_EC-EARTH_rcp26_r8i1p1_20060101-20251231.nc,' + ## 'http://opendap.knmi.nl/knmi/thredds/dodsC/IS-ENES/TESTSETS/tas_day_EC-EARTH_rcp26_r8i1p1_20260101-20501231.nc,' + ## 'http://opendap.knmi.nl/knmi/thredds/dodsC/IS-ENES/TESTSETS/tas_day_EC-EARTH_rcp26_r8i1p1_20510101-20751231.nc,' + ## 'http://opendap.knmi.nl/knmi/thredds/dodsC/IS-ENES/TESTSETS/tas_day_EC-EARTH_rcp26_r8i1p1_20760101-21001231.nc') self.filesBasePeriodIn = self.addComplexInput( identifier="filesBasePeriod", title="URL to your NetCDF File", abstract="Input netCDF files list (base (reference) period). You may provide a URL or upload a NetCDF file.", minOccurs=1, maxOccurs=100, maxmegabites=10000, formats=[{"mimeType":"application/x-netcdf"}], ) self.indiceNameIn = self.addLiteralInput(identifier = 'indiceName', title = 'Index name', type=type("String"), minOccurs = 1, maxOccurs = 1, default = 'TG90p') self.indiceNameIn.values = ['TG10p', 'TX10p', 'TN10p', 'TG90p', 'TX90p', 'TN90p', 'WSDI', 'CSDI', 'R75p', 'R75TOT', 'R95p', 'R95TOT', 'R99p', 'R99TOT'] self.sliceModeIn = self.addLiteralInput(identifier = 'sliceMode', title = 'Slice mode (temporal grouping to apply to calculations)', type=type("String"), minOccurs = 1, maxOccurs = 1, default = 'year') self.sliceModeIn.values = ["year","month","ONDJFM","AMJJAS","DJF","MAM","JJA","SON"] self.timeRangeBasePeriodIn = self.addLiteralInput(identifier = 'timeRangeBasePeriod', title = 'Time range of base (reference) period, e.g. 1961-01-01/1990-12-31', abstract = "Time range is mandatory. 
Please fill in.", type=type("String"), minOccurs = 1, maxOccurs = 1, default = '1961-01-01/1990-12-31') self.filesStudyPeriodIn = self.addLiteralInput(identifier = 'filesStudyPeriod', title = 'Input netCDF files list (study period)', abstract="application/netcdf", type=type("S"), minOccurs=0, maxOccurs=100, default = 'http://opendap.knmi.nl/knmi/thredds/dodsC/IS-ENES/TESTSETS/tas_day_EC-EARTH_rcp26_r8i1p1_20060101-20251231.nc' ) #'http://opendap.knmi.nl/knmi/thredds/dodsC/IS-ENES/TESTSETS/tas_day_EC-EARTH_rcp26_r8i1p1_20260101-20501231.nc', #'http://opendap.knmi.nl/knmi/thredds/dodsC/IS-ENES/TESTSETS/tas_day_EC-EARTH_rcp26_r8i1p1_20510101-20751231.nc', #'http://opendap.knmi.nl/knmi/thredds/dodsC/IS-ENES/TESTSETS/tas_day_EC-EARTH_rcp26_r8i1p1_20760101-21001231.nc' self.timeRangeStudyPeriodIn = self.addLiteralInput(identifier = 'timeRangeStudyPeriod', title = 'Time range, e.g. 2010-01-01/2012-12-31', abstract = "Time range is mandatory. Please fill in.", type=type("String"), minOccurs=1, default = '2010-01-01/2012-12-31') self.varNameIn = self.addLiteralInput(identifier = 'varName', title = 'Variable name to process', type=type("String"), minOccurs=1, default = 'tas') self.leapNonLeapYearsIn = self.addLiteralInput( identifier = 'leapNonLeapYears', title = 'Take only leap years?', abstract = "Method for computing a percentile value for the calendar day of February 29th. Default: take all years (leap and non-leap)", type=type(False), minOccurs=1, maxOccurs=1, default = False) #self.leapNonLeapYearsIn.values = ["take all years (leap and non-leap)", "take only leap years"] ## self.outputFileNameIn = self.addLiteralInput(identifier = 'outputFileName', ## title = 'Name of output netCDF file', ## type=type("String"), ## default = './out_icclim.nc') self.NLevelIn = self.addLiteralInput( identifier = 'NLevel', title = 'Number of levels (if 4D variable)', type=type(1), minOccurs = 0) #self.opendapURL = self.addLiteralOutput(identifier = "opendapURL",title = "opendapURL"); self.output = self.addComplexOutput( identifier="output", title="Climate Index", abstract="Calculated climate index with icclim.", formats=[{"mimeType":"application/x-netcdf"}], asReference=True) def callback(self,message,percentage): self.status.set("%s" % str(message),str(percentage)); def execute(self): # Very important: This allows the NetCDF library to find the users credentials (X509 cert) #homedir = os.environ['HOME'] #os.chdir(homedir) def callback(b): self.callback("Processing",b) indice_name = self.indiceNameIn.getValue() in_files = self.getInputValues(identifier='filesBasePeriod') time_range_base_period = self.timeRangeBasePeriodIn.getValue() var_name = self.varNameIn.getValue() leap_nonleap_years = self.leapNonLeapYearsIn.getValue() in_files.extend(self.getInputValues(identifier='filesStudyPeriod')) time_range_study_period = self.timeRangeStudyPeriodIn.getValue() slice_mode = self.sliceModeIn.getValue() #out_file_name = self.outputFileNameIn.getValue() out_file_name = 'out.nc' level = self.NLevelIn.getValue() if time_range_base_period: startdate = dateutil.parser.parse(time_range_base_period.split("/")[0]) stopdate = dateutil.parser.parse(time_range_base_period.split("/")[1]) time_range_base_period = [startdate,stopdate] if time_range_study_period: startdate = dateutil.parser.parse(time_range_study_period.split("/")[0]) stopdate = dateutil.parser.parse(time_range_study_period.split("/")[1]) time_range_study_period = [startdate,stopdate] ## if (leap_nonleap_years == "take all years (leap and non-leap)"): ## 
leap_nonleap_years = False ## else: ## leap_nonleap_years = True #home = expanduser("~") self.status.set("Preparing....", 0) #pathToAppendToOutputDirectory = "/WPS_"+self.identifier+"_" + datetime.now().strftime("%Y%m%dT%H%M%SZ") """ URL output path """ #fileOutURL = os.environ['POF_OUTPUT_URL'] + pathToAppendToOutputDirectory+"/" """ Internal output path""" #fileOutPath = os.environ['POF_OUTPUT_PATH'] + pathToAppendToOutputDirectory +"/" """ Create output directory """ #mkdir_p(fileOutPath) self.status.set("Processing input list: " + str(in_files), 0) icclim.indice(indice_name=indice_name, in_files=in_files, var_name=var_name, slice_mode=slice_mode, time_range=time_range_study_period, out_file=out_file_name, N_lev=level, transfer_limit_Mbytes=transfer_limit_Mb, callback=callback, callback_percentage_start_value=0, callback_percentage_total=100, base_period_time_range=time_range_base_period, window_width=5, only_leap_years=leap_nonleap_years, ignore_Feb29th=True, interpolation='hyndman_fan', netcdf_version='NETCDF4_CLASSIC', out_unit='days') """ Set output """ #url = fileOutURL+"/"+out_file_name; #self.opendapURL.setValue(url); self.output.setValue(out_file_name) self.status.set("ready",100);
apache-2.0
4,282,581,633,001,511,400
48.948718
285
0.488193
false
4.159431
true
false
false
maxli99/SmartChecker
modules/flexins/TN_CN_20151124_IPDU_FreeMem.py
1
9078
# -*- coding: utf-8 -*- u"""FlexiNS/MME IPDU空闲内存持续减少的问题 - NS/MME软件版本为NS3.15, NS3.15.1 - ACPI4-A& ACPI4-B 解决方法:单元倒换或重启 """ import re from libs.checker import ResultInfo,CheckStatus ## Mandatory variables ##----------------------------------------------------- #check_id = '20160308.TN20151013' module_id = 'NSTNCN_20151124' tag = ['flexins','china'] priority = 'critical' name = "TN_CN_20151124_IPDU_FreeMem" desc = __doc__ criteria = u"""\ (1)检查NS/MME软件版本为 ['N5 1.19-3','N5 1.17-5'] 或者更高版本. (2) IPDU FREE MEMORY < 20%. 或者 IPDU 单元1143 告警 或者 lnx-mmeGTPLBS 模块占用内存较多>20%\ """ ## Optional variables ##----------------------------------------------------- # available target versions: target_versions = ['N5 1.19-3','N5 1.17-5'] check_commands = [ ("ZWQO:CR;","show the VERSION in the MME/SGSN"), ("ZAHO:;","show the alarms in the MME/SGSN"), ("ZDOI:IPDU,(active):M;","show the memory of active IPDU ."), ("ZDDE:IPDU,(active):\"top -n1\";","show the memory usage of process lnx-mmeGTPLBS in active IPDU .") ] know_version_identify_Patt = r"\s*(N\d+\s+\d+.\d+-\d+)\s*" ##----------------------------------------------------- ##version_up_NS15_id('N5 1.17-5') def version_up_NS15_id(NsVersionId): up_id = 0 version_id_Patt = r"\s*N(\d+)\s+\d+.\d+-\d+\s*" m=re.search(version_id_Patt,NsVersionId) if m: big_version_id = m.group(1) if int(big_version_id) >= 5: up_id =1 return up_id def is_NS15_version_id(NsVersionId,Ns15VersionList): if NsVersionId in Ns15VersionList: return 1 else: return 0 def Find_NS_MME_Patt_Return_Info_List(LogFile,CommandStr,InfoPatt,ReturnInfoLen): Command_start=False Command_end=False return_info_list=[] Command_start_Patt=r"\s*[Z]?%s\s*" % (CommandStr) Command_start_Patt=Command_start_Patt.replace(':;','[:]?;') ## print "Command_start_Patt =",Command_start_Patt Command_end_Patt=r"\s*COMMAND\s+EXECUTED\s*$" Find_Info_Patt=InfoPatt ## print "Find_Info_Patt =",Find_Info_Patt return_Len = ReturnInfoLen+1 fp=open(LogFile,'r') for line in fp.readlines(): if Command_start==False and Command_end==False: m=re.search(Command_start_Patt,line) if m: Command_start=True continue elif Command_start==True and Command_end==False: m0=re.search(Command_end_Patt,line) m1=re.search(Find_Info_Patt,line) if m0: Command_end=True continue if m1: ## print "line =",line for infoId in range(1,return_Len): try: return_info_list.append(m1.group(infoId)) except IndexError: return_info_list.append('Not_Find_Infomation') continue else: break fp.close() return return_info_list def F_MME_Patt_Return_Info_List(LogFile,CommandStr,InfoPatt1,Deviation=0,InforPatt2="",groupid1=0,groupid2=0): """ 1, 2,如果Deviation=0,但 InforPatt2《》“”,标明只要在命令开始后找到InforPatt2即可,不考虑位置, 待实现 3,groupid[0]是指InfoPatt1 匹配后返回的哪个group,groupid[1]是InforPatt2匹配后返回的group """ Command_start=False Command_end=False m=None m0=None m1=None m2=None m_first=None m_second=None return_info_list=[["UNHIT","0"],["","0"],["","0"]] #DDE:IPDU,0:"top -n1",; Command_start_Patt=r"\s*[Z]?%s\s*" % (CommandStr) Command_start_Patt=Command_start_Patt.replace(':;','[:]?;') ## print "Command_start_Patt =",Command_start_Patt Command_end_Patt=r"\s*COMMAND\s+EXECUTED\s*$" Find_Info_Patt=InfoPatt1 ## print "Find_Info_Patt =",Find_Info_Patt #return_Len = ReturnInfoLen+1 fp=open(LogFile,'r') LogLines=fp.readlines() i=-1 for line in LogLines: i=i+1 if Command_start==False and Command_end==False: m=re.search(Command_start_Patt,line) if m: Command_start=True continue elif Command_start==True and Command_end==False: #command begining but not finished m0=re.search(Command_end_Patt,line) if m0: #command 
finished Command_end=True break #13198 root 20 0 620m 291m 3584 S 70.6 1.2 9363:13 lnx-mmeGTPLBS m1=re.search(Find_Info_Patt,line) if m1: #The first pattern hit, check the seconde pattern in deviation line return_info_list[0][0]="HIT" m_first=m1.group(0) if m1.group(0) : return_info_list[1][0]=m1.group(0) if groupid1<>0 : return_info_list[1][1]=m1.group(groupid1) if Deviation<>0: m2=re.search(InforPatt2,LogLines[i+Deviation]) if m2 : m_second=m2.group(0) if m_second : return_info_list[2][0]=m2.group(0) if groupid2<>0 : return_info_list[2][1]=m1.group(groupid2) #if m_first : return_info_list.append(m_first) #if m_second : return_info_list.append(m_second) fp.close() return return_info_list def returnNotMatchItemInList(List_two,ItemName): ItemName_New = '' for item_name in List_two: if item_name != ItemName: ItemName_New = item_name return ItemName_New ## Mandatory function: run def run(logfile): result = ResultInfo(name,priority=priority) info = [] errmsg = '' Ns_version_Patt=r"\s*\S+\s+BU\s+\S+\s+(\w+\d+\s*\S+)\s+Y\s+Y\s*$" abnormal_flag=0 try : version = Find_NS_MME_Patt_Return_Info_List(logfile,'WQO:CR;',Ns_version_Patt,1)[0] except IndexError: version = '' ## print "\n****Find version id is : ",version if is_NS15_version_id(version,target_versions)>0 or version_up_NS15_id(version)>0: result.status = CheckStatus.PASSED info.append(u" - 检查到 NS/MME 软件版本为:'%s' ,它属于或者高于NS15版本." % version) else: m=re.search(know_version_identify_Patt,version) if m: result.status = CheckStatus.FAILED info.append(u" - 检查到 NS/MME 软件版本为:'%s' ,它不属于或者低于NS15版本." % version) else: result.status = CheckStatus.UNKNOWN info.append(u" - 检查到 NS/MME 软件版本为:'%s' ,它不属于NS/MME主流版本,请手动确认版本信息." % version) if version == '': errmsg = ' Have not find NS Version release identification !!' result.update(info=info,error=errmsg) return result #条件一 IPDU FREE MEMORY >20% #ZDOI:IPDU,0:M; #FREE MEMORY 2265 39 CommandPatt=r"DOI:IPDU,\d{1}:M:+;" InfoPatt1=r"^\s*FREE MEMORY\s+\S+\s+(\S+)\s*$" InfoPatt2=r"" try: MME_Usage = F_MME_Patt_Return_Info_List(logfile,CommandPatt,InfoPatt1,0,InfoPatt2,1,0) except IndexError: MME_Usage[0][0] = "Unkown Error!" MME_Usage[0][1] = -1 if MME_Usage[0][0] =="HIT" : if float(MME_Usage[1][1])<20 : abnormal_flag=1 info.append(u" - 检查到 NS/MME IPDU 空闲内存存量过低 ."+MME_Usage[1][1]) else: info.append(u" - 检查到 NS/MME IPDU 空闲内存存量在门限范围: "+MME_Usage[1][1]) elif MME_Usage[0][0] =="UNHIT" : info.append(u" - 没有检查到特征字符串.") else: abnormal_flag=-1 info.append(u" - 检查过程中异常错误,结果未知。 Unknown Result.") #条件二 ,IPDU 告警1143 """ <HIST> SHMME03BNK IPDU-0 SWITCH 2015-11-01 02:13:02.99 * DISTUR IPDU-0 1A001-00-5 CPUPRO (6270) 1143 AMOUNT OF FREE MEMORY REDUCED 00000000 00032000 00031A4C 00593DD0 """ InfoPatt_mapping1=r"\s*1143\s+AMOUNT OF FREE MEMORY REDUCED" InfoPatt_mapping2=r"IPDU-" try: MME_Alarm_1143 = F_MME_Patt_Return_Info_List(logfile,'AHO:;',InfoPatt_mapping1,-1,InfoPatt_mapping2) except IndexError: MME_Alarm_1143[0][0] = "Unkown Error!" 
MME_Alarm_1143[0][1] = -1 if MME_Alarm_1143[0][0] =="HIT": abnormal_flag=1 info.append(u" - 检查到 NS/MME 1143 AMOUNT OF FREE MEMORY REDUCED 告警.") elif MME_Alarm_1143[0][0] =="UNHIT": info.append(u" - 检查到 NO Alarm 1143.") else: abnormal_flag=-1 info.append(u" - 检查到过程中异常错误,结果未知。 Unknown Result.") #条件三 IPDU lnx-mmeGTPLBS 进程内存利用 < 20% CommandPatt=r"DDE:IPDU,\d{1}:\"top -n1\",;" #11965 root 20 0 554m 231m 3500 S 9.9 4.0 3476:31 lnx-mmeGTPLBS InfoPatt1=r"^.*\s+(\S+)\s+\S+\s+lnx-mmeGTPLBS" InfoPatt2=r"" try: MME_Usage = F_MME_Patt_Return_Info_List(logfile,CommandPatt,InfoPatt1,0,InfoPatt2,1,0) except IndexError: MME_Usage[0][0] = "Unkown Error!" MME_Usage[0][1] = -1 if MME_Usage[0][0] =="HIT" : if float(MME_Usage[1][1])>20 : abnormal_flag=1 info.append(u" - 检查到 NS/MME slnx-mmeGTPLBS 内存占用过高 ."+MME_Usage[1][1]) else: info.append(u" - 检查到 NS/MME slnx-mmeGTPLBS 内存占用在门限范围: "+MME_Usage[1][1]) elif MME_Usage[0][0] =="UNHIT" : info.append(u" - 没有检查到特征字符串.") else: abnormal_flag=-1 info.append(u" - 检查过程中异常错误,结果未知。 Unknown Result.") if abnormal_flag==-1: result.status = CheckStatus.UNKNOWN elif abnormal_flag==1 : result.status = CheckStatus.FAILED else: result.status = CheckStatus.PASSED #info=[line+'\n' for line in info] result.update(info=info,error=errmsg) return result
mit
607,413,475,795,255,300
27.664407
110
0.641675
false
2.174338
false
false
false
ahammel/Exasott
exasott/test/test_board.py
1
1758
"""Unit tests for the exasott.boards module. """ from exasott import boards, test import pytest class SetupBoardTest(object): """Shared objects for all exasott.board test classes. """ normal_board = boards.Board(8, 8) rectangle_board = boards.Board(2, 6) rectangle_board_str = (" AB\n" "1OO\n" "2OO\n" "3OO\n" "4OO\n" "5OO\n" "6OO\n") class TestBoardMethods(SetupBoardTest): """Unit tests for the Board class's methods. """ def test_str(self): """Specifications for Board.__str__ """ assert str(self.normal_board) == test.NORMAL_BOARD_STR assert str(self.rectangle_board) == self.rectangle_board_str def test_get_token(self): """Board.get_token should return the value of the token or throw a pre-defined error if the toke is out of range. """ for i in range(8): for j in range(8): assert self.normal_board.get_token(i, j) == 1 with pytest.raises(boards.BoardRangeError): self.normal_board.get_token(1, 10) def test_remove_token(self): """Board.remove_token should change the value of the token from 1 to zero, throwing an exception if either the token is out of range or already zero. """ self.normal_board.remove_token(0, 3) assert self.normal_board.get_token(0, 3) == 0 with pytest.raises(boards.BoardRangeError): self.normal_board.remove_token(0, 10) with pytest.raises(boards.MissingTokenError): self.normal_board.remove_token(0, 3)
bsd-3-clause
5,777,333,420,259,835,000
28.3
76
0.562571
false
3.838428
true
false
false
sisap-ics/sisaptools
sisaptools/database.py
1
21481
# -*- coding: utf8 -*- """ Eines per a la connexió a bases de dades relacionals (Oracle i MariaDB) i la manipulació de les seves dades i estructures. """ import datetime import os import random import time import cx_Oracle import MySQLdb.cursors from .constants import (APP_CHARSET, IS_PYTHON_3, TMP_FOLDER, MARIA_CHARSET, MARIA_COLLATE, MARIA_STORAGE) from .aes import AESCipher from .services import DB_INSTANCES, DB_CREDENTIALS from .textfile import TextFile class Database(object): """ Classe principal. S'instancia per cada connexió a una base de dades. Tots els mètodes es poden utilitzar per Oracle i MariaDB si no s'especifica el contrari. """ def __init__(self, instance, schema, retry=None): """ Inicialització de paràmetres i connexió a la base de dades. En cas d'error, intenta cada (retry) segons. """ hacked_instance = instance + 'p' if schema == 'prod' else instance attributes = DB_INSTANCES[hacked_instance] self.engine = attributes['engine'] self.host = attributes['host'] self.port = attributes['port'] if self.engine == 'my': self.local_infile = attributes['local_infile'] self.user, self.password = DB_CREDENTIALS[instance] self.database = schema elif self.engine == 'ora': self.sid = attributes.get('sid', None) self.service_name = attributes.get('service_name', None) self.user, self.password = DB_CREDENTIALS[instance][schema] self.available = False while not self.available: try: self.connect() except Exception as e: if any([word in str(e) for word in ('1049', 'Unknown database')]): self.connect(existent=False) self.recreate_database() elif retry: time.sleep(retry) else: raise else: self.available = True def connect(self, existent=True): """Desencripta password i connecta a la base de dades.""" password = AESCipher().decrypt(self.password) if self.password else '' if self.engine == 'my': self.connection = MySQLdb.connect( host=self.host, port=self.port, user=self.user, passwd=password, db=self.database if existent else 'information_schema', cursorclass=MySQLdb.cursors.SSCursor, charset=APP_CHARSET, use_unicode=IS_PYTHON_3, local_infile=1 * self.local_infile ) elif self.engine == 'ora': if self.sid: self.dsn = cx_Oracle.makedsn(self.host, self.port, self.sid) else: self.dsn = cx_Oracle.makedsn(self.host, self.port, service_name=self.service_name) self.connection = cx_Oracle.connect(self.user, password, self.dsn) def execute(self, sql): """Executa una sentència SQL.""" self.cursor = self.connection.cursor() self.cursor.execute(sql) self.cursor.close() self.connection.commit() def recreate_database(self): """Elimina i torna a crear una base de dades (MariaDB).""" self.execute('drop database if exists {}'.format(self.database)) sql = 'create database {} character set {} collate {}' self.execute(sql.format(self.database, MARIA_CHARSET, MARIA_COLLATE)) self.execute('use {}'.format(self.database)) def create_table(self, table, columns, pk=None, partition_type='hash', partition_id='id', partitions=None, storage=MARIA_STORAGE, remove=False): """Crea una taula a la base de dades.""" if remove: self.drop_table(table) if pk: if self.engine == 'my': spec = '({}, PRIMARY KEY ({}))'.format(', '.join(columns), ', '.join(pk)) elif self.engine == 'ora': spec = '({}, CONSTRAINT {}_pk PRIMARY KEY ({}))'.format( ', '.join(columns), table, ', '.join(pk)) else: spec = '({})'.format(', '.join(columns)) if self.engine == 'my': this = ' engine {} character set {} collate {}' spec += this.format(storage, MARIA_CHARSET, MARIA_COLLATE) if partitions: this = ' partition by {} ({}) {}' spec += 
this.format(partition_type, partition_id, 'partitions {}'.format(partitions) if partition_type == 'hash' else '({})'.format(', '.join(partitions))) try: self.execute('create table {} {}'.format(table, spec)) except Exception as e: s = str(e) if not any([word in s for word in ('1050', 'ORA-00955')]): raise e def drop_table(self, table): """Elimina una taula.""" if self.engine == 'my': self.execute('drop table if exists {}'.format(table)) elif self.engine == 'ora': try: self.execute('drop table {} PURGE'.format(table)) except Exception: pass def rename_table(self, old_name, new_name, ts=False): """Reanomena una taula.""" if ts: ara = datetime.datetime.now().strftime('%Y%m%d_%H%M%S') new_name += '_{}'.format(ara) self.drop_table(new_name) try: if self.engine == 'my': self.execute('rename table {} to {}'.format(old_name, new_name)) elif self.engine == 'ora': self.execute('rename {} to {}'.format(old_name, new_name)) except Exception: done = False else: done = True return done def get_all(self, sql, limit=None): """ Crea un generator que retorna d'un en un tots els registres obtinguts per una sentència SQL. """ if limit: sql = self.get_limit_clause(sql, limit) self.cursor = self.connection.cursor() self.cursor.execute(sql) for row in self.cursor: yield row self.cursor.close() def get_many(self, sql, n, limit=None): """ Crea un generator que retorna de n en n tots els registres obtinguts per una sentència SQL. """ if limit: sql = self.get_limit_clause(sql, limit) self.cursor = self.connection.cursor() self.cursor.execute(sql) while True: result = self.cursor.fetchmany(n) if not result: break yield result self.cursor.close() def get_limit_clause(self, sql, rows): """ Modifica la sentència SQL de get_all o get_many per limitar el número de registres. """ if self.engine == 'my': spec = ' limit {}'.format(rows) elif self.engine == 'ora': particula = 'and' if 'where' in sql else 'where' spec = ' {} rownum <= {}'.format(particula, rows) sql += spec return sql def get_one(self, sql): """Retorna un registre d'una sentència SQL.""" self.cursor = self.connection.cursor() self.cursor.execute(sql) resul = self.cursor.fetchone() self.cursor.close() return resul def list_to_table(self, iterable, table, partition=None, chunk=None): """ Insereix un iterable a una taula: -a MariaDB utilitza txt_to_table -a Oracle utilitza _list_to_table_oracle """ if self.engine == 'my': rand = random.randrange(0, 2 ** 64) filename = '{}/{}_{}.txt'.format(TMP_FOLDER, table, rand) delimiter = '@,' endline = '|;' with TextFile(filename) as file: file.write_iterable(iterable, delimiter, endline) self.file_to_table(filename, table, partition, delimiter, endline) elif self.engine == 'ora': values = [':{}'.format(i + 1) for i in range(len(iterable[0]))] values = ', '.join(values) sql = 'insert into {} VALUES ({})'.format(table, values) if chunk: for data in [iterable[i:i + chunk] for i in range(0, len(iterable), chunk)]: self._list_to_table_oracle(sql, data) else: self._list_to_table_oracle(sql, iterable) def _list_to_table_oracle(self, sql, iterable): """Auxiliar per càrrega a oracle.""" self.cursor = self.connection.cursor() self.cursor.prepare(sql) self.cursor.executemany(None, iterable) self.cursor.close() self.connection.commit() def file_to_table(self, filename, table, partition, delimiter, endline): """Carrega un fitxer a una taula (MariaDB).""" sql = "LOAD DATA {local} INFILE '{filename}' \ ignore INTO TABLE {table} {partition} \ CHARACTER SET {charset} \ FIELDS TERMINATED BY '{delimiter}' \ LINES TERMINATED BY '{endline}'" 
sql = sql.format( local='LOCAL' if self.local_infile else '', filename=filename, table=table, partition='PARTITION ({})'.format(partition) if partition else '', charset=APP_CHARSET, delimiter=delimiter, endline=endline ) self.execute(sql) os.remove(filename) def get_tables(self): """Retorna les taules de la base de dades.""" if self.engine == 'my': sql = "select table_name, table_rows \ from information_schema.tables \ where table_schema = '{}'".format(self.database) elif self.engine == 'ora': sql = "select table_name, num_rows from all_tables" tables = {table: rows for table, rows in self.get_all(sql)} return tables def get_table_owner(self, table, dblink=None): """Retorna el propietari i el nom original d'una taula (Oracle).""" table = table.upper() dblink_txt = "@{}".format(dblink.upper()) if dblink else "" try: sql = "select table_name from user_tables{} \ where table_name = '{}'" table, = self.get_one(sql.format(dblink_txt, table)) owner, = self.get_one('select user from dual') except TypeError: try: sql = "select table_owner, table_name from user_synonyms{} \ where synonym_name = '{}'" owner, table = self.get_one(sql.format(dblink_txt, table)) try: sql = "select table_owner, table_name from all_synonyms{} \ where owner = '{}' and synonym_name = '{}'" owner, table = self.get_one(sql.format(dblink_txt, owner, table)) except TypeError: pass except TypeError: try: sql = "select table_owner, table_name from all_synonyms{} \ where owner = 'PUBLIC' and \ synonym_name = '{}' and \ db_link is null" owner, table = self.get_one(sql.format(dblink_txt, table)) except TypeError: sql = "select owner, table_name from all_tables{} \ where table_name = '{}'" owner, table = self.get_one(sql.format(dblink_txt, table)) return owner, table def get_table_count(self, table, dblink=None): """Retorna el número de registres d'una taula.""" if self.engine == 'my': sql = "select table_rows from information_schema.tables \ where table_schema = '{}' and table_name = '{}'" sql = sql.format(self.database, table) elif self.engine == 'ora': owner, table = self.get_table_owner(table, dblink=dblink) dblink_txt = "@{}".format(dblink.upper()) if dblink else "" sql = "select nvl(num_rows, 0) from all_tables{} \ where owner = '{}' and table_name = '{}'" sql = sql.format(dblink_txt, owner.upper(), table.upper()) rows, = self.get_one(sql) return rows def get_table_partitions(self, table, dblink=None): """ Retorna un diccionari amb les particions d'una taula i el seu número de registres. 
""" if self.engine == 'my': sql = "select engine from information_schema.tables \ where table_schema = '{}' and table_name = '{}'" sql = sql.format(self.database, table) is_merge = 'MRG' in self.get_one(sql)[0] if is_merge: sql = 'show create table {}'.format(table) create = self.get_one(sql)[1] raw_tables = create.split('UNION=(')[1][:-1].split(',') tables = [tab.replace('`', '') for tab in raw_tables] sql = "select table_name, table_rows \ from information_schema.tables \ where table_schema = '{}' and table_name in {}" sql = sql.format(self.database, tuple(tables)) else: sql = "select partition_name, table_rows \ from information_schema.partitions \ where table_schema = '{}' and table_name = '{}' \ and partition_name is not null" sql = sql.format(self.database, table) elif self.engine == 'ora': owner, table = self.get_table_owner(table, dblink=dblink) dblink_txt = "@{}".format(dblink.upper()) if dblink else "" sql = "select partition_name, nvl(num_rows, 0) \ from all_tab_partitions{} \ where table_owner = '{}' and table_name = '{}'" sql = sql.format(dblink_txt, owner.upper(), table.upper()) partitions = {} for partition, rows in self.get_all(sql): partitions[partition] = rows return partitions def get_table_columns(self, table, dblink=None): """Retorna una llista amb les columnes de la taula.""" if self.engine == 'my': sql = "select column_name from information_schema.columns \ where table_schema = '{}' and table_name = '{}' \ order by ordinal_position".format(self.database, table) elif self.engine == 'ora': owner, table_real = self.get_table_owner(table, dblink=dblink) dblink_txt = "@{}".format(dblink.upper()) if dblink else "" sql = "select column_name from all_tab_columns{} \ where owner = '{}' and table_name='{}' \ order by column_id".format(dblink_txt, owner.upper(), table_real.upper()) columns = [column for column, in self.get_all(sql)] return columns def get_column_information(self, column, table, desti='my', dblink=None): """ Retorna un diccionari amb les instrucciones necessàries tant per crear com per consultar la columna especificada. 
""" if self.engine == 'my': sql = "select column_type, data_type, character_maximum_length, \ numeric_precision from information_schema.columns \ where table_schema = '{}' and table_name = '{}' \ and column_name = '{}'".format(self.database, table, column) done, type, char, num = self.get_one(sql) length, precision, scale = None, None, None elif self.engine == 'ora': owner, table_real = self.get_table_owner(table, dblink=dblink) dblink_txt = "@{}".format(dblink.upper()) if dblink else "" sql = "select data_type, data_length, data_precision, data_scale \ from all_tab_columns{} \ where owner = '{}' and table_name = '{}' \ and column_name = '{}'".format(dblink_txt, owner.upper(), table_real.upper(), column.upper()) type, length, precision, scale = self.get_one(sql) done, char, num = None, None, None words_in = ('DAT', 'VAL_D_V') words_out = ('EDAT',) if type == 'NUMBER' and \ any([word in column.upper() for word in words_in]) and \ not any([word in column.upper() for word in words_out]): type = 'DATE_J' param = {'column': column, 'length': length, 'precision': precision, 'scale': scale, 'done': done, 'char': char, 'num': num} conv = {('my', 'date', 'my', 'create'): '{column} {done}', ('my', 'date', 'my', 'query'): "date_format({column}, '%Y%m%d')", ('my', 'date', 'ora', 'create'): '{column} date', ('my', 'date', 'ora', 'query'): column, ('my', 'int', 'my', 'create'): '{column} {done}', ('my', 'int', 'my', 'query'): column, ('my', 'int', 'ora', 'create'): '{column} number({num})', ('my', 'int', 'ora', 'query'): column, ('my', 'double', 'my', 'create'): '{column} {done}', ('my', 'double', 'my', 'query'): column, ('my', 'double', 'ora', 'create'): '{column} number({num})', ('my', 'double', 'ora', 'query'): column, ('my', 'varchar', 'my', 'create'): '{column} {done}', ('my', 'varchar', 'my', 'query'): column, ('my', 'varchar', 'ora', 'create'): '{column} varchar2({char})', ('my', 'varchar', 'ora', 'query'): column, ('ora', 'DATE', 'my', 'create'): '{column} date', ('ora', 'DATE', 'my', 'query'): "to_char({column}, 'YYYYMMDD')", ('ora', 'DATE', 'ora', 'create'): '{column} date', ('ora', 'DATE', 'ora', 'query'): column, ('ora', 'DATE_J', 'my', 'create'): '{column} date', ('ora', 'DATE_J', 'my', 'query'): "to_char(to_date({column}, 'J'), 'YYYYMMDD')", ('ora', 'DATE_J', 'ora', 'create'): '{column} date', ('ora', 'DATE_J', 'ora', 'query'): "to_date({column}, 'J')", ('ora', 'NUMBER', 'my', 'create'): '{column} int' if not scale else '{column} double', ('ora', 'NUMBER', 'my', 'query'): column, ('ora', 'NUMBER', 'ora', 'create'): '{column} number({precision}, {scale})' if scale else '{column} number', ('ora', 'NUMBER', 'ora', 'query'): column, ('ora', 'VARCHAR2', 'my', 'create'): "{column} varchar({length})", ('ora', 'VARCHAR2', 'my', 'query'): 'ltrim(rtrim({column}))' if length > 49 else column, ('ora', 'VARCHAR2', 'ora', 'create'): '{column} varchar2({length})', ('ora', 'VARCHAR2', 'ora', 'query'): column} resul = {key: conv[(self.engine, type, desti, key)].format(**param) for key in ('create', 'query')} return resul def set_statistics(self, table): """Calcula les estadístiques d'una taula d'Oracle.""" self.cursor = self.connection.cursor() proc = 'DBMS_STATS.GATHER_TABLE_STATS' self.cursor.callproc(proc, (self.user, table.upper())) self.cursor.close() self.connection.commit() def set_grants(self, grants, tables, users, inheritance=True): """Estableix grants.""" comparacio = str if IS_PYTHON_3 else basestring if isinstance(grants, comparacio): grants = (grants,) if isinstance(tables, comparacio): 
tables = (tables,) if isinstance(users, comparacio): users = (users,) sql = 'grant {} on {} to {} {}' self.execute(sql.format(', '.join(grants), ', '.join(tables), ', '.join(users), 'with grant option' if inheritance else '')) def disconnect(self): """Desconnecta de la base de dades.""" self.connection.close() def __enter__(self): """Context manager.""" return self def __exit__(self, exc_type, exc_val, exc_tb): """Context manager.""" self.disconnect()
mit
8,641,174,514,809,038,000
42.802041
79
0.499837
false
4.092868
false
false
false
zestrada/nova-cs498cc
nova/tests/api/openstack/compute/contrib/test_baremetal_nodes.py
7
9013
# Copyright (c) 2013 NTT DOCOMO, INC. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from webob import exc from nova.api.openstack.compute.contrib import baremetal_nodes from nova import context from nova import exception from nova import test from nova.virt.baremetal import db class FakeRequest(object): def __init__(self, context): self.environ = {"nova.context": context} class BareMetalNodesTest(test.TestCase): def setUp(self): super(BareMetalNodesTest, self).setUp() self.context = context.get_admin_context() self.controller = baremetal_nodes.BareMetalNodeController() self.request = FakeRequest(self.context) def test_create(self): node = { 'service_host': "host", 'cpus': 8, 'memory_mb': 8192, 'local_gb': 128, 'pm_address': "10.1.2.3", 'pm_user': "pm_user", 'pm_password': "pm_pass", 'terminal_port': 8000, 'interfaces': [], } response = node.copy() response['id'] = 100 del response['pm_password'] response['instance_uuid'] = None self.mox.StubOutWithMock(db, 'bm_node_create') db.bm_node_create(self.context, node).AndReturn(response) self.mox.ReplayAll() res_dict = self.controller.create(self.request, {'node': node}) self.assertEqual({'node': response}, res_dict) def test_delete(self): self.mox.StubOutWithMock(db, 'bm_node_destroy') db.bm_node_destroy(self.context, 1) self.mox.ReplayAll() self.controller.delete(self.request, 1) def test_delete_node_not_found(self): self.mox.StubOutWithMock(db, 'bm_node_destroy') db.bm_node_destroy(self.context, 1).\ AndRaise(exception.NodeNotFound(node_id=1)) self.mox.ReplayAll() self.assertRaises( exc.HTTPNotFound, self.controller.delete, self.request, 1) def test_index(self): nodes = [{'id': 1}, {'id': 2}, ] interfaces = [{'id': 1, 'address': '11:11:11:11:11:11'}, {'id': 2, 'address': '22:22:22:22:22:22'}, ] self.mox.StubOutWithMock(db, 'bm_node_get_all') self.mox.StubOutWithMock(db, 'bm_interface_get_all_by_bm_node_id') db.bm_node_get_all(self.context).AndReturn(nodes) db.bm_interface_get_all_by_bm_node_id(self.context, 1).\ AndRaise(exception.NodeNotFound(node_id=1)) db.bm_interface_get_all_by_bm_node_id(self.context, 2).\ AndReturn(interfaces) self.mox.ReplayAll() res_dict = self.controller.index(self.request) self.assertEqual(2, len(res_dict['nodes'])) self.assertEqual([], res_dict['nodes'][0]['interfaces']) self.assertEqual(2, len(res_dict['nodes'][1]['interfaces'])) def test_show(self): node_id = 1 node = {'id': node_id} interfaces = [{'id': 1, 'address': '11:11:11:11:11:11'}, {'id': 2, 'address': '22:22:22:22:22:22'}, ] self.mox.StubOutWithMock(db, 'bm_node_get') self.mox.StubOutWithMock(db, 'bm_interface_get_all_by_bm_node_id') db.bm_node_get(self.context, node_id).AndReturn(node) db.bm_interface_get_all_by_bm_node_id(self.context, node_id).\ AndReturn(interfaces) self.mox.ReplayAll() res_dict = self.controller.show(self.request, node_id) self.assertEqual(node_id, res_dict['node']['id']) self.assertEqual(2, len(res_dict['node']['interfaces'])) def test_show_no_interfaces(self): node_id = 1 node = {'id': node_id} self.mox.StubOutWithMock(db, 
'bm_node_get') self.mox.StubOutWithMock(db, 'bm_interface_get_all_by_bm_node_id') db.bm_node_get(self.context, node_id).AndReturn(node) db.bm_interface_get_all_by_bm_node_id(self.context, node_id).\ AndRaise(exception.NodeNotFound(node_id=node_id)) self.mox.ReplayAll() res_dict = self.controller.show(self.request, node_id) self.assertEqual(node_id, res_dict['node']['id']) self.assertEqual(0, len(res_dict['node']['interfaces'])) def test_add_interface(self): node_id = 1 address = '11:22:33:44:55:66' body = {'add_interface': {'address': address}} self.mox.StubOutWithMock(db, 'bm_node_get') self.mox.StubOutWithMock(db, 'bm_interface_create') self.mox.StubOutWithMock(db, 'bm_interface_get') db.bm_node_get(self.context, node_id) db.bm_interface_create(self.context, bm_node_id=node_id, address=address, datapath_id=None, port_no=None).\ AndReturn(12345) db.bm_interface_get(self.context, 12345).\ AndReturn({'id': 12345, 'address': address}) self.mox.ReplayAll() res_dict = self.controller._add_interface(self.request, node_id, body) self.assertEqual(12345, res_dict['interface']['id']) self.assertEqual(address, res_dict['interface']['address']) def test_remove_interface(self): node_id = 1 interfaces = [{'id': 1}, {'id': 2}, {'id': 3}, ] body = {'remove_interface': {'id': 2}} self.mox.StubOutWithMock(db, 'bm_node_get') self.mox.StubOutWithMock(db, 'bm_interface_get_all_by_bm_node_id') self.mox.StubOutWithMock(db, 'bm_interface_destroy') db.bm_node_get(self.context, node_id) db.bm_interface_get_all_by_bm_node_id(self.context, node_id).\ AndReturn(interfaces) db.bm_interface_destroy(self.context, 2) self.mox.ReplayAll() self.controller._remove_interface(self.request, node_id, body) def test_remove_interface_by_address(self): node_id = 1 interfaces = [{'id': 1, 'address': '11:11:11:11:11:11'}, {'id': 2, 'address': '22:22:22:22:22:22'}, {'id': 3, 'address': '33:33:33:33:33:33'}, ] self.mox.StubOutWithMock(db, 'bm_node_get') self.mox.StubOutWithMock(db, 'bm_interface_get_all_by_bm_node_id') self.mox.StubOutWithMock(db, 'bm_interface_destroy') db.bm_node_get(self.context, node_id) db.bm_interface_get_all_by_bm_node_id(self.context, node_id).\ AndReturn(interfaces) db.bm_interface_destroy(self.context, 2) self.mox.ReplayAll() body = {'remove_interface': {'address': '22:22:22:22:22:22'}} self.controller._remove_interface(self.request, node_id, body) def test_remove_interface_no_id_no_address(self): node_id = 1 self.mox.StubOutWithMock(db, 'bm_node_get') db.bm_node_get(self.context, node_id) self.mox.ReplayAll() body = {'remove_interface': {}} self.assertRaises(exc.HTTPBadRequest, self.controller._remove_interface, self.request, node_id, body) def test_add_interface_node_not_found(self): node_id = 1 self.mox.StubOutWithMock(db, 'bm_node_get') db.bm_node_get(self.context, node_id).\ AndRaise(exception.NodeNotFound(node_id=node_id)) self.mox.ReplayAll() body = {'add_interface': {'address': '11:11:11:11:11:11'}} self.assertRaises(exc.HTTPNotFound, self.controller._add_interface, self.request, node_id, body) def test_remove_interface_node_not_found(self): node_id = 1 self.mox.StubOutWithMock(db, 'bm_node_get') db.bm_node_get(self.context, node_id).\ AndRaise(exception.NodeNotFound(node_id=node_id)) self.mox.ReplayAll() body = {'remove_interface': {'address': '11:11:11:11:11:11'}} self.assertRaises(exc.HTTPNotFound, self.controller._remove_interface, self.request, node_id, body)
apache-2.0
-2,591,924,370,840,028,000
40.155251
78
0.564629
false
3.683286
true
false
false
simba518/CollisionHandle
script/rename.py
1
1061
#! /usr/bin/env python

import os
import glob


def addSubFold(file_path):
    # move the file into a sub-folder named after its first five characters
    file_name = os.path.basename(file_path)
    file_dir = os.path.dirname(file_path)
    sub_fold = file_dir + "/" + file_name[0:5]
    if not os.path.exists(sub_fold):
        os.makedirs(sub_fold)
    return sub_fold + "/" + file_name


def changeName(old_name):
    # zero-pad the frame index so names sort correctly (e.g. _1.obj -> _001.obj)
    new_name = old_name
    if old_name[-6] == '_':
        new_name = old_name[0:-5] + "00" + old_name[-5:len(old_name)]
    elif old_name[-7] == '_':
        new_name = old_name[0:-6] + "0" + old_name[-6:len(old_name)]
    return new_name


def reName(dir_name, ext="*.obj"):
    files = glob.glob(dir_name + ext)
    for old_name in files:
        new_name = addSubFold(changeName(old_name))
        print(new_name)
        if new_name != old_name:
            os.system("mv " + old_name + " " + new_name)


# reName("./data/dino/tempt_cubes_test/obj/")
# reName("./data/dragon/tempt_stair/obj/")
reName("./data/bunny/tempt_one2/obj/")
# reName("./data/longcube/tempt_ica/obj/")
# reName("./data/longcube/tempt_ica2/obj/")
apache-2.0
4,502,206,236,536,143,400
30.205882
69
0.595664
false
2.699746
false
false
false
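For reference, the zero-padding rule in changeName above behaves as follows; the file names are illustrative, and the checks assume the function has been defined in a session on its own, since importing the script directly would also execute the uncommented reName call at the bottom:

# a single digit before ".obj" gains "00", two digits gain "0", three are left alone
assert changeName("frame_7.obj") == "frame_007.obj"
assert changeName("frame_42.obj") == "frame_042.obj"
assert changeName("frame_123.obj") == "frame_123.obj"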
haaja/osinfo
src/osinfo.py
1
16942
#!/usr/bin/env python # -*- coding: utf-8 -*- # # # osinfo.py # # Prints miscellaneous information about linux host. # # Licence: MIT (See LICENCE file or http://opensource.org/licenses/MIT) # Author: Janne Haapsaari <haaja@iki.fi> # import platform import getpass import subprocess import sys import os def get_distro_logo(distro='default'): """ Function returns ascii representation for distro logo. Keyword arguments: distro -- name of the linux distribution """ logo = { 'ubuntu': " \n" + " ..''''''.. \n" + " .;::::::::::::::;. \n" + " .;::::::::::::::'.':::;. \n" + " .;::::::::;,'..';. .::::;. \n" + " .:::::::,.,. ....:::::::. {uptime} \n" + ".:::::::. :;::::,. .:::::::. {kernel} \n" + ";:::::: .::::::::::. ::::::; {host} \n" + ":::. .' ::::::::::::...,:::::: {distro} \n" + ":::. .' ::::::::::::...,:::::: {user} \n" + ";:::::: .::::::::::. ::::::; {load} \n" + ".:::::::. :,;::;,. .:::::::. {reso} \n" + " .:::::::;.;. ....:::::::. {pkgs} \n" + " ;::::::::;,'..';. .::::; {desktop} \n" + " .;::::::::::::::'.':::;. \n" + " .,::::::::::::::,. \n" + " ...''''... \n" + " \n", 'fedora': ' \n' + ' ___ \n' + ' ,g@@@@@@@@@@@p, \n' + ' ,@@@@@@@@@@@D****4@@. \n' + ' ,@@@@@@@@@@P` `%@. {uptime} \n' + ' y@@@@@@@@@@F ,g@@p. !3@k {kernel} \n' + ' !@@@@@@@@@@@. !@@@@@@@@@@@@k {host} \n' + ':@@@@@@@@@@@@ J@@@@@@@@@@@@@L {distro} \n' + 'J@@@@@@@@@*** `***@@@@@@@@@@) {user} \n' + 'J@@@@@@@@@ @@@@@@@@@@) {load} \n' + 'J@@@@@@@@@@@@ J@@@@@@@@@@@@@L {reso} \n' + 'J@@@@@@@@@@@@ J@@@@@@@@@@@@F {pkgs} \n' + 'J@@@@@@@@@@@F {@@@@@@@@@@@F {desktop} \n' + 'J@@@E. ``*^` i@@@@@@@@@@B^ \n' + 'J@@@@@._ ,@@@@@@@@@@P` \n' + 'J@@@@@@@@@@@@@@@@@@BP*` \n' + ' \n', 'mint': ' \n' + '.:::::::::::::::::::::::::;,. \n' + ',0000000000000000000000000000Oxl, \n' + ',00, ..,cx0Oo. \n' + ',00, ,,. .cO0o \n' + ',00l,,. `00; .. .. .k0x \n' + '`kkkkO0l `00; ck000Odlk000Oo. .00c {uptime} \n' + ' d0k `00; x0O:.`d00O;.,k00. x0x {kernel} \n' + ' d0k `00; .00x ,00o ;00c d0k {host} \n' + ' d0k `00; .00d ,00o ,00c d0k {distro} \n' + ' d0k `00; .00d ,00o ,00c d0k {user} \n' + ' d0k `00; ;;` .;;. .cc` d0k {load} \n' + ' d0O .00d ... d0k {reso} \n' + ' ;00, :00x:,,,, ..... d0k {pkgs} \n' + ' o0O, .:dO000k........... d0k {desktop} \n' + ' :O0x, x0k \n' + ' :k0Odc,`.................;x00k \n' + ' .;lxO0000000000000000000000k \n' + ' ...................... \n' + ' \n', 'debian': ' \n' + ' _,met$$$$$gg. \n' + ' ,g$$$$$$$$$$$$$$$P. \n' + ' ,g$$P$$ $$$Y$$.$. \n' + ' ,$$P` `$$$. \n' + ',$$P ,ggs. `$$b: {uptime} \n' + 'd$$` ,$P$` . $$$ {kernel} \n' + '$$P d$` , $$P {host} \n' + '$$: $$. - ,d$$` {distro} \n' + '$$; Y$b._ _,d$P` {user} \n' + 'Y$$. .`$Y$$$$P$` {load} \n' + '`$$b $-.__ {reso} \n' + ' `Y$$b {pkgs} \n' + ' `Y$$. {desktop} \n' + ' `$$b. \n' + ' `Y$$b. 
\n' +
            '          `$Y$b._            \n' +
            '              `$$$$          \n' +
            '                             \n',
        'arch':
            '                  +                          \n' +
            '                  #                          \n' +
            '                 ###                         \n' +
            '                #####                        \n' +
            '                ######                       \n' +
            '               # ######      {uptime}        \n' +
            '              ###  #####     {kernel}        \n' +
            '            #############    {host}          \n' +
            '           ###############   {distro}        \n' +
            '          #################  {user}          \n' +
            '         #######     ######  {load}          \n' +
            '        ######     #######   {reso}          \n' +
            '       #######   ###   ##    {pkgs}          \n' +
            '      #########   ######     {desktop}       \n' +
            '     ######           ######                 \n' +
            '   ####                 ####                 \n' +
            ' ##                       ##                 \n' +
            '#                           #                \n',
        'crunchbang':
            '         ___       ___      _                \n' +
            '        /  /      /  /     | |               \n' +
            '       /  /      /  /      | |               \n' +
            '      /  /      /  /       | |   {uptime}    \n' +
            ' _______/  /______/  /______  | |  {kernel}  \n' +
            '/______   _______   _______/  | |  {host}    \n' +
            '     /  /      /  /        | |   {distro}    \n' +
            '    /  /      /  /         | |   {user}      \n' +
            '   /  /      /  /          | |   {load}      \n' +
            ' ______/  /______/  /______   | |  {reso}    \n' +
            '/_____   _______   _______/   | |  {pkgs}    \n' +
            '   /  /      /  /          |_|   {desktop}   \n' +
            '  /  /      /  /            _                \n' +
            ' /  /      /  /            | |               \n' +
            '/__/      /__/             |_|               \n',
        'default':
            '                                             \n' +
            '         .88888888:.                         \n' +
            '        88888888.88888.                      \n' +
            '      .8888888888888888.                     \n' +
            '      888888888888888888                     \n' +
            '      88|  _`88|_  `88888                    \n' +
            '      88 88 88 88  88888                     \n' +
            '      88_88_::_88_:88888                     \n' +
            '      88:::,::,:::::8888                     \n' +
            '      88`:::::::::``8888                     \n' +
            '     .88  `::::`    8:88.                    \n' +
            '    8888            `8:888.                  \n' +
            '  .8888`             `888888.                \n' +
            ' .8888:..  .::.  ...:`8888888:.              \n' +
            ' .8888.|     :|     `|::`88:88888            \n' +
            ' .8888        `         `.888:8888.       {uptime}  \n' +
            '  888:8         .           888:88888     {kernel}  \n' +
            ' .888:88        .:           888:88888:   {host}    \n' +
            '  8888888.       ::           88:888888   {distro}  \n' +
            '  `.::.888.      ::          .88888888    {user}    \n' +
            ' .::::::.888.    ::         :::`8888`.:.  {load}    \n' +
            ' ::::::::::.888   |         .:::::::::::: {reso}    \n' +
            ' ::::::::::::.8    |      .:8::::::::::::. {pkgs}   \n' +
            '.::::::::::::::.        .:888::::::::::::: {desktop}\n' +
            ':::::::::::::::88:.__..:88888:::::::::::`           \n' +
            ' ``.:::::::::::88888888888.88:::::::::`             \n' +
            '       ``:::_:` -- `` -`-` ``:_::::``               \n' +
            '                                                    \n'
    }

    if distro in logo:
        return logo[distro]
    else:
        return logo['default']


def get_uptime():
    """ Returns system uptime.

    Uptime is read from /proc/uptime file and converted to human
    readable format.
    """
    try:
        file = open('/proc/uptime', 'r')
        data = file.read().split()
        file.close()
    except IOError:
        raise OSInfoError('Unable to open file: /proc/uptime')

    total_seconds = float(data[0])

    # Helper variables
    minute = 60
    hour = minute * 60
    day = hour * 24

    days = int(total_seconds / day)
    hours = int((total_seconds % day) / hour)
    minutes = int((total_seconds % hour) / minute)
    seconds = int(total_seconds % minute)

    # build up the result string
    uptime = ''
    if days > 0:
        uptime += str(days) + ' ' + (days == 1 and 'day, ' or 'days, ')
    if hours > 0:
        uptime += str(hours) + ' ' + (hours == 1 and 'hour, ' or 'hours, ')
    if minutes > 0:
        uptime += str(minutes) + ' ' \
            + (minutes == 1 and 'minute, ' or 'minutes, ')
    if seconds > 0:
        uptime += str(seconds) + ' ' \
            + (seconds == 1 and 'second, ' or 'seconds')

    return uptime


def get_loadavg():
    """Returns load averages

    Load averages are read from /proc/loadavg file.
    """
    try:
        file = open('/proc/loadavg', 'r')
        loads = file.readline().split()
        file.close()
    except IOError:
        raise OSInfoError('Unable to open file: /proc/loadavg')

    return str(loads[0] + ', ' + loads[1] + ', ' + loads[2])


def get_kernel_version():
    """Returns kernel version string."""
    return platform.uname()[2]


def get_hostname():
    """Returns hostname."""
    return platform.node()


def get_distribution_info():
    """Returns distribution info."""
    distro = platform.linux_distribution()
    if distro == ('', '', ''):
        try:
            result = subprocess.check_output(['lsb_release', '-ds'])
            result = result.strip("\n\"")
        except subprocess.CalledProcessError:
            raise OSInfoError('Unable to execute lsb_release: '
                              + str(sys.exc_info()))
    else:
        result = distro[0] + ' ' + distro[1] + ' ' + distro[2]

    return result


def get_distribution_name():
    """Returns distribution name."""
    distro = platform.linux_distribution()[0]
    if len(distro) == 0:
        try:
            distro = subprocess.check_output(['lsb_release', '-is'])
            distro = distro.strip()
        except subprocess.CalledProcessError:
            raise OSInfoError('Unable to execute lsb_release: '
                              + str(sys.exc_info()))

    return distro.lower()


def get_username():
    """Returns the login name of the user."""
    return getpass.getuser()


def get_resolution():
    """Returns screen resolution using xrandr.

    Currently supports only single monitor setups.
    """
    try:
        output = subprocess.check_output(['xrandr'])
    except subprocess.CalledProcessError:
        raise OSInfoError('Unable to execute xrandr: ' + str(sys.exc_info()))

    output = output.decode('utf-8')
    output = output.split('\n')
    for line in output:
        if "*" in line:
            line = line.strip()
            line = line.split()[0]
            return line

    return "Unable to get screen resolution using xrandr."


def get_number_of_installed_packages():
    """Returns the number of packages installed via distributions package
    manager

    Note: This is really slow!

    Supports following distributions:
        - Fedora
        - SuSE
        - CentOs
        - RedHat
        - Ubuntu
        - Debian
        - Mint
        - Arch
    """
    distro = get_distribution_name()

    if (distro == 'fedora' or distro == 'suse' or distro == 'centos'
            or distro == 'redhat'):
        try:
            output = subprocess.check_output(['rpm', '-qa'])
        except subprocess.CalledProcessError:
            raise OSInfoError('Unable to execute rpm: ' + str(sys.exc_info()))
    elif distro == 'debian' or distro == 'ubuntu' or distro == 'mint':
        try:
            output = subprocess.check_output(['dpkg', '-l'])
        except subprocess.CalledProcessError:
            raise OSInfoError('Unable to execute dpkg: ' + str(sys.exc_info()))
    elif distro == 'arch':
        try:
            output = subprocess.check_output(['pacman', '-Q'])
        except subprocess.CalledProcessError:
            raise OSInfoError('Unable to execute pacman: '
                              + str(sys.exc_info()))
    else:
        return 'Unable to count packages'

    output = output.decode('utf-8')
    output = output.split('\n')
    return str(len(output))


def get_desktop_environment():
    """Try to guess the current desktop environment"""
    desktop = os.environ.get('XDG_CURRENT_DESKTOP')
    if desktop is None:
        desktop = os.environ.get('DESKTOP_SESSION')
    if desktop is None:
        raise OSInfoError("Unable to get desktop environment")

    return desktop


def get_desktop_version(desktop):
    """Returns the desktop environment version string."""
    desktop = desktop.lower()
    if desktop == 'gnome':
        desktop = 'gnome-shell --version'
    elif desktop == 'kde':
        desktop = 'kde4-config --version'
    else:
        return "not available"

    command = desktop.split()
    try:
        output = subprocess.check_output(command)
    except subprocess.CalledProcessError as err:
        # gnome-session --version returns 1 instead of 0. The bug is fixed
        # in GNOME 3.7.3 but here's a hack to make this work with earlier
        # versions.
        output = err.output

    output = output.decode('utf-8')
    result = output.split()
    if len(result) > 2:
        return result[2]
    else:
        return result[1]


def print_info():
    """Print OS information"""
    logo = get_distro_logo(get_distribution_name())
    desktop = get_desktop_environment()

    info = {}
    info['uptime'] = "Uptime: " + get_uptime()
    info['kernel'] = "Kernel: " + get_kernel_version()
    info['host'] = "Hostname: " + get_hostname()
    info['distro'] = "Distribution: " + get_distribution_name()
    info['user'] = "User: " + get_username()
    info['load'] = "Loadavg: " + get_loadavg()
    info['reso'] = "Resolution: " + get_resolution()
    info['pkgs'] = "Installed packages: " \
        + get_number_of_installed_packages()
    info['desktop'] = "Desktop: " + desktop + " " \
        + get_desktop_version(desktop)

    print(logo.format(**info))


class OSInfoError(Exception):
    """Trivial OSInfo error."""
    pass


if __name__ == '__main__':
    try:
        print_info()
    except OSInfoError as err:
        raise err
mit
4,414,962,370,184,419,000
39.338095
75
0.31177
false
3.843466
false
false
false
morenopc/edx-platform
lms/lib/comment_client/utils.py
3
4274
from contextlib import contextmanager
from dogapi import dog_stats_api
import logging
import requests
from django.conf import settings
from time import time
from uuid import uuid4

from django.utils.translation import get_language

log = logging.getLogger(__name__)


def strip_none(dic):
    return dict([(k, v) for k, v in dic.iteritems() if v is not None])


def strip_blank(dic):
    def _is_blank(v):
        return isinstance(v, str) and len(v.strip()) == 0
    return dict([(k, v) for k, v in dic.iteritems() if not _is_blank(v)])


def extract(dic, keys):
    if isinstance(keys, str):
        return strip_none({keys: dic.get(keys)})
    else:
        return strip_none({k: dic.get(k) for k in keys})


def merge_dict(dic1, dic2):
    return dict(dic1.items() + dic2.items())


@contextmanager
def request_timer(request_id, method, url, tags=None):
    start = time()
    with dog_stats_api.timer('comment_client.request.time', tags=tags):
        yield
    end = time()
    duration = end - start
    log.info(
        "comment_client_request_log: request_id={request_id}, method={method}, "
        "url={url}, duration={duration}".format(
            request_id=request_id,
            method=method,
            url=url,
            duration=duration
        )
    )


def perform_request(method, url, data_or_params=None, raw=False,
                    metric_action=None, metric_tags=None, paged_results=False):
    if metric_tags is None:
        metric_tags = []

    metric_tags.append(u'method:{}'.format(method))
    if metric_action:
        metric_tags.append(u'action:{}'.format(metric_action))

    if data_or_params is None:
        data_or_params = {}
    headers = {
        'X-Edx-Api-Key': getattr(settings, "COMMENTS_SERVICE_KEY", None),
        'Accept-Language': get_language(),
    }
    request_id = uuid4()
    request_id_dict = {'request_id': request_id}

    if method in ['post', 'put', 'patch']:
        data = data_or_params
        params = request_id_dict
    else:
        data = None
        params = merge_dict(data_or_params, request_id_dict)
    with request_timer(request_id, method, url, metric_tags):
        response = requests.request(
            method,
            url,
            data=data,
            params=params,
            headers=headers,
            timeout=5
        )

    metric_tags.append(u'status_code:{}'.format(response.status_code))
    if response.status_code > 200:
        metric_tags.append(u'result:failure')
    else:
        metric_tags.append(u'result:success')
    dog_stats_api.increment('comment_client.request.count', tags=metric_tags)

    if 200 < response.status_code < 500:
        raise CommentClientRequestError(response.text, response.status_code)
    # Heroku returns a 503 when an application is in maintenance mode
    elif response.status_code == 503:
        raise CommentClientMaintenanceError(response.text)
    elif response.status_code == 500:
        raise CommentClient500Error(response.text)
    else:
        if raw:
            return response.text
        else:
            data = response.json()
            if paged_results:
                dog_stats_api.histogram(
                    'comment_client.request.paged.result_count',
                    value=len(data.get('collection', [])),
                    tags=metric_tags
                )
                dog_stats_api.histogram(
                    'comment_client.request.paged.page',
                    value=data.get('page', 1),
                    tags=metric_tags
                )
                dog_stats_api.histogram(
                    'comment_client.request.paged.num_pages',
                    value=data.get('num_pages', 1),
                    tags=metric_tags
                )
            return data


class CommentClientError(Exception):
    def __init__(self, msg):
        self.message = msg

    def __str__(self):
        return repr(self.message)


class CommentClientRequestError(CommentClientError):
    def __init__(self, msg, status_code=400):
        super(CommentClientRequestError, self).__init__(msg)
        self.status_code = status_code


class CommentClient500Error(CommentClientError):
    pass


class CommentClientMaintenanceError(CommentClientError):
    pass
agpl-3.0
7,686,069,124,591,405,000
28.273973
80
0.597099
false
3.885455
false
false
false
kosgroup/odoo
addons/project_issue/tests/test_issue_process.py
24
1416
# -*- coding: utf-8 -*-

from odoo.addons.project_issue.tests.common import TestIssueUsers


class TestIssueProcess(TestIssueUsers):

    def test_issue_process(self):
        # Sending mail to get more details.
        vals = {'email_from': 'support@mycompany.com',
                'email_to': 'Robert_Adersen@yahoo.com',
                'subject': 'We need more details regarding your issue in HR module',
                'body_html': """
                    <p>
                        Hello Mr. Adersen,
                    </p>
                    <p>
                        We need more details about your issue in the HR module.
                        Could you please send us complete details about the error
                        eg. error message, traceback or what operations you were
                        doing when you the error occured ?
                    </p>
                    <p>
                        Thank You.
                    </p>
                    <pre>
                        --
                        YourCompany
                        info@yourcompany.example.com
                        +1 555 123 8069
                    </pre>
                """}
        crm_bug_id = self.ref('project_issue.crm_case_buginaccountsmodule0')
        mail = self.env["mail.mail"].with_context(active_model='project.issue',
                                                  active_id=crm_bug_id,
                                                  active_ids=[crm_bug_id]).create(vals)
        mail.send()
gpl-3.0
-3,139,989,795,240,224,300
37.27027
96
0.477401
false
4.553055
false
false
false