text
stringlengths 6
947k
| repo_name
stringlengths 5
100
| path
stringlengths 4
231
| language
stringclasses 1
value | license
stringclasses 15
values | size
int64 6
947k
| score
float64 0
0.34
|
---|---|---|---|---|---|---|
import csv
from bs4 import BeautifulSoup
from collections import Counter
import re
import os
OUTPUT_NAME = os.getenv('OUTPUT_NAME',
'data_detikcom_labelled_740_7_class.csv')
csv_file = open('data_detikcom_labelled_740.csv')
csv_reader = csv.DictReader(csv_file)
# Tranform individual label to candidate pair label
label_map = {
'pos_ahok': 'pos_ahok_djarot',
'pos_djarot': 'pos_ahok_djarot',
'pos_anies': 'pos_anies_sandi',
'pos_sandi': 'pos_anies_sandi',
'pos_agus': 'pos_agus_sylvi',
'pos_sylvi': 'pos_agus_sylvi',
'neg_ahok': 'neg_ahok_djarot',
'neg_djarot': 'neg_ahok_djarot',
'neg_anies': 'neg_anies_sandi',
'neg_sandi': 'neg_anies_sandi',
'neg_agus': 'neg_agus_sylvi',
'neg_sylvi': 'neg_agus_sylvi',
'oot': 'oot'
}
fields = ['title', 'raw_content', 'labels']
train_file = open(OUTPUT_NAME, 'w')
csv_writer = csv.DictWriter(train_file, fields)
csv_writer.writeheader()
for row in csv_reader:
title = row['title']
raw_content = row['raw_content']
labels = []
label_1 = row['sentiment_1']
if label_1 != '':
candidate_pair_label = label_map[label_1]
if not candidate_pair_label in labels:
labels.append(candidate_pair_label)
label_2 = row['sentiment_2']
if label_2 != '':
candidate_pair_label = label_map[label_2]
if not candidate_pair_label in labels:
labels.append(candidate_pair_label)
label_3 = row['sentiment_3']
if label_3 != '':
candidate_pair_label = label_map[label_3]
if not candidate_pair_label in labels:
labels.append(candidate_pair_label)
# Skip content if label not exists
if not labels: continue
label_str = ','.join(labels)
data_row = {'title': title, 'raw_content': raw_content,
'labels': label_str}
csv_writer.writerow(data_row)
print OUTPUT_NAME, 'created'
csv_file.close()
train_file.close()
| CodeRiderz/rojak | rojak-analyzer/convert_13_labels_to_7_labels.py | Python | bsd-3-clause | 1,929 | 0.004147 |
# Copyright (c) 2016 Iotic Labs Ltd. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://github.com/Iotic-Labs/py-IoticAgent/blob/master/LICENSE
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Constants to hide XSD Datatypes used by Point Values and Properties
These help to describe the data in a feed so the receiving Thing can know what kind of data to expect
See also http://www.w3.org/TR/xmlschema-2/#built-in-datatypes
"""
from __future__ import unicode_literals
# Binary and boolean XSD type names.
BASE64 = 'base64Binary'
'''Represents a sequence of binary octets (bytes) encoded according to RFC 2045,
the standard defining the MIME types (look under "6.8 Base64 Content-Transfer-Encoding").
'''
BOOLEAN = 'boolean'
'''A Boolean true or false value. Representations of true are "true" and "1"; false is denoted as "false" or "0".'''
# Fixed-width integer XSD type names.
BYTE = 'byte'
'''A signed 8-bit integer in the range [-128 -> +127]. Derived from the short datatype.'''
UNSIGNED_BYTE = 'unsignedByte'
'''An unsigned 8-bit integer in the range [0, 255]. Derived from the unsignedShort datatype.'''
# Date and time XSD type names.
DATE = 'date'
'''Represents a specific date. The syntax is the same as that for the date part of dateTime,
with an optional time zone indicator. Example: "1889-09-24".
'''
DATETIME = 'dateTime'
'''
Represents a specific instant of time. It has the form YYYY-MM-DDThh:mm:ss followed by an optional time-zone suffix.
`YYYY` is the year, `MM` is the month number, `DD` is the day number,
`hh` the hour in 24-hour format, `mm` the minute, and `ss` the second (a decimal and fraction are allowed for the
seconds part).
The optional zone suffix is either `"Z"` for Universal Coordinated Time (UTC), or a time offset of the form
`"[+|-]hh:mm"`, giving the difference between UTC and local time in hours and minutes.
Example: "2004-10-31T21:40:35.5-07:00" is a time on Halloween 2004 in Mountain Standard time. The equivalent UTC would
be "2004-11-01T04:40:35.5Z".
'''
# Decimal and floating-point XSD type names.
DECIMAL = 'decimal'
'''Any base-10 fixed-point number. There must be at least one digit to the left of the decimal point, and a leading "+"
or "-" sign is allowed.
Examples: "42", "-3.14159", "+0.004".
'''
DOUBLE = 'double'
'''A 64-bit floating-point decimal number as specified in the IEEE 754-1985 standard. The external form is the same as
the float datatype.
'''
FLOAT = 'float'
'''A 32-bit floating-point decimal number as specified in the IEEE 754-1985 standard.
Allowable values are the same as in the decimal type, optionally followed by an exponent,
or one of the special values "INF" (positive infinity), "-INF" (negative infinity), or "NaN" (not a number).
The exponent starts with either "e" or "E", optionally followed by a sign, and one or more digits.
Example: "6.0235e-23".
'''
INT = 'int'
'''Represents a 32-bit signed integer in the range [-2,147,483,648, 2,147,483,647]. Derived from the long datatype.'''
INTEGER = 'integer'
'''Represents a signed integer. Values may begin with an optional "+" or "-" sign. Derived from the decimal datatype.'''
LONG = 'long'
'''A signed, extended-precision integer; at least 18 digits are guaranteed. Derived from the integer datatype. '''
STRING = 'string'
'''Any sequence of zero or more characters.'''
TIME = 'time'
'''A moment of time that repeats every day. The syntax is the same as that for dateTime,
omitting everything up to and including the separator "T". Examples: "00:00:00" is midnight,
and "13:04:00" is an hour and four minutes after noon.
'''
URI = 'anyURI'
'''
The data must conform to the syntax of a Uniform Resource Identifier (URI), as defined in RFC 2396
as amended by RFC 2732. Example: "http://www.nmt.edu/tcc/"
is the URI for the New Mexico Tech Computer Center's index page.
'''
# Not an XSD built-in: an IoticAgent-specific marker for IRI-valued properties.
IRI = 'IRI'
'''Only for use with property API calls. Used to handle properties which require an IRI (URIRef) value.'''
| Iotic-Labs/py-IoticAgent | src/IoticAgent/Datatypes.py | Python | apache-2.0 | 4,222 | 0.005921 |
'''
Kaya Baber
Physics 440 - Computational Physics
Assignment 3
Problem 1
Hamiltonian Dynamics of a Nonlinear Pendulum
Consider a simple pendulum of length l in a
gravitational field g. The frequency in the limit of small angles is Ω_0 ≡ √(g/l), but do not assume the limit
of small angles for the following calculations.
(a) Start with the Hamiltonian and develop two first order equations for the angle θ and its conjugate
momentum p_θ .
((d^2)θ/d(t^2)) = - (g/l)sin(θ)
θ_dot = P_θ/(m*l^2)
P_θ_dot = -m*g*l*sin(θ)
(b) Use a second-order leapfrog algorithm to compute the motion of the pendulum. If we choose a
computational unit of time [T ] = Ω_0^(−1) , then 2π computational time units equals one period in the limit of
small oscillations. Another way to think about it is that we can choose a set of units such that Ω_0 = 1.
Make a graph of phase space trajectories for a variety of initial conditions.
(c) Liouville’s Theorem states that the phase-space volume of a infinitesimally close ensemble of states is
conserved. Demonstrate Liouville’s Theorem by considering an ensemble of closely spaced initial conditions.
'''
| KayaBaber/Computational-Physics | Assignment_3_chaos_and_pendulums/Pre-GitHub-versions/Phys440_Assignment03_Prob1 (1).py | Python | mit | 1,186 | 0.009434 |
# Copyright (c) 2013 Jordan Halterman <jordan.halterman@gmail.com>
# See LICENSE for details.
# Example: using ActiveRedis set objects, both anonymous and keyed.
import sys, os
# Make the package importable when running this example from the source tree.
sys.path.insert(0, os.path.dirname(os.path.dirname(__file__)))
from active_redis import ActiveRedis
redis = ActiveRedis()
# Create an unnamed set.
myset = redis.set()
# Add items to the set.
myset.add('foo')
myset.add('bar')
# We can also create a named set by passing a key to the constructor.
myset = redis.set('myset')
myset.add('foo')
# Dropping the Python reference does not remove the data stored in Redis:
del myset
# re-opening the same key still shows the previously stored member.
myset = redis.set('myset')
print myset  # set([u'foo'])
# delete() removes the backing key from Redis, leaving an empty set.
myset.delete()
print myset  # set()
| kuujo/active-redis | examples/set.py | Python | mit | 554 | 0.00722 |
#!/usr/bin/env python
# encoding: utf-8
"""A test module"""
import datetime
import tempfile
import os
import shutil
import scores.common as common
class TestCommon(object):
    """Unit tests for the scores.common timestamp conversion helpers."""

    def test_date_function(self):
        """Round-trip a datetime through timestamp conversion and back."""
        # Truncate to whole seconds: timestamps carry no microseconds.
        now = datetime.datetime.now().replace(microsecond=0)
        stamp = common.datetime_to_timestamp(now)
        assert stamp > 0
        assert common.timestamp_to_datetime(stamp) == now
| zesk06/scores | tests/common_test.py | Python | mit | 524 | 0 |
# -*- coding: utf-8 -*-
# Generated by Django 1.11.3 on 2017-07-31 00:22
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
    """Adjust Meta options on the smslib gateway/route models.

    Auto-generated by Django 1.11.3; prefer regenerating over hand-editing.
    """

    dependencies = [
        ('configuration_management_tools', '0001_initial'),
    ]

    operations = [
        # managed=False: these tables belong to the external smslib schema,
        # so Django must not create or alter them.
        migrations.AlterModelOptions(
            name='smslibgateways',
            options={'managed': False, 'verbose_name': 'Gateways'},
        ),
        migrations.AlterModelOptions(
            name='smslibnumberroutes',
            options={'managed': False, 'verbose_name': 'Routes'},
        ),
    ]
| lsantagata/smslib_ui | src/configuration_management_tools/migrations/0002_auto_20170731_0022.py | Python | gpl-3.0 | 608 | 0 |
import os, sys, shutil

# Change back to the build dir so the relative paths below resolve
# against this script's own directory.
if os.path.dirname( sys.argv[0] ) != "":
    os.chdir( os.path.dirname( sys.argv[0] ) )

# Find setuptools in the source tree's lib/ directory (three levels up).
scramble_lib = os.path.join( "..", "..", "..", "lib" )
sys.path.append( scramble_lib )
import get_platform # fixes fat python 2.5
from ez_setup import use_setuptools
use_setuptools( download_delay=8, to_dir=scramble_lib )
from setuptools import *

# Get the egg build tag, if any, from the .galaxy_tag marker file.
if os.access( ".galaxy_tag", os.F_OK ):
    tagfile = open( ".galaxy_tag", "r" )
    tag = tagfile.readline().strip()
else:
    tag = None

# In case you're running this by hand from a dirty module source dir,
# remove stale build products before rebuilding.
for dir in [ "build", "dist" ]:
    if os.access( dir, os.F_OK ):
        print "scramble.py: removing dir:", dir
        shutil.rmtree( dir )

# Reset argv for distutils: simulate running
# "setup.py egg_info [--tag-build=TAG --no-svn-revision] bdist_egg".
me = sys.argv[0]
sys.argv = [ me ]
sys.argv.append( "egg_info" )
if tag is not None:
    sys.argv.append( "--tag-build=%s" %tag )
    # svn revision (if any) is handled directly in tag-build
    sys.argv.append( "--no-svn-revision" )
sys.argv.append( "bdist_egg" )

# Execute the module's setup.py with the argv prepared above.
execfile( "setup.py", globals(), locals() )
| volpino/Yeps-EURAC | scripts/scramble/scripts/generic.py | Python | mit | 1,149 | 0.035683 |
"""
__graph_MT_post__OUT2.py___________________________________________________________
Automatically generated graphical appearance ---> MODIFY DIRECTLY WITH CAUTION
__________________________________________________________________________
"""
import tkFont
from graphEntity import *
from GraphicalForm import *
from ATOM3Constraint import *
class graph_MT_post__OUT2(graphEntity):
    """Auto-generated graphical appearance for the MT_post__OUT2 entity.

    Draws a filled rectangle with a static title, one connector anchor and
    a dynamic text item showing the semantic object's MT_label__ value.
    """

    def __init__(self, x, y, semObject = None):
        # Underlying semantic (model) object this appearance represents.
        self.semanticObject = semObject
        # Fixed bounding-box size of the drawn shape, in canvas units.
        self.sizeX, self.sizeY = 172, 82
        graphEntity.__init__(self, x, y)
        self.ChangesAtRunTime = 0
        self.constraintList = []
        if self.semanticObject: atribs = self.semanticObject.attributesToDraw()
        else: atribs = None
        self.graphForms = []
        self.imageDict = self.getImageDict()

    def DrawObject(self, drawing, showGG = 0):
        """Render this entity onto *drawing*; showGG also draws the GG label."""
        self.dc = drawing
        if showGG and self.semanticObject: self.drawGGLabel(drawing)
        # Invisible zero-size oval acting as the connection anchor point.
        h = drawing.create_oval(self.translate([189.0, 62.0, 189.0, 62.0]), tags = (self.tag, 'connector'), outline = '', fill = '' )
        self.connectors.append( h )
        # Main body rectangle.
        h = drawing.create_rectangle(self.translate([20.0, 20.0, 190.0, 100.0]), tags = self.tag, stipple = '', width = 1, outline = 'black', fill = 'moccasin')
        self.gf4 = GraphicalForm(drawing, h, "gf4")
        self.graphForms.append(self.gf4)
        # Static title text.
        font = tkFont.Font( family='Arial', size=12, weight='normal', slant='roman', underline=0)
        h = drawing.create_text(self.translate([81.0, 37.0, 81.0, 12.0])[:2], tags = self.tag, font=font, fill = 'black', anchor = 'center', text = 'MT_post__OUT2', width = '0', justify= 'left', stipple='' )
        self.gf128 = GraphicalForm(drawing, h, 'gf128', fontObject=font)
        self.graphForms.append(self.gf128)
        # Dynamic text bound to the semantic object's MT_label__ attribute.
        helv12 = tkFont.Font ( family="Helvetica", size=12, weight="bold" )
        h = drawing.create_text(self.translate([-3, -3]), font=helv12,
                                tags = (self.tag, self.semanticObject.getClass()),
                                fill = "black",
                                text=self.semanticObject.MT_label__.toString())
        self.attr_display["MT_label__"] = h
        self.gf_label = GraphicalForm(drawing, h, 'gf_label', fontObject=helv12)
        self.graphForms.append(self.gf_label)

    def postCondition( self, actionID, * params):
        # No post-condition constraints defined for this entity.
        return None

    def preCondition( self, actionID, * params):
        # No pre-condition constraints defined for this entity.
        return None

    def getImageDict( self ):
        # No images are used by this appearance.
        imageDict = dict()
        return imageDict

new_class = graph_MT_post__OUT2
| levilucio/SyVOLT | UMLRT2Kiltera_MM/graph_MT_post__OUT2.py | Python | mit | 2,604 | 0.024578 |
r'''
<license>
CSPLN_MaryKeelerEdition; Manages images to which notes can be added.
Copyright (C) 2015-2016, Thomas Kercheval
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
___________________________________________________________</license>
Description:
For creating CSPLN webapps for LINUX, from scaffolding.
Inputs:
Version number, of MKE_vxx_xx_xx scaffolding file.
Where each x corresponds to a current version number.
Input as "xx_xx_xx"
Number of web applications
Outputs:
Web applications, number depends on Input.
Puts web2py.py in each web_app (not included in windows version).
Puts scaffolding (current app version) into each web2py frame.
Renames scaffolding application to 'MKE_Static_Name'.
Currently:
To Do:
Done:
'''
import os, sys, shutil
from the_decider import resolve_relative_path as resolve_path
def check_file_exist(path):
    """Abort the program if no file exists at the given path.

    Returns None when the path exists; otherwise terminates the process
    via sys.exit with an explanatory message.
    """
    # Guard clause replaces the original empty `pass` branch.
    if not os.path.exists(path):
        sys.exit('File {} doesn\'t exist'.format(path))
    return None
def grab_out_paths(number_apps, app_path):
    """
    From the number of applications necessary, create a list
    of pathnames where we will create linux applications.

    app_path is a template containing {os} and {pat} placeholders.
    """
    out_dir = resolve_path(__file__, app_path)
    project_part = 'P{}'
    # Renamed from `os`: the original local name shadowed the os module.
    os_name = "linux"
    out_paths = []
    for num in range(1, number_apps + 1):
        strin = project_part.format(str(num))
        # Parenthesized print works identically under Python 2 and 3 here.
        print("{part}, preparing for generation.".format(part=strin))
        out_paths.append(out_dir.format(os=os_name, pat=strin))
    return out_paths
def grab_web2py_frame():
    """Return (framework_dir, launcher_script) paths for web2py.

    Both paths are verified to exist; missing files abort the program.
    """
    frame_path = resolve_path(__file__, '../apps/scaffolding/linux/web2py')
    script_path = resolve_path(__file__,'../apps/scaffolding/common/web2py.py')
    # Validate the launcher first, then the framework directory.
    check_file_exist(script_path)
    check_file_exist(frame_path)
    return frame_path, script_path
def grab_scaffold_app(current_version):
    """Return the path of the versioned scaffolding app, verifying it exists."""
    scaffold = resolve_path(
        __file__,
        '../apps/scaffolding/version/MKE_v{}'.format(current_version))
    check_file_exist(scaffold)
    return scaffold
def copy_webframez(number_apps, app_path):
    """
    For each path where we intend to create a linux application,
    create a copy of the web2py framework and a modified copy
    of web2py.py.

    Returns the list of application root paths that were populated.
    """
    webframe, webdotpy = grab_web2py_frame()
    out_paths = grab_out_paths(number_apps, app_path)
    for path in out_paths:
        # copytree creates <path>/web2py; it raises if that dir already exists.
        shutil.copytree(webframe, os.path.join(path, 'web2py'))
        next_path = os.path.join(path, 'web2py')
        shutil.copy(webdotpy, next_path)
        print ' web2py frame copied to: {}'.format(path)
        print ' web2py.py copied to: {}'.format(next_path)
    return out_paths
def modify_out_paths(int_paths):
    """
    Modifies the out_paths from the locations of the web2py framework
    to where our applications will be generated.
    """
    addition = 'web2py/applications'
    # Append the applications sub-directory to every framework root.
    return [os.path.join(entry, addition) for entry in int_paths]
def grab_filename_from_path(in_path):
    """Input a path, return last chunck"""
    import ntpath
    directory, leaf = ntpath.split(in_path)
    if leaf:
        return leaf
    # Path ended in a separator: fall back to the last directory name.
    return ntpath.basename(directory)
def copy_app(version, out_paths):
    """
    Creates an application for every copy of the web2py framework,
    from scaffolding application.

    Each copied app is renamed to the fixed name 'MKE_Static_Name'.
    """
    scaff_app = grab_scaffold_app(version)
    filename = grab_filename_from_path(scaff_app)
    for path in out_paths:
        shutil.copytree(scaff_app, os.path.join(path, filename))
        # Rename the versioned copy so every deployment shares one app name.
        old_name = os.path.join(path, filename)
        new_name = os.path.join(path, 'MKE_Static_Name')
        os.rename(old_name, new_name)
    return None
def deploy_scaffolding(version_now, num_apps, app_path):
    """
    Deploys the web2py framework and the current version of our
    scaffolding, as many times as is necessary.

    version_now: scaffold version string, e.g. '00_01_02'.
    num_apps:    how many application copies to generate.
    app_path:    template path with {os} and {pat} placeholders.
    """
    print "\n Creating Linux applications...\n" + "_"*79
    # First lay down the web2py frameworks, then install the app into each.
    out_paths = copy_webframez(num_apps, app_path)
    new_paths = modify_out_paths(out_paths)
    copy_app(version_now, new_paths)
    print "_"*79
    return None
if __name__ == "__main__":
    # Default build: ten linux web apps from scaffold version 00.01.02.
    NUM_APPS = 10
    VERSION = '00_01_02'
    APP_PATH = '../apps/web_apps/{os}/{pat}'
    deploy_scaffolding(VERSION, NUM_APPS, APP_PATH)
| SpaceKatt/CSPLN | scripts/create_web_apps_linux.py | Python | gpl-3.0 | 5,043 | 0.00238 |
from __future__ import print_function
import glob
import os
import re
import warnings
try:
from importlib import import_module
except ImportError:
import_module = __import__
from distutils.version import LooseVersion
import pytest
import numpy as np
from numpy.random import rand
from pandas import (DataFrame, MultiIndex, read_csv, Timestamp, Index,
date_range, Series)
from pandas.compat import (map, zip, StringIO, string_types, BytesIO,
is_platform_windows, PY3)
from pandas.io.common import URLError, urlopen, file_path_to_url
from pandas.io.html import read_html
from pandas._libs.parsers import ParserError
import pandas.util.testing as tm
from pandas.util.testing import makeCustomDataframe as mkdf, network
def _have_module(module_name):
try:
import_module(module_name)
return True
except ImportError:
return False
def _skip_if_no(module_name):
    """Skip the current test when *module_name* cannot be imported."""
    if not _have_module(module_name):
        pytest.skip("{0!r} not found".format(module_name))
def _skip_if_none_of(module_names):
    """Skip the test unless every name in *module_names* is importable.

    Accepts a single module name or an iterable of names. Additionally
    skips when bs4 is requested and the broken 4.2.0 release is installed.
    """
    if isinstance(module_names, string_types):
        _skip_if_no(module_names)
        if module_names == 'bs4':
            import bs4
            if bs4.__version__ == LooseVersion('4.2.0'):
                pytest.skip("Bad version of bs4: 4.2.0")
    else:
        not_found = [module_name for module_name in module_names if not
                     _have_module(module_name)]
        if set(not_found) & set(module_names):
            pytest.skip("{0!r} not found".format(not_found))
        if 'bs4' in module_names:
            import bs4
            if bs4.__version__ == LooseVersion('4.2.0'):
                pytest.skip("Bad version of bs4: 4.2.0")
# Directory holding the HTML/CSV fixture files used by these tests.
DATA_PATH = tm.get_data_path()


def assert_framelist_equal(list1, list2, *args, **kwargs):
    """Assert two lists of DataFrames are element-wise equal and non-empty.

    Extra positional/keyword arguments are forwarded to
    tm.assert_frame_equal for each pair of frames.
    """
    assert len(list1) == len(list2), ('lists are not of equal size '
                                      'len(list1) == {0}, '
                                      'len(list2) == {1}'.format(len(list1),
                                                                 len(list2)))
    msg = 'not all list elements are DataFrames'
    both_frames = all(map(lambda x, y: isinstance(x, DataFrame) and
                          isinstance(y, DataFrame), list1, list2))
    assert both_frames, msg
    for frame_i, frame_j in zip(list1, list2):
        tm.assert_frame_equal(frame_i, frame_j, *args, **kwargs)
        # Empty frames compare equal trivially; reject them explicitly.
        assert not frame_i.empty, 'frames are both empty'
def test_bs4_version_fails():
    """read_html must raise when the broken bs4 4.2.0 release is in use."""
    _skip_if_none_of(('bs4', 'html5lib'))
    import bs4
    if bs4.__version__ == LooseVersion('4.2.0'):
        tm.assert_raises(AssertionError, read_html, os.path.join(DATA_PATH,
                                                                 "spam.html"),
                         flavor='bs4')
class ReadHtmlMixin(object):
    """Mixin that calls read_html with the subclass's default parser flavor."""

    def read_html(self, *args, **kwargs):
        # Inject the class's flavor unless the caller explicitly overrides it.
        kwargs.setdefault('flavor', self.flavor)
        return read_html(*args, **kwargs)
class TestReadHtml(ReadHtmlMixin):
flavor = 'bs4'
spam_data = os.path.join(DATA_PATH, 'spam.html')
spam_data_kwargs = {}
if PY3:
spam_data_kwargs['encoding'] = 'UTF-8'
banklist_data = os.path.join(DATA_PATH, 'banklist.html')
@classmethod
def setup_class(cls):
_skip_if_none_of(('bs4', 'html5lib'))
def test_to_html_compat(self):
df = mkdf(4, 3, data_gen_f=lambda *args: rand(), c_idx_names=False,
r_idx_names=False).applymap('{0:.3f}'.format).astype(float)
out = df.to_html()
res = self.read_html(out, attrs={'class': 'dataframe'}, index_col=0)[0]
tm.assert_frame_equal(res, df)
@network
def test_banklist_url(self):
url = 'http://www.fdic.gov/bank/individual/failed/banklist.html'
df1 = self.read_html(url, 'First Federal Bank of Florida',
attrs={"id": 'table'})
df2 = self.read_html(url, 'Metcalf Bank', attrs={'id': 'table'})
assert_framelist_equal(df1, df2)
@network
def test_spam_url(self):
url = ('http://ndb.nal.usda.gov/ndb/foods/show/1732?fg=&man=&'
'lfacet=&format=&count=&max=25&offset=&sort=&qlookup=spam')
df1 = self.read_html(url, '.*Water.*')
df2 = self.read_html(url, 'Unit')
assert_framelist_equal(df1, df2)
@tm.slow
def test_banklist(self):
df1 = self.read_html(self.banklist_data, '.*Florida.*',
attrs={'id': 'table'})
df2 = self.read_html(self.banklist_data, 'Metcalf Bank',
attrs={'id': 'table'})
assert_framelist_equal(df1, df2)
def test_spam_no_types(self):
# infer_types removed in #10892
df1 = self.read_html(self.spam_data, '.*Water.*')
df2 = self.read_html(self.spam_data, 'Unit')
assert_framelist_equal(df1, df2)
assert df1[0].iloc[0, 0] == 'Proximates'
assert df1[0].columns[0] == 'Nutrient'
def test_spam_with_types(self):
df1 = self.read_html(self.spam_data, '.*Water.*')
df2 = self.read_html(self.spam_data, 'Unit')
assert_framelist_equal(df1, df2)
assert df1[0].iloc[0, 0] == 'Proximates'
assert df1[0].columns[0] == 'Nutrient'
def test_spam_no_match(self):
dfs = self.read_html(self.spam_data)
for df in dfs:
assert isinstance(df, DataFrame)
def test_banklist_no_match(self):
dfs = self.read_html(self.banklist_data, attrs={'id': 'table'})
for df in dfs:
assert isinstance(df, DataFrame)
def test_spam_header(self):
df = self.read_html(self.spam_data, '.*Water.*', header=1)[0]
assert df.columns[0] == 'Proximates'
assert not df.empty
def test_skiprows_int(self):
df1 = self.read_html(self.spam_data, '.*Water.*', skiprows=1)
df2 = self.read_html(self.spam_data, 'Unit', skiprows=1)
assert_framelist_equal(df1, df2)
def test_skiprows_xrange(self):
df1 = self.read_html(self.spam_data, '.*Water.*', skiprows=range(2))[0]
df2 = self.read_html(self.spam_data, 'Unit', skiprows=range(2))[0]
tm.assert_frame_equal(df1, df2)
def test_skiprows_list(self):
df1 = self.read_html(self.spam_data, '.*Water.*', skiprows=[1, 2])
df2 = self.read_html(self.spam_data, 'Unit', skiprows=[2, 1])
assert_framelist_equal(df1, df2)
def test_skiprows_set(self):
df1 = self.read_html(self.spam_data, '.*Water.*', skiprows=set([1, 2]))
df2 = self.read_html(self.spam_data, 'Unit', skiprows=set([2, 1]))
assert_framelist_equal(df1, df2)
def test_skiprows_slice(self):
df1 = self.read_html(self.spam_data, '.*Water.*', skiprows=1)
df2 = self.read_html(self.spam_data, 'Unit', skiprows=1)
assert_framelist_equal(df1, df2)
def test_skiprows_slice_short(self):
df1 = self.read_html(self.spam_data, '.*Water.*', skiprows=slice(2))
df2 = self.read_html(self.spam_data, 'Unit', skiprows=slice(2))
assert_framelist_equal(df1, df2)
def test_skiprows_slice_long(self):
df1 = self.read_html(self.spam_data, '.*Water.*', skiprows=slice(2, 5))
df2 = self.read_html(self.spam_data, 'Unit', skiprows=slice(4, 1, -1))
assert_framelist_equal(df1, df2)
def test_skiprows_ndarray(self):
df1 = self.read_html(self.spam_data, '.*Water.*',
skiprows=np.arange(2))
df2 = self.read_html(self.spam_data, 'Unit', skiprows=np.arange(2))
assert_framelist_equal(df1, df2)
def test_skiprows_invalid(self):
with tm.assert_raises_regex(TypeError, 'is not a valid type '
'for skipping rows'):
self.read_html(self.spam_data, '.*Water.*', skiprows='asdf')
def test_index(self):
df1 = self.read_html(self.spam_data, '.*Water.*', index_col=0)
df2 = self.read_html(self.spam_data, 'Unit', index_col=0)
assert_framelist_equal(df1, df2)
def test_header_and_index_no_types(self):
df1 = self.read_html(self.spam_data, '.*Water.*', header=1,
index_col=0)
df2 = self.read_html(self.spam_data, 'Unit', header=1, index_col=0)
assert_framelist_equal(df1, df2)
def test_header_and_index_with_types(self):
df1 = self.read_html(self.spam_data, '.*Water.*', header=1,
index_col=0)
df2 = self.read_html(self.spam_data, 'Unit', header=1, index_col=0)
assert_framelist_equal(df1, df2)
def test_infer_types(self):
# 10892 infer_types removed
df1 = self.read_html(self.spam_data, '.*Water.*', index_col=0)
df2 = self.read_html(self.spam_data, 'Unit', index_col=0)
assert_framelist_equal(df1, df2)
def test_string_io(self):
with open(self.spam_data, **self.spam_data_kwargs) as f:
data1 = StringIO(f.read())
with open(self.spam_data, **self.spam_data_kwargs) as f:
data2 = StringIO(f.read())
df1 = self.read_html(data1, '.*Water.*')
df2 = self.read_html(data2, 'Unit')
assert_framelist_equal(df1, df2)
def test_string(self):
with open(self.spam_data, **self.spam_data_kwargs) as f:
data = f.read()
df1 = self.read_html(data, '.*Water.*')
df2 = self.read_html(data, 'Unit')
assert_framelist_equal(df1, df2)
def test_file_like(self):
with open(self.spam_data, **self.spam_data_kwargs) as f:
df1 = self.read_html(f, '.*Water.*')
with open(self.spam_data, **self.spam_data_kwargs) as f:
df2 = self.read_html(f, 'Unit')
assert_framelist_equal(df1, df2)
@network
def test_bad_url_protocol(self):
with pytest.raises(URLError):
self.read_html('git://github.com', match='.*Water.*')
@network
def test_invalid_url(self):
try:
with pytest.raises(URLError):
self.read_html('http://www.a23950sdfa908sd.com',
match='.*Water.*')
except ValueError as e:
assert str(e) == 'No tables found'
@tm.slow
def test_file_url(self):
url = self.banklist_data
dfs = self.read_html(file_path_to_url(url), 'First',
attrs={'id': 'table'})
assert isinstance(dfs, list)
for df in dfs:
assert isinstance(df, DataFrame)
@tm.slow
def test_invalid_table_attrs(self):
url = self.banklist_data
with tm.assert_raises_regex(ValueError, 'No tables found'):
self.read_html(url, 'First Federal Bank of Florida',
attrs={'id': 'tasdfable'})
def _bank_data(self, *args, **kwargs):
return self.read_html(self.banklist_data, 'Metcalf',
attrs={'id': 'table'}, *args, **kwargs)
@tm.slow
def test_multiindex_header(self):
df = self._bank_data(header=[0, 1])[0]
assert isinstance(df.columns, MultiIndex)
@tm.slow
def test_multiindex_index(self):
df = self._bank_data(index_col=[0, 1])[0]
assert isinstance(df.index, MultiIndex)
@tm.slow
def test_multiindex_header_index(self):
df = self._bank_data(header=[0, 1], index_col=[0, 1])[0]
assert isinstance(df.columns, MultiIndex)
assert isinstance(df.index, MultiIndex)
@tm.slow
def test_multiindex_header_skiprows_tuples(self):
df = self._bank_data(header=[0, 1], skiprows=1, tupleize_cols=True)[0]
assert isinstance(df.columns, Index)
@tm.slow
def test_multiindex_header_skiprows(self):
df = self._bank_data(header=[0, 1], skiprows=1)[0]
assert isinstance(df.columns, MultiIndex)
@tm.slow
def test_multiindex_header_index_skiprows(self):
df = self._bank_data(header=[0, 1], index_col=[0, 1], skiprows=1)[0]
assert isinstance(df.index, MultiIndex)
assert isinstance(df.columns, MultiIndex)
@tm.slow
def test_regex_idempotency(self):
url = self.banklist_data
dfs = self.read_html(file_path_to_url(url),
match=re.compile(re.compile('Florida')),
attrs={'id': 'table'})
assert isinstance(dfs, list)
for df in dfs:
assert isinstance(df, DataFrame)
def test_negative_skiprows(self):
with tm.assert_raises_regex(ValueError,
r'\(you passed a negative value\)'):
self.read_html(self.spam_data, 'Water', skiprows=-1)
@network
def test_multiple_matches(self):
url = 'https://docs.python.org/2/'
dfs = self.read_html(url, match='Python')
assert len(dfs) > 1
@network
def test_python_docs_table(self):
url = 'https://docs.python.org/2/'
dfs = self.read_html(url, match='Python')
zz = [df.iloc[0, 0][0:4] for df in dfs]
assert sorted(zz) == sorted(['Repo', 'What'])
@tm.slow
def test_thousands_macau_stats(self):
all_non_nan_table_index = -2
macau_data = os.path.join(DATA_PATH, 'macau.html')
dfs = self.read_html(macau_data, index_col=0,
attrs={'class': 'style1'})
df = dfs[all_non_nan_table_index]
assert not any(s.isnull().any() for _, s in df.iteritems())
@tm.slow
def test_thousands_macau_index_col(self):
all_non_nan_table_index = -2
macau_data = os.path.join(DATA_PATH, 'macau.html')
dfs = self.read_html(macau_data, index_col=0, header=0)
df = dfs[all_non_nan_table_index]
assert not any(s.isnull().any() for _, s in df.iteritems())
def test_empty_tables(self):
"""
Make sure that read_html ignores empty tables.
"""
data1 = '''<table>
<thead>
<tr>
<th>A</th>
<th>B</th>
</tr>
</thead>
<tbody>
<tr>
<td>1</td>
<td>2</td>
</tr>
</tbody>
</table>'''
data2 = data1 + '''<table>
<tbody>
</tbody>
</table>'''
res1 = self.read_html(StringIO(data1))
res2 = self.read_html(StringIO(data2))
assert_framelist_equal(res1, res2)
def test_header_and_one_column(self):
"""
Don't fail with bs4 when there is a header and only one column
as described in issue #9178
"""
data = StringIO('''<html>
<body>
<table>
<thead>
<tr>
<th>Header</th>
</tr>
</thead>
<tbody>
<tr>
<td>first</td>
</tr>
</tbody>
</table>
</body>
</html>''')
expected = DataFrame(data={'Header': 'first'}, index=[0])
result = self.read_html(data)[0]
tm.assert_frame_equal(result, expected)
def test_tfoot_read(self):
"""
Make sure that read_html reads tfoot, containing td or th.
Ignores empty tfoot
"""
data_template = '''<table>
<thead>
<tr>
<th>A</th>
<th>B</th>
</tr>
</thead>
<tbody>
<tr>
<td>bodyA</td>
<td>bodyB</td>
</tr>
</tbody>
<tfoot>
{footer}
</tfoot>
</table>'''
data1 = data_template.format(footer="")
data2 = data_template.format(
footer="<tr><td>footA</td><th>footB</th></tr>")
d1 = {'A': ['bodyA'], 'B': ['bodyB']}
d2 = {'A': ['bodyA', 'footA'], 'B': ['bodyB', 'footB']}
tm.assert_frame_equal(self.read_html(data1)[0], DataFrame(d1))
tm.assert_frame_equal(self.read_html(data2)[0], DataFrame(d2))
def test_countries_municipalities(self):
# GH5048
data1 = StringIO('''<table>
<thead>
<tr>
<th>Country</th>
<th>Municipality</th>
<th>Year</th>
</tr>
</thead>
<tbody>
<tr>
<td>Ukraine</td>
<th>Odessa</th>
<td>1944</td>
</tr>
</tbody>
</table>''')
data2 = StringIO('''
<table>
<tbody>
<tr>
<th>Country</th>
<th>Municipality</th>
<th>Year</th>
</tr>
<tr>
<td>Ukraine</td>
<th>Odessa</th>
<td>1944</td>
</tr>
</tbody>
</table>''')
res1 = self.read_html(data1)
res2 = self.read_html(data2, header=0)
assert_framelist_equal(res1, res2)
def test_nyse_wsj_commas_table(self):
data = os.path.join(DATA_PATH, 'nyse_wsj.html')
df = self.read_html(data, index_col=0, header=0,
attrs={'class': 'mdcTable'})[0]
columns = Index(['Issue(Roll over for charts and headlines)',
'Volume', 'Price', 'Chg', '% Chg'])
nrows = 100
assert df.shape[0] == nrows
tm.assert_index_equal(df.columns, columns)
@tm.slow
def test_banklist_header(self):
from pandas.io.html import _remove_whitespace
def try_remove_ws(x):
try:
return _remove_whitespace(x)
except AttributeError:
return x
df = self.read_html(self.banklist_data, 'Metcalf',
attrs={'id': 'table'})[0]
ground_truth = read_csv(os.path.join(DATA_PATH, 'banklist.csv'),
converters={'Updated Date': Timestamp,
'Closing Date': Timestamp})
assert df.shape == ground_truth.shape
old = ['First Vietnamese American BankIn Vietnamese',
'Westernbank Puerto RicoEn Espanol',
'R-G Premier Bank of Puerto RicoEn Espanol',
'EurobankEn Espanol', 'Sanderson State BankEn Espanol',
'Washington Mutual Bank(Including its subsidiary Washington '
'Mutual Bank FSB)',
'Silver State BankEn Espanol',
'AmTrade International BankEn Espanol',
'Hamilton Bank, NAEn Espanol',
'The Citizens Savings BankPioneer Community Bank, Inc.']
new = ['First Vietnamese American Bank', 'Westernbank Puerto Rico',
'R-G Premier Bank of Puerto Rico', 'Eurobank',
'Sanderson State Bank', 'Washington Mutual Bank',
'Silver State Bank', 'AmTrade International Bank',
'Hamilton Bank, NA', 'The Citizens Savings Bank']
dfnew = df.applymap(try_remove_ws).replace(old, new)
gtnew = ground_truth.applymap(try_remove_ws)
converted = dfnew._convert(datetime=True, numeric=True)
date_cols = ['Closing Date', 'Updated Date']
converted[date_cols] = converted[date_cols]._convert(datetime=True,
coerce=True)
tm.assert_frame_equal(converted, gtnew)
@tm.slow
def test_gold_canyon(self):
gc = 'Gold Canyon'
with open(self.banklist_data, 'r') as f:
raw_text = f.read()
assert gc in raw_text
df = self.read_html(self.banklist_data, 'Gold Canyon',
attrs={'id': 'table'})[0]
assert gc in df.to_string()
    def test_different_number_of_rows(self):
        """Rows with fewer <td> cells than the header are padded with NaN,
        so the short-row table parses equal to the explicit-NaN table."""
        # Fully-populated reference table: missing values written out as 'nan'.
        expected = """<table border="1" class="dataframe">
                        <thead>
                            <tr style="text-align: right;">
                                <th></th>
                                <th>C_l0_g0</th>
                                <th>C_l0_g1</th>
                                <th>C_l0_g2</th>
                                <th>C_l0_g3</th>
                                <th>C_l0_g4</th>
                            </tr>
                        </thead>
                        <tbody>
                            <tr>
                                <th>R_l0_g0</th>
                                <td> 0.763</td>
                                <td> 0.233</td>
                                <td> nan</td>
                                <td> nan</td>
                                <td> nan</td>
                            </tr>
                            <tr>
                                <th>R_l0_g1</th>
                                <td> 0.244</td>
                                <td> 0.285</td>
                                <td> 0.392</td>
                                <td> 0.137</td>
                                <td> 0.222</td>
                            </tr>
                        </tbody>
                    </table>"""
        # Same table but with the first row's trailing cells simply omitted.
        out = """<table border="1" class="dataframe">
                    <thead>
                        <tr style="text-align: right;">
                            <th></th>
                            <th>C_l0_g0</th>
                            <th>C_l0_g1</th>
                            <th>C_l0_g2</th>
                            <th>C_l0_g3</th>
                            <th>C_l0_g4</th>
                        </tr>
                    </thead>
                    <tbody>
                        <tr>
                            <th>R_l0_g0</th>
                            <td> 0.763</td>
                            <td> 0.233</td>
                        </tr>
                        <tr>
                            <th>R_l0_g1</th>
                            <td> 0.244</td>
                            <td> 0.285</td>
                            <td> 0.392</td>
                            <td> 0.137</td>
                            <td> 0.222</td>
                        </tr>
                    </tbody>
                </table>"""
        expected = self.read_html(expected, index_col=0)[0]
        res = self.read_html(out, index_col=0)[0]
        tm.assert_frame_equal(expected, res)
def test_parse_dates_list(self):
df = DataFrame({'date': date_range('1/1/2001', periods=10)})
expected = df.to_html()
res = self.read_html(expected, parse_dates=[1], index_col=0)
tm.assert_frame_equal(df, res[0])
res = self.read_html(expected, parse_dates=['date'], index_col=0)
tm.assert_frame_equal(df, res[0])
def test_parse_dates_combine(self):
raw_dates = Series(date_range('1/1/2001', periods=10))
df = DataFrame({'date': raw_dates.map(lambda x: str(x.date())),
'time': raw_dates.map(lambda x: str(x.time()))})
res = self.read_html(df.to_html(), parse_dates={'datetime': [1, 2]},
index_col=1)
newdf = DataFrame({'datetime': raw_dates})
tm.assert_frame_equal(newdf, res[0])
    def test_computer_sales_page(self):
        """This page has too few header rows for header=[0, 1]; the bs4
        flavor must raise ParserError with a descriptive message."""
        data = os.path.join(DATA_PATH, 'computer_sales_page.html')
        with tm.assert_raises_regex(ParserError,
                                    r"Passed header=\[0,1\] are "
                                    r"too many rows for this "
                                    r"multi_index of columns"):
            self.read_html(data, header=[0, 1])
def test_wikipedia_states_table(self):
data = os.path.join(DATA_PATH, 'wikipedia_states.html')
assert os.path.isfile(data), '%r is not a file' % data
assert os.path.getsize(data), '%r is an empty file' % data
result = self.read_html(data, 'Arizona', header=1)[0]
assert result['sq mi'].dtype == np.dtype('float64')
    def test_decimal_rows(self):
        """A custom ``decimal`` marker ('#') is honored when converting
        cell text to floats (GH 12907)."""
        data = StringIO('''<html>
            <body>
             <table>
                <thead>
                    <tr>
                        <th>Header</th>
                    </tr>
                </thead>
                <tbody>
                    <tr>
                        <td>1100#101</td>
                    </tr>
                </tbody>
            </table>
            </body>
        </html>''')
        expected = DataFrame(data={'Header': 1100.101}, index=[0])
        result = self.read_html(data, decimal='#')[0]
        assert result['Header'].dtype == np.dtype('float64')
        tm.assert_frame_equal(result, expected)
def test_bool_header_arg(self):
# GH 6114
for arg in [True, False]:
with pytest.raises(TypeError):
read_html(self.spam_data, header=arg)
    def test_converters(self):
        """A per-column converter overrides default type inference, keeping
        numeric-looking cells as strings (GH 13461)."""
        html_data = """<table>
         <thead>
           <th>a</th>
         </tr>
         </thead>
         <tbody>
           <tr>
             <td> 0.763</td>
           </tr>
           <tr>
             <td> 0.244</td>
           </tr>
         </tbody>
       </table>"""
        expected_df = DataFrame({'a': ['0.763', '0.244']})
        html_df = read_html(html_data, converters={'a': str})[0]
        tm.assert_frame_equal(expected_df, html_df)
    def test_na_values(self):
        """Values listed in ``na_values`` are converted to NaN (GH 13461)."""
        html_data = """<table>
         <thead>
           <th>a</th>
         </tr>
         </thead>
         <tbody>
           <tr>
             <td> 0.763</td>
           </tr>
           <tr>
             <td> 0.244</td>
           </tr>
         </tbody>
       </table>"""
        expected_df = DataFrame({'a': [0.763, np.nan]})
        html_df = read_html(html_data, na_values=[0.244])[0]
        tm.assert_frame_equal(expected_df, html_df)
    def test_keep_default_na(self):
        """``keep_default_na`` toggles whether sentinel strings like 'NA'
        become NaN or are kept verbatim."""
        html_data = """<table>
         <thead>
           <th>a</th>
         </tr>
         </thead>
         <tbody>
           <tr>
             <td> N/A</td>
           </tr>
           <tr>
             <td> NA</td>
           </tr>
         </tbody>
       </table>"""
        expected_df = DataFrame({'a': ['N/A', 'NA']})
        html_df = read_html(html_data, keep_default_na=False)[0]
        tm.assert_frame_equal(expected_df, html_df)
        expected_df = DataFrame({'a': [np.nan, np.nan]})
        html_df = read_html(html_data, keep_default_na=True)[0]
        tm.assert_frame_equal(expected_df, html_df)
def test_multiple_header_rows(self):
# Issue #13434
expected_df = DataFrame(data=[("Hillary", 68, "D"),
("Bernie", 74, "D"),
("Donald", 69, "R")])
expected_df.columns = [["Unnamed: 0_level_0", "Age", "Party"],
["Name", "Unnamed: 1_level_1",
"Unnamed: 2_level_1"]]
html = expected_df.to_html(index=False)
html_df = read_html(html, )[0]
tm.assert_frame_equal(expected_df, html_df)
def _lang_enc(filename):
return os.path.splitext(os.path.basename(filename))[0].split('_')
class TestReadHtmlEncoding(object):
    """Round-trip every fixture in html_encoding/ through filename,
    file-like, and raw-bytes inputs and check the results agree.

    Fixture filenames encode the expected codec as ``<lang>_<encoding>.html``.
    """
    files = glob.glob(os.path.join(DATA_PATH, 'html_encoding', '*.html'))
    flavor = 'bs4'
    @classmethod
    def setup_class(cls):
        _skip_if_none_of((cls.flavor, 'html5lib'))
    def read_html(self, *args, **kwargs):
        # Force this class's flavor on every call.
        kwargs['flavor'] = self.flavor
        return read_html(*args, **kwargs)
    def read_filename(self, f, encoding):
        return self.read_html(f, encoding=encoding, index_col=0)
    def read_file_like(self, f, encoding):
        with open(f, 'rb') as fobj:
            return self.read_html(BytesIO(fobj.read()), encoding=encoding,
                                  index_col=0)
    def read_string(self, f, encoding):
        with open(f, 'rb') as fobj:
            return self.read_html(fobj.read(), encoding=encoding, index_col=0)
    def test_encode(self):
        assert self.files, 'no files read from the data folder'
        for f in self.files:
            _, encoding = _lang_enc(f)
            try:
                from_string = self.read_string(f, encoding).pop()
                from_file_like = self.read_file_like(f, encoding).pop()
                from_filename = self.read_filename(f, encoding).pop()
                tm.assert_frame_equal(from_string, from_file_like)
                tm.assert_frame_equal(from_string, from_filename)
            except Exception:
                # seems utf-16/32 fail on windows
                if is_platform_windows():
                    if '16' in encoding or '32' in encoding:
                        continue
                raise
class TestReadHtmlEncodingLxml(TestReadHtmlEncoding):
    """Run the encoding round-trip suite with the lxml flavor."""
    flavor = 'lxml'
    @classmethod
    def setup_class(cls):
        super(TestReadHtmlEncodingLxml, cls).setup_class()
        _skip_if_no(cls.flavor)
class TestReadHtmlLxml(ReadHtmlMixin):
    """lxml-flavor-specific behavior of read_html."""
    flavor = 'lxml'
    @classmethod
    def setup_class(cls):
        _skip_if_no('lxml')
    def test_data_fail(self):
        """lxml's strict parser rejects these malformed fixtures."""
        from lxml.etree import XMLSyntaxError
        spam_data = os.path.join(DATA_PATH, 'spam.html')
        banklist_data = os.path.join(DATA_PATH, 'banklist.html')
        with pytest.raises(XMLSyntaxError):
            self.read_html(spam_data)
        with pytest.raises(XMLSyntaxError):
            self.read_html(banklist_data)
    def test_works_on_valid_markup(self):
        """Well-formed markup parses into a list of DataFrames."""
        filename = os.path.join(DATA_PATH, 'valid_markup.html')
        dfs = self.read_html(filename, index_col=0)
        assert isinstance(dfs, list)
        assert isinstance(dfs[0], DataFrame)
    @tm.slow
    def test_fallback_success(self):
        """When lxml fails, parsing falls back to the next listed flavor."""
        _skip_if_none_of(('bs4', 'html5lib'))
        banklist_data = os.path.join(DATA_PATH, 'banklist.html')
        self.read_html(banklist_data, '.*Water.*', flavor=['lxml', 'html5lib'])
    def test_parse_dates_list(self):
        """``parse_dates`` works by position and by name under lxml."""
        df = DataFrame({'date': date_range('1/1/2001', periods=10)})
        expected = df.to_html()
        res = self.read_html(expected, parse_dates=[1], index_col=0)
        tm.assert_frame_equal(df, res[0])
        res = self.read_html(expected, parse_dates=['date'], index_col=0)
        tm.assert_frame_equal(df, res[0])
    def test_parse_dates_combine(self):
        """Dict-valued ``parse_dates`` combines date+time columns under lxml."""
        raw_dates = Series(date_range('1/1/2001', periods=10))
        df = DataFrame({'date': raw_dates.map(lambda x: str(x.date())),
                        'time': raw_dates.map(lambda x: str(x.time()))})
        res = self.read_html(df.to_html(), parse_dates={'datetime': [1, 2]},
                             index_col=1)
        newdf = DataFrame({'datetime': raw_dates})
        tm.assert_frame_equal(newdf, res[0])
    def test_computer_sales_page(self):
        # Unlike the bs4 flavor, lxml parses this page without raising.
        data = os.path.join(DATA_PATH, 'computer_sales_page.html')
        self.read_html(data, header=[0, 1])
def test_invalid_flavor():
    """An unknown flavor name raises ValueError before any parsing."""
    target = 'google.com'
    with pytest.raises(ValueError):
        read_html(target, 'google', flavor='not a* valid**++ flaver')
def get_elements_from_file(url, element='table'):
    """Parse a local file with bs4/html5lib and return all *element* tags."""
    _skip_if_none_of(('bs4', 'html5lib'))
    from bs4 import BeautifulSoup
    file_url = file_path_to_url(url)
    with urlopen(file_url) as handle:
        soup = BeautifulSoup(handle, features='html5lib')
    return soup.find_all(element)
@tm.slow
def test_bs4_finds_tables():
    """BeautifulSoup locates at least one <table> in the spam fixture."""
    filepath = os.path.join(DATA_PATH, "spam.html")
    with warnings.catch_warnings():
        warnings.filterwarnings('ignore')
        tables = get_elements_from_file(filepath, 'table')
    assert tables
def get_lxml_elements(url, element):
    """Parse *url* with lxml.html and return every matching element node."""
    _skip_if_no('lxml')
    from lxml.html import parse
    tree = parse(url)
    return tree.xpath('.//{0}'.format(element))
@tm.slow
def test_lxml_finds_tables():
    """lxml finds at least one <table> element in the spam fixture."""
    spam_path = os.path.join(DATA_PATH, "spam.html")
    assert get_lxml_elements(spam_path, 'table')
@tm.slow
def test_lxml_finds_tbody():
    """lxml finds at least one <tbody> element in the spam fixture."""
    spam_path = os.path.join(DATA_PATH, "spam.html")
    assert get_lxml_elements(spam_path, 'tbody')
def test_same_ordering():
    """The lxml and bs4 flavors return the same tables in the same order."""
    _skip_if_none_of(['bs4', 'lxml', 'html5lib'])
    filename = os.path.join(DATA_PATH, 'valid_markup.html')
    parsed = {flavor: read_html(filename, index_col=0, flavor=[flavor])
              for flavor in ('lxml', 'bs4')}
    assert_framelist_equal(parsed['lxml'], parsed['bs4'])
| mbayon/TFG-MachineLearning | venv/lib/python3.6/site-packages/pandas/tests/io/test_html.py | Python | mit | 33,092 | 0 |
class Solution(object):
    def strStr(self, haystack, needle):
        """Return the index of the first occurrence of *needle* in
        *haystack*, or -1 if it does not occur.  An empty needle
        matches at index 0.

        :type haystack: str
        :type needle: str
        :rtype: int
        """
        width = len(needle)
        limit = len(haystack) - width
        start = 0
        # Slide a window of len(needle) over the haystack.
        while start <= limit:
            if haystack[start:start + width] == needle:
                return start
            start += 1
        return -1
| scream7/leetcode | algorithms/python/28.py | Python | apache-2.0 | 315 | 0 |
import logging
from ..models import Activity
from .date import activity_stream_date_to_datetime, datetime_to_string
log = logging.getLogger(__name__)
def activity_from_dict(data):
    """Build an Activity model from a raw YouTube search-result dict."""
    log.debug("Converting YouTube dict to Activity Model")
    return Activity.from_activity_dict(activity_dict_from_dict(data))
def activity_dict_from_dict(blob):
    """Convert a raw YouTube API search result into an Activity Streams dict.

    *blob* is a single item from a YouTube Data API ``search.list``
    response.  The returned dict follows the
    http://www.w3.org/ns/activitystreams vocabulary, with YouTube-specific
    fields carried under ``youtube:``-prefixed extension keys.
    """
    log.debug("Converting YouTube dict to activity dict: %s", blob)
    snippet = blob.get("snippet")
    # The watch URL is needed twice (@id and the url link); build it once.
    video_url = "https://www.youtube.com/watch?v={}".format(
        blob.get("id").get("videoId"))
    # One image entry per thumbnail resolution, smallest first.
    thumbnails = snippet.get("thumbnails")
    images = [
        {
            "@type": "Link",
            "href": thumbnails.get(resolution).get("url"),
            "mediaType": "image/jpeg",
            "youtube:resolution": resolution,
        }
        for resolution in ("default", "medium", "high")
    ]
    stream_object = {}
    stream_object["@context"] = "http://www.w3.org/ns/activitystreams"
    stream_object["@type"] = "Activity"
    date = activity_stream_date_to_datetime(snippet.get("publishedAt"))
    stream_object["published"] = datetime_to_string(date)
    stream_object["provider"] = {
        "@type": "Service",
        "displayName": "YouTube"
    }
    stream_object["actor"] = {
        "@type": "Person",
        "@id": "https://www.youtube.com/user/{}".format(
            snippet.get("channelTitle")),
        "displayName": snippet.get("channelTitle"),
    }
    stream_object["object"] = {
        "@id": video_url,
        "@type": "Video",
        "displayName": snippet.get("title"),
        "url": [{
            "href": video_url,
            "@type": "Link"
        }],
        "content": snippet.get("description"),
        "youtube:etag": blob.get("etag"),
        "youtube:kind": blob.get("kind"),
        "youtube:id:kind": blob.get("id").get("kind"),
        "youtube:channelId": snippet.get("channelId"),
        "youtube:liveBroadcastContent": snippet.get("liveBroadcastContent"),
        "image": images
    }
    return stream_object
"""
"""
"""
{
"@context": "http://www.w3.org/ns/activitystreams",
"@type": "Activity", ------ Abstract wrapper
"published": "2015-02-10T15:04:55Z",
"provider": {
"@type": "Service",
"displayName": "Twitter|FaceBook|Instagram|YouTube"
},
"actor": {
"@type": "Person",
"@id": "https://www.twitter.com/{{user.screen_name}}
"displayName": "Martin Smith",
"url": "http://example.org/martin",
"image": {
"@type": "Link",
"href": "http://example.org/martin/image.jpg",
"mediaType": "image/jpeg"
}
},
------------------------------------------------------
"object" : {
"@id": "urn:example:blog:abc123/xyz",
"@type": "Note",
"url": "http://example.org/blog/2011/02/entry",
"content": "This is a short note"
},
------------------------------------------------------
"object" : {
"@id": "urn:example:blog:abc123/xyz",
"@type": "Video",
"displayName": "A Simple Video",
"url": "http://example.org/video.mkv",
"duration": "PT2H"
},
------------------------------------------------------
"object" : {
"@id": "urn:example:blog:abc123/xyz",
"@type": "Image",
"displayName": "A Simple Image",
"content": "any messages?"
"url": [
{
"@type": "Link",
"href": "http://example.org/image.jpeg",
"mediaType": "image/jpeg"
},
{
"@type": "Link",
"href": "http://example.org/image.png",
"mediaType": "image/png"
}
]
},
}
"""
| blitzagency/django-chatterbox | chatterbox/utils/youtube.py | Python | mit | 4,331 | 0.000693 |
# -*- coding: utf-8 -*-
"""
Created on Fri Oct 7 13:10:05 2016
@author: thasegawa
"""
import os
import pandas as pd
# Load the economic indicator names and fuel variable names used to locate
# the per-(group, indicator) regression summary files.
economic_list = list(pd.read_excel('data\\fields\\economicIndicators_Real.xlsx', header=None)[0])
#fuel_list = list(pd.read_excel('data\\fields\\fuel_binary.xlsx', header=None)[0]) + [None]
fuel_list = list(pd.read_excel('data\\fields\\fuel_binary.xlsx', header=None)[0])
# Iterate through each regression result and retrieve R^2 and coefficient
# PATH rail station groups; one output column per group.
group_list = ['pathmid',
              'pathnj',
              'pathnyc',
              'pathtotal',
              'pathwtc']
path = 'data\\regress_out\\all_v2'
# NOTE(review): outcol_list/out_dict are built but never used below —
# confirm whether they are leftovers from an earlier version.
outcol_list = ['PATH Group',
               'R^2',
               'Elasticity Coefficient',
               'Economic Variable',
               'Economic Coefficient',
               'Fuel Variable',
               'Fuel Coefficient',
               'M1 Coefficient',
               'M2 Coefficient',
               'M3 Coefficient',
               'M4 Coefficient',
               'M5 Coefficient',
               'M6 Coefficient',
               'M7 Coefficient',
               'M8 Coefficient',
               'M9 Coefficient',
               'M10 Coefficient',
               'M11 Coefficient',
               'Recession_FRED Coefficient',
               'Sandy Coefficient',
               'Snow_Median Coefficient',
               'Intercept']
out_dict = {key: [] for key in outcol_list}
# NOTE(review): fname_list is also unused; files are opened by constructed
# name instead.
fname_list = os.listdir(path)
for index, group in enumerate(group_list):
    R2_list = []
    coef_list = []
    for fuel in fuel_list:
        for economic in economic_list:
            # Summary filename does not include the fuel variable, so each
            # fuel iteration re-reads the same file.
            fname = 'regress_summary_{0}_{1}.txt'.format(group, economic)
            with open(os.path.join(path,fname)) as f:
                lines = f.readlines()
            # Scan the summary text for the R-squared line and the
            # 'Fare-1Trip' elasticity coefficient line.
            # NOTE(review): R2 and coef are never reset per file, so a file
            # missing either pattern silently reuses the previous file's
            # value (and the very first file would raise NameError); the
            # `is not None` guards below can therefore never be False.
            for line in lines:
                if line[:9] == 'R-squared':
                    R2 = float(line.strip().split(' ')[-1])
                linesplit = line.split(' ')
                if (len(linesplit) > 2):
                    if (linesplit[1] == 'Fare-1Trip'):
                        coef = float(linesplit[2])
            if R2 is not None:
                R2_list.append(R2)
            else:
                R2_list.append(-999)
            if coef is not None:
                coef_list.append(coef)
            else:
                coef_list.append(-999)
    # First group creates the output frames; later groups add a column.
    # NOTE(review): R2_list has len(fuel_list) * len(economic_list) entries
    # but is paired with an index of len(economic_list) — this only works
    # if fuel_list holds exactly one entry; verify the input file.
    if index == 0:
        R2_out = pd.DataFrame({'Economic Indicator': economic_list,
                               group: R2_list})
        coef_out = pd.DataFrame({'Economic Indicator': economic_list,
                                 group: coef_list})
    else:
        R2_out[group] = R2_list
        coef_out[group] = coef_list
R2_out.to_excel('data\\regress_out\\regresssummary_R2.xlsx', index = False)
coef_out.to_excel('data\\regress_out\\regresssummary_coef.xlsx', index = False) | tzechiop/PANYNJ-Regression-Analysis-for-Toll-Traffic-Elasticity | mergeResults.py | Python | mit | 2,928 | 0.003415 |
# Copyright 2010-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# This file is licensed under the Apache License, Version 2.0 (the "License").
# You may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
# CONDITIONS OF ANY KIND, either express or implied. See the License for the
# specific language governing permissions and limitations under the License.
# snippet-sourcedescription:[MyCodeCommitFunction.py demonstrates how to use an AWS Lambda function to return the URLs used for cloning an AWS CodeCommit repository to a CloudWatch log.]
# snippet-service:[codecommit]
# snippet-keyword:[Python]
# snippet-sourcesyntax:[python]
# snippet-sourcesyntax:[python]
# snippet-keyword:[AWS CodeCommit]
# snippet-keyword:[Code Sample]
# snippet-keyword:[GetRepository]
# snippet-sourcetype:[full-example]
# snippet-sourceauthor:[AWS]
# snippet-sourcedate:[2016-03-07]
# snippet-start:[codecommit.python.MyCodeCommitFunction.complete]
import json
import boto3
codecommit = boto3.client('codecommit')
def lambda_handler(event, context):
    """Log the pushed refs and return the repository's HTTPS clone URL."""
    record = event['Records'][0]
    # Log which refs were updated by the triggering push.
    references = {ref['ref'] for ref in record['codecommit']['references']}
    print("References: " + str(references))
    # The repository name is the sixth ':'-separated field of the ARN.
    repository = record['eventSourceARN'].split(':')[5]
    try:
        response = codecommit.get_repository(repositoryName=repository)
        metadata = response['repositoryMetadata']
        print("Clone URL: " + metadata['cloneUrlHttp'])
        return metadata['cloneUrlHttp']
    except Exception as e:
        print(e)
        print('Error getting repository {}. Make sure it exists and that your repository is in the same region as this function.'.format(repository))
        raise e
# snippet-end:[codecommit.python.MyCodeCommitFunction.complete]
| awsdocs/aws-doc-sdk-examples | lambda_functions/codecommit/MyCodeCommitFunction.py | Python | apache-2.0 | 2,083 | 0.005281 |
"""Facility to use the Expat parser to load a minidom instance
from a string or file.
This avoids all the overhead of SAX and pulldom to gain performance.
"""
# Warning!
#
# This module is tightly bound to the implementation details of the
# minidom DOM and can't be used with other DOM implementations. This
# is due, in part, to a lack of appropriate methods in the DOM (there is
# no way to create Entity and Notation nodes via the DOM Level 2
# interface), and for performance. The later is the cause of some fairly
# cryptic code.
#
# Performance hacks:
#
# - .character_data_handler() has an extra case in which continuing
# data is appended to an existing Text node; this can be a
# speedup since pyexpat can break up character data into multiple
# callbacks even though we set the buffer_text attribute on the
# parser. This also gives us the advantage that we don't need a
# separate normalization pass.
#
# - Determining that a node exists is done using an identity comparison
# with None rather than a truth test; this avoids searching for and
# calling any methods on the node object if it exists. (A rather
# nice speedup is achieved this way as well!)
from xml.dom import xmlbuilder, minidom, Node
from xml.dom import EMPTY_NAMESPACE, EMPTY_PREFIX, XMLNS_NAMESPACE
from xml.parsers import expat
from xml.dom.minidom import _append_child, _set_attribute_node
from xml.dom.NodeFilter import NodeFilter
from xml.dom.minicompat import *
# Local aliases for frequently-tested node-type and filter constants;
# module-level names are cheaper to look up than attribute chains.
TEXT_NODE = Node.TEXT_NODE
CDATA_SECTION_NODE = Node.CDATA_SECTION_NODE
DOCUMENT_NODE = Node.DOCUMENT_NODE
FILTER_ACCEPT = xmlbuilder.DOMBuilderFilter.FILTER_ACCEPT
FILTER_REJECT = xmlbuilder.DOMBuilderFilter.FILTER_REJECT
FILTER_SKIP = xmlbuilder.DOMBuilderFilter.FILTER_SKIP
FILTER_INTERRUPT = xmlbuilder.DOMBuilderFilter.FILTER_INTERRUPT
# Shared DOMImplementation used to create every document built here.
theDOMImplementation = minidom.getDOMImplementation()
# Expat typename -> TypeInfo
_typeinfo_map = {
    "CDATA":    minidom.TypeInfo(None, "cdata"),
    "ENUM":     minidom.TypeInfo(None, "enumeration"),
    "ENTITY":   minidom.TypeInfo(None, "entity"),
    "ENTITIES": minidom.TypeInfo(None, "entities"),
    "ID":       minidom.TypeInfo(None, "id"),
    "IDREF":    minidom.TypeInfo(None, "idref"),
    "IDREFS":   minidom.TypeInfo(None, "idrefs"),
    "NMTOKEN":  minidom.TypeInfo(None, "nmtoken"),
    "NMTOKENS": minidom.TypeInfo(None, "nmtokens"),
    }
class ElementInfo(object):
    """Element-type information gathered from DTD declarations.

    ``_model`` is the expat content model tuple (or None if only an
    ATTLIST was seen); ``_attr_info`` holds raw expat attlist rows whose
    second field is the attribute name and whose [-2] field is the type.
    """
    __slots__ = '_attr_info', '_model', 'tagName'
    def __init__(self, tagName, model=None):
        self.tagName = tagName
        self._attr_info = []
        self._model = model
    def __getstate__(self):
        return self._attr_info, self._model, self.tagName
    def __setstate__(self, state):
        self._attr_info, self._model, self.tagName = state
    def getAttributeType(self, aname):
        # Look up the declared type of attribute *aname*; a leading "("
        # marks an enumerated type.  Unknown attributes get _no_type.
        for info in self._attr_info:
            if info[1] == aname:
                t = info[-2]
                if t[0] == "(":
                    return _typeinfo_map["ENUM"]
                else:
                    return _typeinfo_map[info[-2]]
        return minidom._no_type
    def getAttributeTypeNS(self, namespaceURI, localName):
        # Namespace-aware attribute types are not tracked from the DTD.
        return minidom._no_type
    def isElementContent(self):
        # True when the content model allows element children only
        # (i.e. not ANY and not mixed content).
        if self._model:
            type = self._model[0]
            return type not in (expat.model.XML_CTYPE_ANY,
                                expat.model.XML_CTYPE_MIXED)
        else:
            return False
    def isEmpty(self):
        # True when the DTD declares this element EMPTY.
        if self._model:
            return self._model[0] == expat.model.XML_CTYPE_EMPTY
        else:
            return False
    def isId(self, aname):
        # True when attribute *aname* is declared with type ID.
        for info in self._attr_info:
            if info[1] == aname:
                return info[-2] == "ID"
        return False
    def isIdNS(self, euri, ename, auri, aname):
        # not sure this is meaningful
        return self.isId((auri, aname))
def _intern(builder, s):
return builder._intern_setdefault(s, s)
def _parse_ns_name(builder, name):
assert ' ' in name
parts = name.split(' ')
intern = builder._intern_setdefault
if len(parts) == 3:
uri, localname, prefix = parts
prefix = intern(prefix, prefix)
qname = "%s:%s" % (prefix, localname)
qname = intern(qname, qname)
localname = intern(localname, localname)
else:
uri, localname = parts
prefix = EMPTY_PREFIX
qname = localname = intern(localname, localname)
return intern(uri, uri), localname, prefix, qname
class ExpatBuilder:
    """Document builder that uses Expat to build a ParsedXML.DOM document
    instance."""
    def __init__(self, options=None):
        if options is None:
            options = xmlbuilder.Options()
        self._options = options
        if self._options.filter is not None:
            self._filter = FilterVisibilityController(self._options.filter)
        else:
            self._filter = None
            # This *really* doesn't do anything in this case, so
            # override it with something fast & minimal.
            self._finish_start_element = id
        self._parser = None
        self.reset()
    def createParser(self):
        """Create a new parser object."""
        return expat.ParserCreate()
    def getParser(self):
        """Return the parser object, creating a new one if needed."""
        if not self._parser:
            self._parser = self.createParser()
            self._intern_setdefault = self._parser.intern.setdefault
            self._parser.buffer_text = True
            self._parser.ordered_attributes = True
            self._parser.specified_attributes = True
            self.install(self._parser)
        return self._parser
    def reset(self):
        """Free all data structures used during DOM construction."""
        self.document = theDOMImplementation.createDocument(
            EMPTY_NAMESPACE, None, None)
        self.curNode = self.document
        self._elem_info = self.document._elem_info
        self._cdata = False
    def install(self, parser):
        """Install the callbacks needed to build the DOM into the parser."""
        # This creates circular references!
        parser.StartDoctypeDeclHandler = self.start_doctype_decl_handler
        parser.StartElementHandler = self.first_element_handler
        parser.EndElementHandler = self.end_element_handler
        parser.ProcessingInstructionHandler = self.pi_handler
        if self._options.entities:
            parser.EntityDeclHandler = self.entity_decl_handler
            parser.NotationDeclHandler = self.notation_decl_handler
        if self._options.comments:
            parser.CommentHandler = self.comment_handler
        if self._options.cdata_sections:
            parser.StartCdataSectionHandler = self.start_cdata_section_handler
            parser.EndCdataSectionHandler = self.end_cdata_section_handler
            parser.CharacterDataHandler = self.character_data_handler_cdata
        else:
            parser.CharacterDataHandler = self.character_data_handler
        parser.ExternalEntityRefHandler = self.external_entity_ref_handler
        parser.XmlDeclHandler = self.xml_decl_handler
        parser.ElementDeclHandler = self.element_decl_handler
        parser.AttlistDeclHandler = self.attlist_decl_handler
    def parseFile(self, file):
        """Parse a document from a file object, returning the document
        node."""
        parser = self.getParser()
        first_buffer = True
        try:
            while 1:
                buffer = file.read(16*1024)
                if not buffer:
                    break
                parser.Parse(buffer, 0)
                # Once the document element exists, the internal subset
                # (if any) is complete and can be extracted from the
                # first buffer.
                if first_buffer and self.document.documentElement:
                    self._setup_subset(buffer)
                    first_buffer = False
            parser.Parse("", True)
        except ParseEscape:
            # Raised by a filter returning FILTER_INTERRUPT; keep what
            # has been built so far.
            pass
        doc = self.document
        self.reset()
        self._parser = None
        return doc
    def parseString(self, string):
        """Parse a document from a string, returning the document node."""
        parser = self.getParser()
        try:
            parser.Parse(string, True)
            self._setup_subset(string)
        except ParseEscape:
            pass
        doc = self.document
        self.reset()
        self._parser = None
        return doc
    def _setup_subset(self, buffer):
        """Load the internal subset if there might be one."""
        if self.document.doctype:
            extractor = InternalSubsetExtractor()
            extractor.parseString(buffer)
            subset = extractor.getSubset()
            self.document.doctype.internalSubset = subset
    def start_doctype_decl_handler(self, doctypeName, systemId, publicId,
                                   has_internal_subset):
        # Create and attach the DocumentType node, then let the filter
        # veto it; a rejected doctype also disables entity/notation
        # collection for the rest of the parse.
        doctype = self.document.implementation.createDocumentType(
            doctypeName, publicId, systemId)
        doctype.ownerDocument = self.document
        _append_child(self.document, doctype)
        self.document.doctype = doctype
        if self._filter and self._filter.acceptNode(doctype) == FILTER_REJECT:
            self.document.doctype = None
            del self.document.childNodes[-1]
            doctype = None
            self._parser.EntityDeclHandler = None
            self._parser.NotationDeclHandler = None
        if has_internal_subset:
            if doctype is not None:
                doctype.entities._seq = []
                doctype.notations._seq = []
            # Comments and PIs inside the internal subset are not part
            # of the document tree; suppress them until the subset ends.
            self._parser.CommentHandler = None
            self._parser.ProcessingInstructionHandler = None
            self._parser.EndDoctypeDeclHandler = self.end_doctype_decl_handler
    def end_doctype_decl_handler(self):
        # Restore the handlers suppressed while reading the internal subset.
        if self._options.comments:
            self._parser.CommentHandler = self.comment_handler
        self._parser.ProcessingInstructionHandler = self.pi_handler
        if not (self._elem_info or self._filter):
            # Nothing to do per-element; use a no-op for speed.
            self._finish_end_element = id
    def pi_handler(self, target, data):
        node = self.document.createProcessingInstruction(target, data)
        _append_child(self.curNode, node)
        if self._filter and self._filter.acceptNode(node) == FILTER_REJECT:
            self.curNode.removeChild(node)
    def character_data_handler_cdata(self, data):
        # Variant used when CDATA sections are kept: continuing data is
        # appended to the previous CDATA/Text node to avoid extra nodes.
        childNodes = self.curNode.childNodes
        if self._cdata:
            if (  self._cdata_continue
                  and childNodes[-1].nodeType == CDATA_SECTION_NODE):
                childNodes[-1].appendData(data)
                return
            node = self.document.createCDATASection(data)
            self._cdata_continue = True
        elif childNodes and childNodes[-1].nodeType == TEXT_NODE:
            node = childNodes[-1]
            value = node.data + data
            # Writing through __dict__ bypasses property machinery for speed.
            d = node.__dict__
            d['data'] = d['nodeValue'] = value
            return
        else:
            node = minidom.Text()
            d = node.__dict__
            d['data'] = d['nodeValue'] = data
            d['ownerDocument'] = self.document
        _append_child(self.curNode, node)
    def character_data_handler(self, data):
        # Fast path used when CDATA sections are folded into text.
        childNodes = self.curNode.childNodes
        if childNodes and childNodes[-1].nodeType == TEXT_NODE:
            node = childNodes[-1]
            d = node.__dict__
            d['data'] = d['nodeValue'] = node.data + data
            return
        node = minidom.Text()
        d = node.__dict__
        d['data'] = d['nodeValue'] = node.data + data
        d['ownerDocument'] = self.document
        _append_child(self.curNode, node)
    def entity_decl_handler(self, entityName, is_parameter_entity, value,
                            base, systemId, publicId, notationName):
        if is_parameter_entity:
            # we don't care about parameter entities for the DOM
            return
        if not self._options.entities:
            return
        node = self.document._create_entity(entityName, publicId,
                                            systemId, notationName)
        if value is not None:
            # internal entity
            # node *should* be readonly, but we'll cheat
            child = self.document.createTextNode(value)
            node.childNodes.append(child)
        self.document.doctype.entities._seq.append(node)
        if self._filter and self._filter.acceptNode(node) == FILTER_REJECT:
            del self.document.doctype.entities._seq[-1]
    def notation_decl_handler(self, notationName, base, systemId, publicId):
        node = self.document._create_notation(notationName, publicId, systemId)
        self.document.doctype.notations._seq.append(node)
        # NOTE(review): this deletes the notation when the filter returns
        # FILTER_ACCEPT, while entity_decl_handler above deletes on
        # FILTER_REJECT — the condition looks inverted; confirm against
        # the DOMBuilderFilter semantics before relying on it.
        if self._filter and self._filter.acceptNode(node) == FILTER_ACCEPT:
            del self.document.doctype.notations._seq[-1]
    def comment_handler(self, data):
        node = self.document.createComment(data)
        _append_child(self.curNode, node)
        if self._filter and self._filter.acceptNode(node) == FILTER_REJECT:
            self.curNode.removeChild(node)
    def start_cdata_section_handler(self):
        self._cdata = True
        self._cdata_continue = False
    def end_cdata_section_handler(self):
        self._cdata = False
        self._cdata_continue = False
    def external_entity_ref_handler(self, context, base, systemId, publicId):
        # External entities are not loaded; returning 1 tells expat to
        # continue parsing.
        return 1
    def first_element_handler(self, name, attributes):
        # Called only for the document element; decides whether the
        # per-element finish step can be skipped for the whole parse.
        if self._filter is None and not self._elem_info:
            self._finish_end_element = id
        self.getParser().StartElementHandler = self.start_element_handler
        self.start_element_handler(name, attributes)
    def start_element_handler(self, name, attributes):
        node = self.document.createElement(name)
        _append_child(self.curNode, node)
        self.curNode = node
        if attributes:
            # ordered_attributes mode: flat [name, value, name, value, ...]
            for i in range(0, len(attributes), 2):
                a = minidom.Attr(attributes[i], EMPTY_NAMESPACE,
                                 None, EMPTY_PREFIX)
                value = attributes[i+1]
                d = a.childNodes[0].__dict__
                d['data'] = d['nodeValue'] = value
                d = a.__dict__
                d['value'] = d['nodeValue'] = value
                d['ownerDocument'] = self.document
                _set_attribute_node(node, a)
        if node is not self.document.documentElement:
            self._finish_start_element(node)
    def _finish_start_element(self, node):
        if self._filter:
            # To be general, we'd have to call isSameNode(), but this
            # is sufficient for minidom:
            if node is self.document.documentElement:
                return
            filt = self._filter.startContainer(node)
            if filt == FILTER_REJECT:
                # ignore this node & all descendents
                Rejecter(self)
            elif filt == FILTER_SKIP:
                # ignore this node, but make it's children become
                # children of the parent node
                Skipper(self)
            else:
                return
            self.curNode = node.parentNode
            node.parentNode.removeChild(node)
            node.unlink()
    # If this ever changes, Namespaces.end_element_handler() needs to
    # be changed to match.
    #
    def end_element_handler(self, name):
        curNode = self.curNode
        self.curNode = curNode.parentNode
        self._finish_end_element(curNode)
    def _finish_end_element(self, curNode):
        info = self._elem_info.get(curNode.tagName)
        if info:
            self._handle_white_text_nodes(curNode, info)
        if self._filter:
            if curNode is self.document.documentElement:
                return
            if self._filter.acceptNode(curNode) == FILTER_REJECT:
                self.curNode.removeChild(curNode)
                curNode.unlink()
    def _handle_white_text_nodes(self, node, info):
        if (self._options.whitespace_in_element_content
            or not info.isElementContent()):
            return
        # We have element type information and should remove ignorable
        # whitespace; identify for text nodes which contain only
        # whitespace.
        L = []
        for child in node.childNodes:
            if child.nodeType == TEXT_NODE and not child.data.strip():
                L.append(child)
        # Remove ignorable whitespace from the tree.
        for child in L:
            node.removeChild(child)
    def element_decl_handler(self, name, model):
        # Record (or complete) the content model for element *name*.
        info = self._elem_info.get(name)
        if info is None:
            self._elem_info[name] = ElementInfo(name, model)
        else:
            assert info._model is None
            info._model = model
    def attlist_decl_handler(self, elem, name, type, default, required):
        # Accumulate one raw attlist row per declared attribute of *elem*.
        info = self._elem_info.get(elem)
        if info is None:
            info = ElementInfo(elem)
            self._elem_info[elem] = info
        info._attr_info.append(
            [None, name, None, None, default, 0, type, required])
    def xml_decl_handler(self, version, encoding, standalone):
        self.document.version = version
        self.document.encoding = encoding
        # This is still a little ugly, thanks to the pyexpat API. ;-(
        # standalone is -1 when the declaration omitted the attribute.
        if standalone >= 0:
            if standalone:
                self.document.standalone = True
            else:
                self.document.standalone = False
# Don't include FILTER_INTERRUPT, since that's checked separately
# where allowed.
_ALLOWED_FILTER_RETURNS = (FILTER_ACCEPT, FILTER_REJECT, FILTER_SKIP)
class FilterVisibilityController(object):
    """Wrapper around a DOMBuilderFilter which implements the checks
    to make the whatToShow filter attribute work."""
    __slots__ = 'filter',
    def __init__(self, filter):
        self.filter = filter
    def startContainer(self, node):
        # Only consult the filter for node types it asked to see;
        # everything else is implicitly accepted.
        mask = self._nodetype_mask[node.nodeType]
        if self.filter.whatToShow & mask:
            val = self.filter.startContainer(node)
            if val == FILTER_INTERRUPT:
                raise ParseEscape
            if val not in _ALLOWED_FILTER_RETURNS:
                raise ValueError, \
                      "startContainer() returned illegal value: " + repr(val)
            return val
        else:
            return FILTER_ACCEPT
    def acceptNode(self, node):
        mask = self._nodetype_mask[node.nodeType]
        if self.filter.whatToShow & mask:
            val = self.filter.acceptNode(node)
            if val == FILTER_INTERRUPT:
                raise ParseEscape
            if val == FILTER_SKIP:
                # move all child nodes to the parent, and remove this node
                parent = node.parentNode
                for child in node.childNodes[:]:
                    parent.appendChild(child)
                # node is handled by the caller
                return FILTER_REJECT
            if val not in _ALLOWED_FILTER_RETURNS:
                raise ValueError, \
                      "acceptNode() returned illegal value: " + repr(val)
            return val
        else:
            return FILTER_ACCEPT
    # Maps each DOM node type to its NodeFilter SHOW_* bit for the
    # whatToShow test above.
    _nodetype_mask = {
        Node.ELEMENT_NODE:                NodeFilter.SHOW_ELEMENT,
        Node.ATTRIBUTE_NODE:              NodeFilter.SHOW_ATTRIBUTE,
        Node.TEXT_NODE:                   NodeFilter.SHOW_TEXT,
        Node.CDATA_SECTION_NODE:          NodeFilter.SHOW_CDATA_SECTION,
        Node.ENTITY_REFERENCE_NODE:       NodeFilter.SHOW_ENTITY_REFERENCE,
        Node.ENTITY_NODE:                 NodeFilter.SHOW_ENTITY,
        Node.PROCESSING_INSTRUCTION_NODE: NodeFilter.SHOW_PROCESSING_INSTRUCTION,
        Node.COMMENT_NODE:                NodeFilter.SHOW_COMMENT,
        Node.DOCUMENT_NODE:               NodeFilter.SHOW_DOCUMENT,
        Node.DOCUMENT_TYPE_NODE:          NodeFilter.SHOW_DOCUMENT_TYPE,
        Node.DOCUMENT_FRAGMENT_NODE:      NodeFilter.SHOW_DOCUMENT_FRAGMENT,
        Node.NOTATION_NODE:               NodeFilter.SHOW_NOTATION,
        }
class FilterCrutch(object):
    """Base helper that temporarily hijacks a parser's element handlers.

    Subclasses provide start_element_handler()/end_element_handler();
    the originals are saved so they can be restored once the subtree of
    interest has been consumed.
    """
    __slots__ = '_builder', '_level', '_old_start', '_old_end'

    def __init__(self, builder):
        parser = builder._parser
        self._builder = builder
        self._level = 0
        # Remember the handlers currently installed so they can be
        # restored later, then divert element events to this crutch.
        self._old_start = parser.StartElementHandler
        self._old_end = parser.EndElementHandler
        parser.StartElementHandler = self.start_element_handler
        parser.EndElementHandler = self.end_element_handler
class Rejecter(FilterCrutch):
    """FilterCrutch that throws away everything below a rejected element.

    While active, all content events are silenced; element events are
    only counted so we know when the rejected subtree has ended.
    """
    __slots__ = ()

    def __init__(self, builder):
        FilterCrutch.__init__(self, builder)
        parser = builder._parser
        # Silence every content-related callback for the duration.
        parser.ProcessingInstructionHandler = None
        parser.CommentHandler = None
        parser.CharacterDataHandler = None
        parser.StartCdataSectionHandler = None
        parser.EndCdataSectionHandler = None
        parser.ExternalEntityRefHandler = None

    def start_element_handler(self, *args):
        # Going one level deeper inside the rejected subtree.
        self._level += 1

    def end_element_handler(self, *args):
        if self._level:
            self._level -= 1
        else:
            # Leaving the rejected element itself: put the builder's own
            # handlers back in place.
            parser = self._builder._parser
            self._builder.install(parser)
            parser.StartElementHandler = self._old_start
            parser.EndElementHandler = self._old_end
class Skipper(FilterCrutch):
    """FilterCrutch used when a filter skips an element: the element
    itself is dropped but its children are kept (handled by the caller).
    """
    __slots__ = ()

    def start_element_handler(self, *args):
        before = self._builder.curNode
        self._old_start(*args)
        if self._builder.curNode is not before:
            # The builder created a new node, so we are one level deeper.
            self._level += 1

    def end_element_handler(self, *args):
        if self._level:
            self._level -= 1
            self._old_end(*args)
        else:
            # We're popping back out of the node we're skipping: just
            # restore the handlers and detach from the builder.
            self._builder._parser.StartElementHandler = self._old_start
            self._builder._parser.EndElementHandler = self._old_end
            self._builder = None
# framework document used by the fragment builder.
# Takes a string for the doctype, subset string, and namespace attrs string.
_FRAGMENT_BUILDER_INTERNAL_SYSTEM_ID = \
"http://xml.python.org/entities/fragment-builder/internal"
_FRAGMENT_BUILDER_TEMPLATE = (
'''\
<!DOCTYPE wrapper
%%s [
<!ENTITY fragment-builder-internal
SYSTEM "%s">
%%s
]>
<wrapper %%s
>&fragment-builder-internal;</wrapper>'''
% _FRAGMENT_BUILDER_INTERNAL_SYSTEM_ID)
class FragmentBuilder(ExpatBuilder):
    """Builder which constructs document fragments given XML source
    text and a context node.

    The context node is expected to provide information about the
    namespace declarations which are in scope at the start of the
    fragment.
    """

    def __init__(self, context, options=None):
        # A Document node is its own owner; any other node borrows the
        # owner document for doctype/namespace information.
        if context.nodeType == DOCUMENT_NODE:
            self.originalDocument = context
            self.context = context
        else:
            self.originalDocument = context.ownerDocument
            self.context = context
        ExpatBuilder.__init__(self, options)

    def reset(self):
        ExpatBuilder.reset(self)
        # Result of the most recent parse; cleared between runs.
        self.fragment = None

    def parseFile(self, file):
        """Parse a document fragment from a file object, returning the
        fragment node."""
        return self.parseString(file.read())

    def parseString(self, string):
        """Parse a document fragment from a string, returning the
        fragment node."""
        self._source = string
        parser = self.getParser()
        doctype = self.originalDocument.doctype
        ident = ""
        if doctype:
            subset = doctype.internalSubset or self._getDeclarations()
            if doctype.publicId:
                ident = ('PUBLIC "%s" "%s"'
                         % (doctype.publicId, doctype.systemId))
            elif doctype.systemId:
                ident = 'SYSTEM "%s"' % doctype.systemId
        else:
            subset = ""
        nsattrs = self._getNSattrs() # get ns decls from node's ancestors
        # Wrap the fragment source in a synthetic wrapper document whose
        # internal entity refers back to self._source; the actual fragment
        # is parsed in external_entity_ref_handler() below.
        document = _FRAGMENT_BUILDER_TEMPLATE % (ident, subset, nsattrs)
        try:
            parser.Parse(document, 1)
        except:
            # Bare except is deliberate: reset state, then re-raise as-is.
            self.reset()
            raise
        fragment = self.fragment
        self.reset()
##        self._parser = None
        return fragment

    def _getDeclarations(self):
        """Re-create the internal subset from the DocumentType node.

        This is only needed if we don't already have the
        internalSubset as a string.
        """
        doctype = self.context.ownerDocument.doctype
        s = ""
        if doctype:
            # Serialize notation declarations first, then entity
            # declarations, rebuilding the DTD text piece by piece.
            for i in range(doctype.notations.length):
                notation = doctype.notations.item(i)
                if s:
                    s = s + "\n  "
                s = "%s<!NOTATION %s" % (s, notation.nodeName)
                if notation.publicId:
                    s = '%s PUBLIC "%s"\n             "%s">' \
                        % (s, notation.publicId, notation.systemId)
                else:
                    s = '%s SYSTEM "%s">' % (s, notation.systemId)
            for i in range(doctype.entities.length):
                entity = doctype.entities.item(i)
                if s:
                    s = s + "\n  "
                s = "%s<!ENTITY %s" % (s, entity.nodeName)
                if entity.publicId:
                    s = '%s PUBLIC "%s"\n             "%s"' \
                        % (s, entity.publicId, entity.systemId)
                elif entity.systemId:
                    s = '%s SYSTEM "%s"' % (s, entity.systemId)
                else:
                    # Internal entity: serialize its replacement text.
                    s = '%s "%s"' % (s, entity.firstChild.data)
                if entity.notationName:
                    s = "%s NOTATION %s" % (s, entity.notationName)
                s = s + ">"
        return s

    def _getNSattrs(self):
        # Namespace-unaware base implementation; overridden by
        # FragmentBuilderNS.
        return ""

    def external_entity_ref_handler(self, context, base, systemId, publicId):
        if systemId == _FRAGMENT_BUILDER_INTERNAL_SYSTEM_ID:
            # this entref is the one that we made to put the subtree
            # in; all of our given input is parsed in here.
            old_document = self.document
            old_cur_node = self.curNode
            parser = self._parser.ExternalEntityParserCreate(context)
            # put the real document back, parse into the fragment to return
            self.document = self.originalDocument
            self.fragment = self.document.createDocumentFragment()
            self.curNode = self.fragment
            try:
                parser.Parse(self._source, 1)
            finally:
                # Always restore builder state, even on parse errors.
                self.curNode = old_cur_node
                self.document = old_document
                self._source = None
            # Non-zero return tells expat the entity was handled.
            return -1
        else:
            return ExpatBuilder.external_entity_ref_handler(
                self, context, base, systemId, publicId)
class Namespaces:
    """Mix-in class for builders; adds support for namespaces."""

    def _initNamespaces(self):
        # list of (prefix, uri) ns declarations.  Namespace attrs are
        # constructed from this and added to the element's attrs.
        self._ns_ordered_prefixes = []

    def createParser(self):
        """Create a new namespace-handling parser."""
        parser = expat.ParserCreate(namespace_separator=" ")
        parser.namespace_prefixes = True
        return parser

    def install(self, parser):
        """Insert the namespace-handlers onto the parser."""
        ExpatBuilder.install(self, parser)
        if self._options.namespace_declarations:
            parser.StartNamespaceDeclHandler = (
                self.start_namespace_decl_handler)

    def start_namespace_decl_handler(self, prefix, uri):
        """Push this namespace declaration on our storage."""
        self._ns_ordered_prefixes.append((prefix, uri))

    def start_element_handler(self, name, attributes):
        # With namespace_separator=" ", expat reports namespaced names as
        # "uri localname prefix"; a name without spaces has no namespace.
        if ' ' in name:
            uri, localname, prefix, qname = _parse_ns_name(self, name)
        else:
            uri = EMPTY_NAMESPACE
            qname = name
            localname = None
            prefix = EMPTY_PREFIX
        node = minidom.Element(qname, uri, prefix, localname)
        node.ownerDocument = self.document
        _append_child(self.curNode, node)
        self.curNode = node
        if self._ns_ordered_prefixes:
            # Materialize pending xmlns declarations as attribute nodes.
            for prefix, uri in self._ns_ordered_prefixes:
                if prefix:
                    a = minidom.Attr(_intern(self, 'xmlns:' + prefix),
                                     XMLNS_NAMESPACE, prefix, "xmlns")
                else:
                    a = minidom.Attr("xmlns", XMLNS_NAMESPACE,
                                     "xmlns", EMPTY_PREFIX)
                # Write straight into the instance __dict__ to bypass
                # minidom's attribute machinery (intentional fast path).
                d = a.childNodes[0].__dict__
                d['data'] = d['nodeValue'] = uri
                d = a.__dict__
                d['value'] = d['nodeValue'] = uri
                d['ownerDocument'] = self.document
                _set_attribute_node(node, a)
            del self._ns_ordered_prefixes[:]
        if attributes:
            _attrs = node._attrs
            _attrsNS = node._attrsNS
            # expat delivers attributes as a flat [name1, value1, ...] list.
            for i in range(0, len(attributes), 2):
                aname = attributes[i]
                value = attributes[i+1]
                if ' ' in aname:
                    uri, localname, prefix, qname = _parse_ns_name(self, aname)
                    a = minidom.Attr(qname, uri, localname, prefix)
                    _attrs[qname] = a
                    _attrsNS[(uri, localname)] = a
                else:
                    a = minidom.Attr(aname, EMPTY_NAMESPACE,
                                     aname, EMPTY_PREFIX)
                    _attrs[aname] = a
                    _attrsNS[(EMPTY_NAMESPACE, aname)] = a
                d = a.childNodes[0].__dict__
                d['data'] = d['nodeValue'] = value
                d = a.__dict__
                d['ownerDocument'] = self.document
                d['value'] = d['nodeValue'] = value
                d['ownerElement'] = node

    if __debug__:
        # This only adds some asserts to the original
        # end_element_handler(), so we only define this when -O is not
        # used.  If changing one, be sure to check the other to see if
        # it needs to be changed as well.
        #
        def end_element_handler(self, name):
            curNode = self.curNode
            if ' ' in name:
                uri, localname, prefix, qname = _parse_ns_name(self, name)
                assert (curNode.namespaceURI == uri
                        and curNode.localName == localname
                        and curNode.prefix == prefix), \
                        "element stack messed up! (namespace)"
            else:
                assert curNode.nodeName == name, \
                       "element stack messed up - bad nodeName"
                assert curNode.namespaceURI == EMPTY_NAMESPACE, \
                       "element stack messed up - bad namespaceURI"
            self.curNode = curNode.parentNode
            self._finish_end_element(curNode)
class ExpatBuilderNS(Namespaces, ExpatBuilder):
    """Document builder that supports namespaces."""

    def reset(self):
        # Reset base builder state, then the namespace bookkeeping that
        # the Namespaces mix-in adds.
        ExpatBuilder.reset(self)
        self._initNamespaces()
class FragmentBuilderNS(Namespaces, FragmentBuilder):
    """Fragment builder that supports namespaces."""

    def reset(self):
        FragmentBuilder.reset(self)
        self._initNamespaces()

    def _getNSattrs(self):
        """Return string of namespace attributes from this element and
        ancestors."""
        # XXX This needs to be re-written to walk the ancestors of the
        # context to build up the namespace information from
        # declarations, elements, and attributes found in context.
        # Otherwise we have to store a bunch more data on the DOM
        # (though that *might* be more reliable -- not clear).
        attrs = ""
        context = self.context
        L = []
        while context:
            if hasattr(context, '_ns_prefix_uri'):
                for prefix, uri in context._ns_prefix_uri.items():
                    # add every new NS decl from context to L and attrs string
                    if prefix in L:
                        # A nearer declaration already shadows this one.
                        continue
                    L.append(prefix)
                    if prefix:
                        declname = "xmlns:" + prefix
                    else:
                        declname = "xmlns"
                    if attrs:
                        attrs = "%s\n    %s='%s'" % (attrs, declname, uri)
                    else:
                        attrs = " %s='%s'" % (declname, uri)
            context = context.parentNode
        return attrs
class ParseEscape(Exception):
    """Exception raised to short-circuit parsing in InternalSubsetExtractor."""
class InternalSubsetExtractor(ExpatBuilder):
    """XML processor which can rip out the internal document type subset."""

    # Raw subset text once parsing has run; None until then.
    subset = None

    def getSubset(self):
        """Return the internal subset as a string."""
        return self.subset

    def parseFile(self, file):
        try:
            ExpatBuilder.parseFile(self, file)
        except ParseEscape:
            # Raised on purpose as soon as the subset has been captured.
            pass

    def parseString(self, string):
        try:
            ExpatBuilder.parseString(self, string)
        except ParseEscape:
            pass

    def install(self, parser):
        # Only doctype and first-element events matter for extraction.
        parser.StartDoctypeDeclHandler = self.start_doctype_decl_handler
        parser.StartElementHandler = self.start_element_handler

    def start_doctype_decl_handler(self, name, publicId, systemId,
                                   has_internal_subset):
        if has_internal_subset:
            # Collect the raw subset text via expat's default handler.
            parser = self.getParser()
            self.subset = []
            parser.DefaultHandler = self.subset.append
            parser.EndDoctypeDeclHandler = self.end_doctype_decl_handler
        else:
            # No internal subset: nothing to extract, stop right away.
            raise ParseEscape()

    def end_doctype_decl_handler(self):
        # Join the collected pieces and normalize line endings to \n,
        # then stop parsing.
        s = ''.join(self.subset).replace('\r\n', '\n').replace('\r', '\n')
        self.subset = s
        raise ParseEscape()

    def start_element_handler(self, name, attrs):
        # Document content reached: there is no (more) subset to collect.
        raise ParseEscape()
def parse(file, namespaces=True):
    """Parse a document, returning the resulting Document node.

    'file' may be either a file name or an open file object.
    """
    builder = ExpatBuilderNS() if namespaces else ExpatBuilder()
    if isinstance(file, StringTypes):
        # The argument is a path: we open it and are responsible for
        # closing it again, even on error.
        with open(file, 'rb') as fp:
            return builder.parseFile(fp)
    return builder.parseFile(file)
def parseString(string, namespaces=True):
    """Parse a document from a string, returning the resulting
    Document node.
    """
    builder = ExpatBuilderNS() if namespaces else ExpatBuilder()
    return builder.parseString(string)
def parseFragment(file, context, namespaces=True):
    """Parse a fragment of a document, given the context from which it
    was originally extracted.  context should be the parent of the
    node(s) which are in the fragment.

    'file' may be either a file name or an open file object.
    """
    if namespaces:
        builder = FragmentBuilderNS(context)
    else:
        builder = FragmentBuilder(context)
    if isinstance(file, StringTypes):
        # The argument is a path: open and close it ourselves.
        with open(file, 'rb') as fp:
            return builder.parseFile(fp)
    return builder.parseFile(file)
def parseFragmentString(string, context, namespaces=True):
    """Parse a fragment of a document from a string, given the context
    from which it was originally extracted.  context should be the
    parent of the node(s) which are in the fragment.
    """
    builder = FragmentBuilderNS(context) if namespaces else FragmentBuilder(context)
    return builder.parseString(string)
def makeBuilder(options):
    """Create a builder based on an Options object."""
    builder_class = ExpatBuilderNS if options.namespaces else ExpatBuilder
    return builder_class(options)
| huran2014/huran.github.io | wot_gateway/usr/lib/python2.7/xml/dom/expatbuilder.py | Python | gpl-2.0 | 36,382 | 0.00044 |
# -*- coding: utf-8 -*-
# Generated by Django 1.11.6 on 2017-11-17 19:24
from __future__ import unicode_literals
import c3nav.mapdata.fields
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Auto-generated by Django 1.11 makemigrations; avoid hand-editing
    # beyond comments so migration history stays reproducible.

    dependencies = [
        ('mapdata', '0047_remove_mapupdate_changed_geometries'),
    ]

    operations = [
        # Adds the Ramp model: an axis-aligned bounding box (minx..maxy),
        # a polygon geometry, and a foreign key to its containing Space.
        migrations.CreateModel(
            name='Ramp',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('minx', models.DecimalField(db_index=True, decimal_places=2, max_digits=6, verbose_name='min x coordinate')),
                ('miny', models.DecimalField(db_index=True, decimal_places=2, max_digits=6, verbose_name='min y coordinate')),
                ('maxx', models.DecimalField(db_index=True, decimal_places=2, max_digits=6, verbose_name='max x coordinate')),
                ('maxy', models.DecimalField(db_index=True, decimal_places=2, max_digits=6, verbose_name='max y coordinate')),
                ('geometry', c3nav.mapdata.fields.GeometryField(default=None, geomtype='polygon')),
                ('space', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='ramps', to='mapdata.Space', verbose_name='space')),
            ],
            options={
                'verbose_name': 'Ramp',
                'verbose_name_plural': 'Ramps',
                'default_related_name': 'ramps',
            },
        ),
    ]
| c3nav/c3nav | src/c3nav/mapdata/migrations/0048_ramp.py | Python | apache-2.0 | 1,532 | 0.004569 |
def _checkInput(index):
if index < 0:
raise ValueError("Indice negativo non supportato [{}]".format(index))
elif type(index) != int:
raise TypeError("Inserire un intero [tipo input {}]".format(type(index).__name__))
def fib_from_string(index):
    """Look up the index-th Fibonacci number (index 0..6) in a digit string."""
    _checkInput(index)
    digits = "0 1 1 2 3 5 8".replace(" ", "")
    return int(digits[index])
def fib_from_list(index):
    """Look up the index-th Fibonacci number (index 0..6) in a literal list."""
    _checkInput(index)
    return [0, 1, 1, 2, 3, 5, 8][index]
def fib_from_algo(index):
    """Compute the index-th Fibonacci number iteratively (no size limit)."""
    _checkInput(index)
    current, previous = 0, 1
    for _ in range(index):
        # Advance the pair one step: F(n), F(n-1) -> F(n+1), F(n).
        current, previous = current + previous, current
    return current
def recursion(index):
    """Naive doubly-recursive Fibonacci (exponential time; teaching example)."""
    if index > 1:
        return recursion(index - 1) + recursion(index - 2)
    # Base cases: F(0) = 0, F(1) = 1.
    return index
def fib_from_recursion_func(index):
    """Validate *index*, then delegate to the recursive implementation."""
    _checkInput(index)
    return recursion(index)
# Public entry point: the recursive variant is the default implementation.
calculate = fib_from_recursion_func
#!/usr/bin/env python
import analyze_conf
import sys
import datetime, glob, job_stats, os, subprocess, time
import operator
import matplotlib
# Set the matplotlib output mode from config if it exists
if not 'matplotlib.pyplot' in sys.modules:
  try:
    matplotlib.use(analyze_conf.matplotlib_output_mode)
  except (NameError, AttributeError):
    # Fall back to 'pdf' when the config setting is absent.  A missing
    # module attribute raises AttributeError (the original only caught
    # NameError, which never fires for a missing config attribute).
    matplotlib.use('pdf')
import matplotlib.pyplot as plt
import numpy
import scipy, scipy.stats
import argparse
import tspl, tspl_utils, lariat_utils, plot
import math
import multiprocessing, functools, itertools
import cPickle as pickle
def do_work(file,mintime,wayness,lariat_dict):
  """Summarize one pickled job file.

  Returns a 5-tuple: (mean stall fraction, mean fraction of loads not
  satisfied by L1, mean cycles-per-instruction, short executable name,
  low-memory-bandwidth flag), or a tuple of five Nones when the file
  cannot be processed or the executable is unknown.
  """
  retval=(None,None,None,None,None)
  res=plot.get_data(file,mintime,wayness,lariat_dict)
  if (res is None):
    return retval
  (ts, ld, tmid,
   read_rate, write_rate, stall_rate, clock_rate, avx_rate, sse_rate, inst_rate,
   meta_rate, l1_rate, l2_rate, l3_rate, load_rate, read_frac, stall_frac) = res
# return (scipy.stats.tmean(stall_frac),
#         scipy.stats.tmean((load_rate - (l1_rate + l2_rate +
#                                         l3_rate))/load_rate))
  # Convert the cache-line transfer rate to bytes/s (64-byte lines).
  mean_mem_rate=scipy.stats.tmean(read_rate+write_rate)*64.0
  ename=ld.exc.split('/')[-1]
  ename=tspl_utils.string_shorten(ld.comp_name(ename,ld.equiv_patterns),8)
  if ename=='unknown':
    return retval
  flag=False
  # NOTE(review): threshold appears to be 75 GB/s divided by 16 --
  # presumably per-core share of node bandwidth; confirm.
  if mean_mem_rate < 75.*1000000000./16.:
    flag=True
  return (scipy.stats.tmean(stall_frac),
          scipy.stats.tmean((load_rate - (l1_rate))/load_rate),
          scipy.stats.tmean(clock_rate/inst_rate),ename,
          flag)
def main():
  """Scan job pickles and scatter-plot CPI against execution-cycle stalls.

  Produces two figures (log-log and linear): miss_v_stall_log and
  miss_v_stall.  Python 2 script (print statements, iterator .next()).
  """
  parser = argparse.ArgumentParser(description='Look for imbalance between'
                                   'hosts for a pair of keys')
  parser.add_argument('filearg', help='File, directory, or quoted'
                      ' glob pattern', nargs='?',default='jobs')
  parser.add_argument('-p', help='Set number of processes',
                      nargs=1, type=int, default=[1])
  n=parser.parse_args()
  filelist=tspl_utils.getfilelist(n.filearg)
  procs  = min(len(filelist),n.p[0])
  # Load one job just to seed the Lariat lookup shared by all workers.
  job=pickle.load(open(filelist[0]))
  jid=job.id
  epoch=job.end_time
  ld=lariat_utils.LariatData(jid,end_epoch=epoch,daysback=3,directory=analyze_conf.lariat_path)
  if procs < 1:
    print 'Must have at least one file'
    exit(1)
  pool   = multiprocessing.Pool(processes=procs)
  partial_work=functools.partial(do_work,mintime=3600.,wayness=16,lariat_dict=ld.ld)
  results=pool.map(partial_work,filelist)
  fig1,ax1=plt.subplots(1,1,figsize=(20,8),dpi=80)
  fig2,ax2=plt.subplots(1,1,figsize=(20,8),dpi=80)
  maxx=0.
  # Two passes: flagged (low-bandwidth) jobs first, then the rest.
  for state in [ True, False ]:
    stalls=[]
    misses=[]
    cpis=[]
    enames=[]
    for (s,m,cpi,ename,flag) in results:
      if (s != None and m > 0. and m < 1.0 and flag==state):
        stalls.extend([s])
        misses.extend([m])
        cpis.extend([cpi])
        enames.extend([ename])
    # Assign each executable a unique marker+color combination.
    markers = itertools.cycle(('o','x','+','^','s','8','p',
                               'h','*','D','<','>','v','d','.'))
    colors  = itertools.cycle(('b','g','r','c','m','k','y'))
    fmt={}
    for e in enames:
      if not e in fmt:
        fmt[e]=markers.next()+colors.next()
    for (s,c,e) in zip(stalls,cpis,enames):
#      ax1.plot(numpy.log10(1.-(1.-s)),numpy.log10(c),
      # x-axis: 1/(1 - stall fraction), i.e. cycles per execution cycle.
      maxx=max(maxx,1./(1.-s))
      ax1.plot((1./(1.-s)),(c),
               marker=fmt[e][0],
               markeredgecolor=fmt[e][1],
               linestyle='', markerfacecolor='None',
               label=e)
      ax1.hold=True
      ax2.plot((1./(1.-s)),(c),
               marker=fmt[e][0],
               markeredgecolor=fmt[e][1],
               linestyle='', markerfacecolor='None',
               label=e)
      ax2.hold=True
    #ax.plot(numpy.log10(stalls),numpy.log10(cpis),fmt)
    #ax.plot(numpy.log10(1.0/(1.0-numpy.array(stalls))),numpy.log10(cpis),fmt)
  ax1.set_xscale('log')
  ax1.set_xlim(left=0.95,right=1.05*maxx)
  ax1.set_yscale('log')
  # Shrink the axes so the (large) legend fits beside the plot.
  box = ax1.get_position()
  ax1.set_position([box.x0, box.y0, box.width * 0.45, box.height])
  box = ax2.get_position()
  ax2.set_position([box.x0, box.y0, box.width * 0.45, box.height])
  # De-duplicate legend entries (one per executable, not per point).
  handles=[]
  labels=[]
  for h,l in zip(*ax1.get_legend_handles_labels()):
    if l in labels:
      continue
    else:
      handles.extend([h])
      labels.extend([l])
  ax1.legend(handles,labels,bbox_to_anchor=(1.05, 1),
             loc=2, borderaxespad=0., numpoints=1,ncol=4)
  ax1.set_xlabel('log(Cycles per Execution Cycle)')
  ax1.set_ylabel('log(CPI)')
  handles=[]
  labels=[]
  for h,l in zip(*ax2.get_legend_handles_labels()):
    if l in labels:
      continue
    else:
      handles.extend([h])
      labels.extend([l])
  ax2.legend(handles,labels,bbox_to_anchor=(1.05, 1),
             loc=2, borderaxespad=0., numpoints=1,ncol=4)
  ax2.set_xlabel('Cycles per Execution Cycle')
  ax2.set_ylabel('CPI')
  fname='miss_v_stall_log'
  fig1.savefig(fname)
  fname='miss_v_stall'
  fig2.savefig(fname)
  plt.close()
# Run only when executed as a script, not when imported.
if __name__ == '__main__':
  main()
| ubccr/tacc_stats | analyze/process_pickles/miss_vs_stall.py | Python | lgpl-2.1 | 5,040 | 0.044048 |
def populate(template, values):
    """Fill a template dict's "text" with entries from *values*.

    *template* is a dict with a "text" string containing <<tag>> markers
    and a "parameters" sequence naming the tags.  Each <<name>> in the
    text is replaced by str(values[name]).

    example:
    > template = {"parameters": ("your name",),
    ...           "text": "hello there <<your name>>!"}
    > values = {"your name": "bukaroo banzai"}
    > populate(template, values)
    "hello there bukaroo banzai!"
    """
    result = template["text"]
    # Sentinel string so the error message below is meaningful even when
    # the loop never ran (i.e. template["parameters"] itself is missing).
    name = "None"
    try:
        for name in template["parameters"]:
            result = result.replace("<<%s>>" % name, str(values[name]))
    except KeyError:
        # NOTE(review): on a missing entry the partially substituted text
        # is still returned below -- confirm callers expect best-effort.
        print "Template value dictionary is missing the entry:", name
    return result
### dts file template
# Top-level device-tree-overlay source; <<fragments>> is filled with the
# concatenated fragment/helper templates below.
dtsContents = {
    "parameters": ("type", "part number", "header names", "hardware names", "fragments"),
    "text": """/*
 * Easy <<type>> mux control of <<header names>> (<<hardware names>>)
 */
/dts-v1/;
/plugin/;
/ {
    compatible = "ti,beaglebone", "ti,beaglebone-black";
    /* identification */
    part-number = "<<part number>>";
    /* version = "00A0"; */
    /* state the resources this cape uses */
    exclusive-use =
        /* the pin header uses */
        <<header names>>,
        /* the hardware IP uses */
        <<hardware names>>;
<<fragments>>
};
"""
}

### fragment template
# One pinmux state definition targeting am33xx_pinmux.
fragment = {
    "parameters": ("type", "index", "header name", "clean header name", "state name", "offset and mux list"),
    "text": """
    /* <<state name>> state */
    fragment@<<index>> {
        target = <&am33xx_pinmux>;
        __overlay__ {
            <<type>>_<<clean header name>>_<<state name>>: pinmux_<<type>>_<<header name>>_<<state name>> {
                pinctrl-single,pins = <
                    <<offset and mux list>>
                >;
            };
        };
    };
"""
}

### pinctrlTemplate template
# Single pinctrl-N property line referencing a state defined above.
pinctrl = {
    "parameters": ("type", "index", "clean header name", "state name"),
    "text": """pinctrl-<<index>> = <&<<type>>_<<clean header name>>_<<state name>>>;"""
}

# bone-pinmux-helper fragment allowing runtime state switching via sysfs.
pinmuxHelper = {
    "parameters": ("type", "index", "header name", "state names list", "pinctrl list", "gpio index"),
    "text": """
    fragment@<<index>> {
        target = <&ocp>;
        __overlay__ {
            <<type>>-<<header name>>_gpio<<gpio index>> {
                compatible = "bone-pinmux-helper";
                status = "okay";
                pinctrl-names = <<state names list>>;
                <<pinctrl list>>
            };
        };
    };
"""
}

# gpio-leds fragment exposing a header pin as a Linux LED device.
ledHelper = {
    "parameters": ("index", "header name", "gpio bank + 1", "gpio pin", "output pinctrl entry"),
    "text": """
    fragment@<<index>> {
        target = <&ocp>;
        __overlay__ {
            led_<<header name>>_helper {
                compatible = "gpio-leds";
                pinctrl-names = "default";
                <<output pinctrl entry>>
                leds-<<header name>> {
                    label = "leds:<<header name>>";
                    gpios = <&gpio<<gpio bank + 1>> <<gpio pin>> 0>;
                    linux,default-trigger = "none";
                    default-state = "off";
                };
            };
        };
    };
"""
}

# Fragment toggling the PRU subsystem's status (e.g. "okay"/"disabled").
prussHelper = {
    "parameters": ("status", "index"),
    "text": """
    fragment@<<index>> {
        target = <&pruss>;
        __overlay__ {
            status = "<<status>>";
        };
    };
"""
}
# Copyright (c) 2012-2015 Netforce Co. Ltd.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE
# OR OTHER DEALINGS IN THE SOFTWARE.
from netforce.model import Model, fields
class BomLine(Model):
    # Netforce ORM model: one row is a single component line of a bill
    # of materials (BoM).
    _name = "bom.line"
    _fields = {
        # Parent BoM; deleting the BoM cascades to its lines.
        "bom_id": fields.Many2One("bom", "BoM", required=True, on_delete="cascade"),
        "product_id": fields.Many2One("product", "Product", required=True),
        # Quantity of the component per unit produced (6 decimal places).
        "qty": fields.Decimal("Qty", required=True, scale=6),
        "uom_id": fields.Many2One("uom", "UoM", required=True),
        "location_id": fields.Many2One("stock.location", "RM Warehouse"),
        # Optional sourcing rules for the raw-material container/lot.
        "container": fields.Selection([["sale", "From Sales Order"]], "RM Container"),
        "lot": fields.Selection([["production", "From Production Order"]], "RM Lot"),
        "issue_method": fields.Selection([["manual", "Manual"], ["backflush", "Backflush"]], "Issue Method"),
        "qty2": fields.Decimal("Qty2", scale=6),
        "notes": fields.Text("Notes"),
    }

BomLine.register()
| sidzan/netforce | netforce_mfg/netforce_mfg/models/bom_line.py | Python | mit | 1,967 | 0.004575 |
#!/usr/bin/python
# NOTE(review): Python 2 script (raw_input / print statements).  The
# validation logic below has structural problems (dead code after
# continue/exit paths, exit(0) used for error exits); annotated only,
# behavior left untouched.
import re
userInput = raw_input("input equation\n")
numCount = 0
operandCount = 0
entryBracketCount = 0
exitBracketCount = 0
charCount = 0
endOfLine = len(userInput) - 1
# Classify each character; the trailing numbers in the prints are debug
# markers identifying which branch fired.
for i in range(len(userInput)):
    if (re.search('[\s*a-z\s*A-Z]+', userInput[i])):
        charCount = charCount + 1
        print operandCount, " 1"
    elif (re.search('[\s*0-9]+', userInput[i])):
        numCount = numCount + 1
        print operandCount, " 2"
    elif (re.search('[\*]', userInput[i])):
        print 'TRUE'
#        operandCount = operandCount + 1
#        print operandCount, " 3.5"
#    elif (re.search('[\s*\+|\s*\-|\s*\/]+', userInput[i])):
    elif (re.search('[+-/*]+', userInput[i])):
        operandCount = operandCount + 1
        print operandCount, " 3"
#        if(re.search('[\s*\+|\s*\-|\s*\/]+', userInput[endOfLine])):
        # An expression may not end with an operator.
        if(re.search('[+-/*]+', userInput[endOfLine])):
            print "invalid expression"
            print "1"
            exit(0)
        else:
            if((re.search('[\s*a-zA-Z]+', userInput[i - 1])) or (re.search('[\s*\d]+', userInput[i - 1]))):
                continue
            else:
                print 'invalid expression'
                print '2'
                exit(0)
            # NOTE(review): the statements below appear unreachable --
            # every path above either continues or exits.
            if(re.search('[\s*\d]+', userInput[i - 1])):
                continue
            else:
                print 'invalid expression'
                print '3'
                exit(0)
        # NOTE(review): also unreachable for the same reason.
        if(re.search('[\s*a-zA-Z]+', userInput[i + 1])):
            continue
        elif(re.search('[\s*\d]+', userInput[i + 1])):
            continue
        elif (re.search('[\(]+', userInput[i + 1])):
            continue
        elif (re.search('[\)]+', userInput[i + 1])):
            continue
        else:
            print 'invalid expression'
            print '4'
            exit(0)
    elif (re.search('[\(]+', userInput[i])):
        entryBracketCount = entryBracketCount + 1
        print operandCount, " 4"
    elif (re.search('[\)]+', userInput[i])):
        exitBracketCount = exitBracketCount + 1
        print operandCount, " 5"
        if(re.search('[\)]+', userInput[endOfLine])):
            continue
        else:
            # A close bracket may not be immediately followed by an open one.
            if(re.search('[\(]+', userInput[i + 1])):
                print 'invalid expression'
                print '5'
                exit(0)
print operandCount, " 6"
# Final structural checks: balanced brackets, at least one operator, and
# at least one operand (letter or digit).
if (entryBracketCount != exitBracketCount):
    print "invalid expression"
    print '6'
    exit(0)
elif operandCount == 0:
    print operandCount
    print "invalid expression"
    print '7'
    exit(0)
elif ((numCount == 0) and (charCount == 0)):
    print "invalid expression"
    print '8'
    exit(0)
else:
    print "valid expression"
"""Support for Zigbee switches."""
import voluptuous as vol
from homeassistant.components.switch import SwitchDevice
from . import PLATFORM_SCHEMA, ZigBeeDigitalOut, ZigBeeDigitalOutConfig
CONF_ON_STATE = "on_state"
DEFAULT_ON_STATE = "high"
STATES = ["high", "low"]
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({vol.Optional(CONF_ON_STATE): vol.In(STATES)})
def setup_platform(hass, config, add_entities, discovery_info=None):
    """Set up the Zigbee switch platform."""
    # Wrap the validated config in a ZigBeeDigitalOutConfig and register a
    # single switch entity; discovery_info is accepted but unused here.
    add_entities([ZigBeeSwitch(hass, ZigBeeDigitalOutConfig(config))])
class ZigBeeSwitch(ZigBeeDigitalOut, SwitchDevice):
    """Representation of a Zigbee Digital Out device."""
    # The redundant ``pass`` was removed: the docstring already forms a
    # valid class body.  All behavior comes from the two base classes.
| leppa/home-assistant | homeassistant/components/zigbee/switch.py | Python | apache-2.0 | 669 | 0.001495 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Python 3.2 code
#
# Copyright (c) 2012 Jeff Smits
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# C3P - C-compatible code preprocessor
# This commandline tool reads a file and expands macro's.
#
# This file is a utility file and doesn't contain the whole tool.
# Also it does not run standalone.
#
# This file imports all the tests
from .acceptance_tests import Acc_test
from .unit_tests import * | Apanatshka/C3P | c3p/tests/all_tests.py | Python | gpl-3.0 | 1,036 | 0.003861 |
#!/usr/bin/env python2
#
# wsi_bot_codebook3
#
# Version 3 of codebook construction:
#
# -uses OpenCV for faster operation - but different local descriptors than in the 1st version;
# -uses annotation files for defining the regions from where the descriptors are to be
# extracted
# - try to optimize the codebook with respect to some class labels
from __future__ import (absolute_import, division, print_function, unicode_literals)
__version__ = 0.1
__author__ = 'Vlad Popovici'
import os
import argparse as opt
import numpy as np
import numpy.linalg
from scipy.stats import ttest_ind
import skimage.draw
import skimage.io
from skimage.exposure import equalize_adapthist, rescale_intensity
import cv2
import cv2.xfeatures2d
from sklearn.cluster import MiniBatchKMeans
from sklearn.lda import LDA
from stain.he import rgb2he
from util.storage import ModelPersistence
def find_in_list(_value, _list):
    """
    Return a numpy int array holding every position in *_list* whose
    element compares equal to *_value* (empty array when absent).
    """
    hits = [pos for pos, item in enumerate(_list) if item == _value]
    return np.array(hits, dtype=int)
def main():
    """Command-line entry point.

    Reads (image, annotation, label) triples, extracts SURF descriptors from
    the H&E-deconvolved, polygon-masked region of each image, then searches
    codebook sizes k=10..120 for the one whose normalized bag-of-words
    representation best separates the two classes (LDA on the 10 codewords
    with the lowest t-test p-values). The winning MiniBatchKMeans quantizer,
    the standardization parameters and the per-cluster distance statistics
    are pickled to the output file.
    """
    p = opt.ArgumentParser(description="""
    Extracts features from annotated regions and constructs a codebook of a given size.
    """)
    p.add_argument('in_file', action='store', help='a file with image file, annotation file and label (0/1)')
    p.add_argument('out_file', action='store', help='resulting model file name')
    p.add_argument('-t', '--threshold', action='store', type=int, default=5000,
                   help='Hessian threshold for SURF features.')
    p.add_argument('-s', '--standardize', action='store_true', default=False,
                   help='should the features be standardized before codebook construction?')
    p.add_argument('-v', '--verbose', action='store_true', help='verbose?')
    args = p.parse_args()
    th = args.threshold

    all_image_names, all_descriptors = [], []
    y = []                                 # class label per image
    unique_image_names = []
    with open(args.in_file, mode='r') as fin:
        for l in fin.readlines():
            l = l.strip()
            if len(l) == 0:
                break
            # file names: image and its annotation, plus the class label
            img_file, annot_file, lbl = [z_ for z_ in l.split()][0:3]
            y.append(int(lbl))
            if args.verbose:
                print("Image:", img_file)
            img = cv2.imread(img_file)
            # annotation: whitespace-separated x y pairs describing a polygon
            coords = np.fromfile(annot_file, dtype=int, sep=' ')
            # // : true division is active (__future__), reshape needs ints
            coords = np.reshape(coords, (coords.size // 2, 2), order='C')
            # bounding box of the annotated polygon:
            xmin, ymin = coords.min(axis=0)
            xmax, ymax = coords.max(axis=0)
            if args.verbose:
                print("\t...H&E extraction")
            img = img[ymin:ymax+2, xmin:xmax+2, :]   # keep only the region of interest
            img_h, _ = rgb2he(img, normalize=True)   # get the H- component
            img_h = equalize_adapthist(img_h)
            img_h = rescale_intensity(img_h, out_range=(0, 255))
            # make sure the dtype is right for image and the mask: OpenCV is
            # sensitive to data type
            img_h = img_h.astype(np.uint8)
            if args.verbose:
                print("\t...building mask")
            mask = np.zeros(img_h.shape, dtype=np.uint8)
            # polygon coords shifted into the cropped image frame
            r, c = skimage.draw.polygon(coords[:, 1]-ymin, coords[:, 0]-xmin)
            mask[r, c] = 1                 # everything outside the region is black
            if args.verbose:
                print("\t...feature detection and computation")
            img_h *= mask
            feat = cv2.xfeatures2d.SURF_create(hessianThreshold=th)
            keyp, desc = feat.detectAndCompute(img_h, mask)
            if args.verbose:
                print("\t...", str(len(keyp)), "features extracted")
            all_descriptors.extend(desc)
            all_image_names.extend([img_file] * len(keyp))
            unique_image_names.append(img_file)
    # end for

    X = np.hstack(all_descriptors)
    X = np.reshape(X, (len(all_descriptors), all_descriptors[0].size), order='C')
    # Standardization parameters default to the identity transform so they
    # can always be stored with the model. (The original code referenced Xm
    # and Xs unconditionally below, raising NameError without -s.)
    Xm = np.zeros(X.shape[1])
    Xs = np.ones(X.shape[1])
    if args.standardize:
        # make each variable (column) mean-centered with unit std. deviation
        Xm = np.mean(X, axis=0)
        Xs = np.std(X, axis=0)
        Xs[np.isclose(Xs, 0.0)] = 1.0      # guard against division by ~0
        X = (X - Xm) / Xs

    y = np.array(y, dtype=int)
    rng = np.random.RandomState(0)
    acc = []                               # classifier accuracy per k
    vqs = []                               # all quantizers, to find the best
    # O(1) image-name -> frequency-row lookup (list.index was O(n) per point)
    name_to_row = {name: i for i, name in enumerate(unique_image_names)}
    for k in np.arange(10, 121, 10):
        # Method:
        # -generate a codebook with k codewords
        # -re-code the data
        # -compute frequencies
        # -estimate classification on best 10 features
        if args.verbose:
            print("\nK-means clustering (k =", str(k), ")")
            print("\t...with", str(X.shape[0]), "points")
        # -codebook and re-coding
        vq = MiniBatchKMeans(n_clusters=k, random_state=rng,
                             batch_size=500, compute_labels=True,
                             verbose=False)  # vector quantizer
        vq.fit(X)
        vqs.append(vq)
        # -per-image codeword frequencies, normalized to sum to 1
        frq = np.zeros((len(unique_image_names), k))
        for i in range(vq.labels_.size):
            frq[name_to_row[all_image_names[i]], vq.labels_[i]] += 1.0
        for i in range(len(unique_image_names)):
            if frq[i, :].sum() > 0:
                frq[i, :] /= frq[i, :].sum()
        if args.verbose:
            print("...\tfeature selection (t-test)")
        pv = np.ones(k)
        for i in range(k):
            _, pv[i] = ttest_ind(frq[y == 0, i], frq[y == 1, i])
        idx = np.argsort(pv)               # order of the p-values
        if args.verbose:
            print("\t...classification performance estimation")
        clsf = LDA(solver='lsqr', shrinkage='auto').fit(frq[:, idx[:10]], y)  # keep top 10 features
        acc.append(clsf.score(frq[:, idx[:10]], y))

    acc = np.array(acc)
    k = np.arange(10, 121, 10)[acc.argmax()]   # best k
    if args.verbose:
        print("\nOptimal codebook size:", str(k))
    # final codebook:
    vq = vqs[acc.argmax()]
    # average distance and std.dev. of the points in each cluster (use a
    # separate loop variable -- the original shadowed k here)
    avg_dist = np.zeros(k)
    sd_dist = np.zeros(k)
    for ci in range(k):
        d = numpy.linalg.norm(X[vq.labels_ == ci, :] - vq.cluster_centers_[ci, :], axis=1)
        avg_dist[ci] = d.mean()
        sd_dist[ci] = d.std()

    with ModelPersistence(args.out_file, 'c', format='pickle') as d:
        d['codebook'] = vq
        d['shift'] = Xm
        d['scale'] = Xs
        d['standardize'] = args.standardize
        d['avg_dist_to_centroid'] = avg_dist
        d['stddev_dist_to_centroid'] = sd_dist
    return True
# Script entry point: build and persist the codebook when run directly.
if __name__ == '__main__':
    main()
| vladpopovici/WSItk | WSItk/tools/wsi_bot_codebook3.py | Python | mit | 7,170 | 0.007671 |
"""Image renderer module."""
from mfr.core import RenderResult
def render_img_tag(fp, src=None, alt=''):
    """Render a simple HTML ``<img>`` tag for a file.

    :param fp: File pointer
    :param src: Path to file; falls back to ``fp.name`` when falsy
    :param alt: Alternate text for the image
    :return: RenderResult object containing the content html
    """
    if not src:
        # No explicit source given: default to the file's own name.
        src = fp.name
    markup = '<img src="{src}" alt="{alt}" />'.format(src=src, alt=alt)
    return RenderResult(markup)
| icereval/modular-file-renderer | mfr/ext/image/render.py | Python | apache-2.0 | 481 | 0 |
from zeit.cms.i18n import MessageFactory as _
import grokcore.component as grok
import logging
import zeit.cms.interfaces
import zeit.objectlog.interfaces
import zeit.push.interfaces
import zope.cachedescriptors.property
import zope.component
log = logging.getLogger(__name__)
class Message(grok.Adapter):
    """Base adapter for pushing a notification about a CMS content object.

    Concrete subclasses register under a ``grok.name`` matching the utility
    name of the IPushNotifier to use, and set ``get_text_from`` to the name
    of the IPushMessages attribute that supplies the notification text.
    """
    grok.context(zeit.cms.interfaces.ICMSContent)
    grok.implements(zeit.push.interfaces.IMessage)
    grok.baseclass()
    # Name of the IPushMessages attribute holding the message text;
    # must be overridden by subclasses.
    get_text_from = NotImplemented
    def __init__(self, context):
        self.context = context
        self.config = {}
    def send(self):
        """Send push notification to external service.
        We *never* want to re-send a push notification on publish, even if the
        initial notification failed, since the information could be outdated.
        Therefore we must disable the notification before anything else.
        Re-sending can be done manually by re-enabling the service.
        """
        self._disable_message_config()
        if not self.text:
            raise ValueError('No text configured')
        kw = {}
        kw.update(self.config)
        kw['message'] = self
        try:
            notifier = zope.component.getUtility(
                zeit.push.interfaces.IPushNotifier, name=self.type)
            notifier.send(self.text, self.url, **kw)
            self.log_success()
            log.info('Push notification for %s sent', self.type)
        except Exception, e:
            # Deliberately broad: a failed push must never break publishing.
            # The failure is recorded in the object log and the server log.
            self.log_error(str(e))
            log.error(u'Error during push to %s with config %s',
                      self.type, self.config, exc_info=True)
    def _disable_message_config(self):
        # Flip enabled=False on this message's config entry (see send()).
        push = zeit.push.interfaces.IPushMessages(self.context)
        push.set(self.config, enabled=False)
    @property
    def text(self):
        # Message text, read from the attribute named by ``get_text_from``.
        push = zeit.push.interfaces.IPushMessages(self.context)
        return getattr(push, self.get_text_from)
    @property
    def type(self):
        # The subclass's grok.name, e.g. 'facebook'; doubles as the
        # IPushNotifier utility name used in send().
        return self.__class__.__dict__['grokcore.component.directive.name']
    @property
    def url(self):
        # Public URL of the content: the internal CMS namespace prefix is
        # replaced with the configured public push-target host.
        config = zope.app.appsetup.product.getProductConfiguration(
            'zeit.push')
        return zeit.push.interfaces.IPushURL(self.context).replace(
            zeit.cms.interfaces.ID_NAMESPACE, config['push-target-url'])
    @zope.cachedescriptors.property.Lazy
    def object_log(self):
        # Lazily resolved object log of the adapted content.
        return zeit.objectlog.interfaces.ILog(self.context)
    def log_success(self):
        self.object_log.log(_(
            'Push notification for "${name}" sent.'
            ' (Message: "${message}", Details: ${details})',
            mapping={'name': self.type.capitalize(),
                     'message': self.text,
                     'details': self.log_message_details}))
    def log_error(self, reason):
        self.object_log.log(_(
            'Error during push to ${name} ${details}: ${reason}',
            mapping={'name': self.type.capitalize(),
                     'details': self.log_message_details,
                     'reason': reason}))
    @property
    def log_message_details(self):
        # Extra detail string for log entries; subclasses may override.
        return '-'
@grok.adapter(zeit.cms.interfaces.ICMSContent)
@grok.implementer(zeit.push.interfaces.IPushURL)
def default_push_url(context):
    # Default push URL is the content's unique id; Message.url later swaps
    # the CMS namespace prefix for the configured public host.
    return context.uniqueId
class AccountData(grok.Adapter):
    """Flat, form-friendly attribute access to per-channel push settings.

    Each channel (Facebook account, Twitter account, mobile push) is stored
    as one "service" dict inside IPushMessages.message_config. The
    properties below expose those dict keys as plain attributes. Getters
    return the stored value, or a falsy value when no service entry exists
    yet; setters create/update the entry via ``self.push.set``.
    """
    grok.context(zeit.cms.interfaces.ICMSContent)
    grok.implements(zeit.push.interfaces.IAccountData)
    def __init__(self, context):
        super(AccountData, self).__init__(context)
        self.__parent__ = context # make security work
    @property
    def push(self):
        # Shortcut to the push configuration of the adapted content object.
        return zeit.push.interfaces.IPushMessages(self.context)
    # --- Facebook accounts (main / magazin / campus) ---
    @property
    def facebook_main_enabled(self):
        source = zeit.push.interfaces.facebookAccountSource(None)
        service = self.push.get(type='facebook', account=source.MAIN_ACCOUNT)
        return service and service.get('enabled')
    @facebook_main_enabled.setter
    def facebook_main_enabled(self, value):
        source = zeit.push.interfaces.facebookAccountSource(None)
        self.push.set(dict(
            type='facebook', account=source.MAIN_ACCOUNT),
            enabled=value)
    # We cannot use the key ``text``, since the first positional parameter of
    # IPushNotifier.send() is also called text, which causes TypeError.
    @property
    def facebook_main_text(self):
        source = zeit.push.interfaces.facebookAccountSource(None)
        service = self.push.get(type='facebook', account=source.MAIN_ACCOUNT)
        return service and service.get('override_text')
    @facebook_main_text.setter
    def facebook_main_text(self, value):
        source = zeit.push.interfaces.facebookAccountSource(None)
        self.push.set(dict(
            type='facebook', account=source.MAIN_ACCOUNT),
            override_text=value)
    @property
    def facebook_magazin_enabled(self):
        source = zeit.push.interfaces.facebookAccountSource(None)
        service = self.push.get(
            type='facebook', account=source.MAGAZIN_ACCOUNT)
        return service and service.get('enabled')
    @facebook_magazin_enabled.setter
    def facebook_magazin_enabled(self, value):
        source = zeit.push.interfaces.facebookAccountSource(None)
        self.push.set(dict(
            type='facebook', account=source.MAGAZIN_ACCOUNT),
            enabled=value)
    @property
    def facebook_magazin_text(self):
        source = zeit.push.interfaces.facebookAccountSource(None)
        service = self.push.get(
            type='facebook', account=source.MAGAZIN_ACCOUNT)
        return service and service.get('override_text')
    @facebook_magazin_text.setter
    def facebook_magazin_text(self, value):
        source = zeit.push.interfaces.facebookAccountSource(None)
        self.push.set(dict(
            type='facebook', account=source.MAGAZIN_ACCOUNT),
            override_text=value)
    @property
    def facebook_campus_enabled(self):
        source = zeit.push.interfaces.facebookAccountSource(None)
        service = self.push.get(type='facebook', account=source.CAMPUS_ACCOUNT)
        return service and service.get('enabled')
    @facebook_campus_enabled.setter
    def facebook_campus_enabled(self, value):
        source = zeit.push.interfaces.facebookAccountSource(None)
        self.push.set(dict(
            type='facebook', account=source.CAMPUS_ACCOUNT),
            enabled=value)
    @property
    def facebook_campus_text(self):
        source = zeit.push.interfaces.facebookAccountSource(None)
        service = self.push.get(
            type='facebook', account=source.CAMPUS_ACCOUNT)
        return service and service.get('override_text')
    @facebook_campus_text.setter
    def facebook_campus_text(self, value):
        source = zeit.push.interfaces.facebookAccountSource(None)
        self.push.set(dict(
            type='facebook', account=source.CAMPUS_ACCOUNT),
            override_text=value)
    # --- Twitter accounts (main / per-ressort / print) ---
    @property
    def twitter_main_enabled(self):
        source = zeit.push.interfaces.twitterAccountSource(None)
        service = self.push.get(type='twitter', account=source.MAIN_ACCOUNT)
        return service and service.get('enabled')
    @twitter_main_enabled.setter
    def twitter_main_enabled(self, value):
        source = zeit.push.interfaces.twitterAccountSource(None)
        self.push.set(dict(
            type='twitter', account=source.MAIN_ACCOUNT),
            enabled=value)
    @property
    def twitter_ressort_text(self):
        return self._nonmain_twitter_service.get('override_text')
    @twitter_ressort_text.setter
    def twitter_ressort_text(self, value):
        self.push.set(
            dict(type='twitter', variant='ressort'), override_text=value)
    @property
    def twitter_ressort(self):
        return self._nonmain_twitter_service.get('account')
    @twitter_ressort.setter
    def twitter_ressort(self, value):
        # Migrates legacy entries (keyed only by account) to the
        # variant='ressort' format, preserving their enabled flag.
        service = self._nonmain_twitter_service
        enabled = None
        # BBB `variant` was introduced in zeit.push-1.21
        if service and 'variant' not in service:
            self.push.delete(service)
            enabled = service.get('enabled')
        self.push.set(
            dict(type='twitter', variant='ressort'), account=value)
        if enabled is not None:
            self.twitter_ressort_enabled = enabled
    @property
    def twitter_ressort_enabled(self):
        return self._nonmain_twitter_service.get('enabled')
    @twitter_ressort_enabled.setter
    def twitter_ressort_enabled(self, value):
        # Same BBB migration as twitter_ressort, preserving the account.
        service = self._nonmain_twitter_service
        account = None
        # BBB `variant` was introduced in zeit.push-1.21
        if service and 'variant' not in service:
            self.push.delete(service)
            account = service.get('account')
        self.push.set(
            dict(type='twitter', variant='ressort'), enabled=value)
        if account is not None:
            self.twitter_ressort = account
    @property
    def _nonmain_twitter_service(self):
        # The ressort twitter service entry (new or legacy format), or {}.
        source = zeit.push.interfaces.twitterAccountSource(None)
        for service in self.push.message_config:
            if service['type'] != 'twitter':
                continue
            if service.get('variant') == 'ressort':
                return service
            # BBB `variant` was introduced in zeit.push-1.21
            if service.get('account') != source.MAIN_ACCOUNT:
                return service
        return {}
    @property
    def twitter_print_enabled(self):
        source = zeit.push.interfaces.twitterAccountSource(None)
        service = self.push.get(type='twitter', account=source.PRINT_ACCOUNT)
        return service and service.get('enabled')
    @twitter_print_enabled.setter
    def twitter_print_enabled(self, value):
        source = zeit.push.interfaces.twitterAccountSource(None)
        self.push.set(dict(
            type='twitter', account=source.PRINT_ACCOUNT),
            enabled=value)
    @property
    def twitter_print_text(self):
        source = zeit.push.interfaces.twitterAccountSource(None)
        service = self.push.get(type='twitter', account=source.PRINT_ACCOUNT)
        return service and service.get('override_text')
    @twitter_print_text.setter
    def twitter_print_text(self, value):
        source = zeit.push.interfaces.twitterAccountSource(None)
        self.push.set(
            dict(type='twitter', account=source.PRINT_ACCOUNT),
            override_text=value)
    # --- Mobile push notifications ---
    @property
    def mobile_enabled(self):
        service = self._mobile_service
        return service and service.get('enabled')
    @mobile_enabled.setter
    def mobile_enabled(self, value):
        self._set_mobile_service(enabled=value)
    @property
    def mobile_title(self):
        service = self._mobile_service
        return service and service.get('title')
    @mobile_title.setter
    def mobile_title(self, value):
        self._set_mobile_service(title=value)
    @property
    def mobile_text(self):
        service = self._mobile_service
        return service and service.get('override_text')
    @mobile_text.setter
    def mobile_text(self, value):
        self._set_mobile_service(override_text=value)
    @property
    def mobile_uses_image(self):
        service = self._mobile_service
        return service and service.get('uses_image')
    @mobile_uses_image.setter
    def mobile_uses_image(self, value):
        self._set_mobile_service(uses_image=value)
    @property
    def mobile_image(self):
        # Stored as a uniqueId string; resolved to the content object here.
        service = self._mobile_service
        if not service:
            return None
        return zeit.cms.interfaces.ICMSContent(service.get('image'), None)
    @mobile_image.setter
    def mobile_image(self, value):
        if value is not None:
            value = value.uniqueId
        self._set_mobile_service(image=value)
    @property
    def mobile_buttons(self):
        service = self._mobile_service
        return service and service.get('buttons')
    @mobile_buttons.setter
    def mobile_buttons(self, value):
        self._set_mobile_service(buttons=value)
    @property
    def mobile_payload_template(self):
        # Stored as a source token; resolved to the template object here.
        service = self._mobile_service
        return service and zeit.push.interfaces.PAYLOAD_TEMPLATE_SOURCE\
            .factory.find(service.get('payload_template'))
    @mobile_payload_template.setter
    def mobile_payload_template(self, value):
        if value is None:
            token = None
        else:
            token = zeit.push.interfaces.PAYLOAD_TEMPLATE_SOURCE\
                .factory.getToken(value)
        self._set_mobile_service(payload_template=token)
    @property
    def _mobile_service(self):
        # The manual mobile service entry (new or legacy format), or None.
        service = self.push.get(type='mobile', variant='manual')
        if service:
            return service
        # BBB `variant` was introduced in zeit.push-1.26.0
        service = self.push.get(type='mobile')
        if service and not service.get('variant'):
            return service
    def _set_mobile_service(self, **kw):
        # Update the manual mobile entry, migrating a legacy (variant-less)
        # entry by carrying over any keys not being overwritten.
        service = self._mobile_service
        # BBB `variant` was introduced in zeit.push-1.26.0
        if service and not service.get('variant'):
            self.push.delete(service)
            for key, value in service.items():
                if key not in kw:
                    kw[key] = value
        self.push.set(dict(type='mobile', variant='manual'), **kw)
| ZeitOnline/zeit.push | src/zeit/push/message.py | Python | bsd-3-clause | 13,344 | 0 |
import sys
import os
from scale_model import StartupDataModel, VCModel
from flask.ext.restful import Resource, reqparse
from flask import Flask, jsonify, request, make_response
import os
from database import db
from flask.ext.security import current_user
from json import dumps
class Scale_DAO(object):
    """Data-access helper: loads the most recent StartupDataModel row
    belonging to the currently logged-in user (None when there is none)."""
    def __init__(self):
        print 'making scale DAO'
        self.user_scale = StartupDataModel.query.filter_by(username=current_user.email).order_by(StartupDataModel.date.desc()).first()
        print self.user_scale
class Scale_resource(Resource):
    """REST resource exposing the current user's startup scale data."""
    def get(self, **kwargs):
        """Return the most recent scale record of the logged-in user.

        Responds with status 400 when not logged in, the record as JSON
        when one exists, and ``scale_authed=False`` otherwise.
        TODO: get old data to render in form as default
        """
        if current_user.is_anonymous():
            return jsonify(status=400)
        scale = Scale_DAO()
        if scale.user_scale:
            return make_response(dumps(scale.user_scale.as_dict()))
        else:
            return jsonify(scale_authed=False)
    def post(self):
        """Create a new scale record from the posted JSON payload.

        TODO: add update instead of just creating whole new record
        """
        if current_user.is_anonymous():
            return jsonify(msg="You are no longer logged in", status=400)
        try:
            data = request.json
            cb_url = data.get('crunchbase_url')
            al_url = data.get('angellist_url')
            description = data.get('description')
            new_data = StartupDataModel(username=current_user.email, crunchbase_url=cb_url, angellist_url=al_url, description=description)
            db.session.add(new_data)
            db.session.commit()
            return jsonify(status=200, msg="Data added successfully!")
        except Exception:
            # Original code built this error response but never returned it,
            # so clients got an empty 500 instead. Also roll back the failed
            # transaction so the session stays usable.
            db.session.rollback()
            return jsonify(msg="Error adding your data.")
| wigginslab/lean-workbench | lean_workbench/scale/scale_resource.py | Python | mit | 1,731 | 0.005777 |
import numpy
# from nmt import train
# from nmtlm import train
from nmt import train
def main(job_id, params):
print params
trainerr, validerr, testerr = train(saveto=params['model'][0],
reload_=params['reload'][0],
dim_word=params['dim_word'][0],
dim=params['dim'][0],
n_words=params['n-words'][0],
n_words_src=params['n-words'][0],
decay_c=params['decay-c'][0],
clip_c=params['clip-c'][0],
lrate=params['learning-rate'][0],
optimizer=params['optimizer'][0],
maxlen=50,
batch_size=16,
valid_batch_size=16,
validFreq=5000,
dispFreq=10,
saveFreq=5000,
sampleFreq=10,
use_dropout=params['use-dropout'][0])
return validerr
if __name__ == '__main__':
    # Default hyper-parameters used when launched directly; values are
    # wrapped in single-element lists to match main()'s expected format.
    main(0, {
        'model': ['model.npz'],
        'dim_word': [384],
        'dim': [512],
        'n-words': [30000],
        'optimizer': ['adam'],
        'decay-c': [0.],
        'clip-c': [10.],
        'use-dropout': [False],
        'learning-rate': [0.0001],
        'reload': [False]})
| tangyaohua/dl4mt | session2/train_nmt.py | Python | bsd-3-clause | 1,646 | 0.003645 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: bigpanda
author: "Hagai Kariti (@hkariti)"
short_description: Notify BigPanda about deployments
version_added: "1.8"
description:
- Notify BigPanda when deployments start and end (successfully or not). Returns a deployment object containing all the parameters for future module calls.
options:
component:
description:
- "The name of the component being deployed. Ex: billing"
required: true
aliases: ['name']
version:
description:
- The deployment version.
required: true
token:
description:
- API token.
required: true
state:
description:
- State of the deployment.
required: true
choices: ['started', 'finished', 'failed']
hosts:
description:
- Name of affected host name. Can be a list.
required: false
default: machine's hostname
aliases: ['host']
env:
description:
- The environment name, typically 'production', 'staging', etc.
required: false
owner:
description:
- The person responsible for the deployment.
required: false
description:
description:
- Free text description of the deployment.
required: false
url:
description:
- Base URL of the API server.
required: False
default: https://api.bigpanda.io
validate_certs:
description:
- If C(no), SSL certificates for the target url will not be validated. This should only be used
on personally controlled sites using self-signed certificates.
required: false
default: 'yes'
choices: ['yes', 'no']
# informational: requirements for nodes
requirements: [ ]
'''
EXAMPLES = '''
- bigpanda:
component: myapp
version: '1.3'
token: '{{ bigpanda_token }}'
state: started
- bigpanda:
component: myapp
version: '1.3'
token: '{{ bigpanda_token }}'
state: finished
# If outside servers aren't reachable from your machine, use delegate_to and override hosts:
- bigpanda:
component: myapp
version: '1.3'
token: '{{ bigpanda_token }}'
hosts: '{{ ansible_hostname }}'
state: started
delegate_to: localhost
register: deployment
- bigpanda:
component: '{{ deployment.component }}'
version: '{{ deployment.version }}'
token: '{{ deployment.token }}'
state: finished
delegate_to: localhost
'''
# ===========================================
# Module execution.
#
import json
import socket
import traceback
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils._text import to_native
from ansible.module_utils.urls import fetch_url
def main():
    """Ansible entry point: report a deployment start/end event to BigPanda.

    Builds the request body from the module parameters, POSTs it to the
    deployments start or end endpoint depending on ``state``, and exits
    with a ``deployment`` dict that can be fed back into later calls of
    this module.
    """
    module = AnsibleModule(
        argument_spec=dict(
            component=dict(required=True, aliases=['name']),
            version=dict(required=True),
            token=dict(required=True, no_log=True),
            state=dict(required=True, choices=['started', 'finished', 'failed']),
            hosts=dict(required=False, default=[socket.gethostname()], aliases=['host']),
            env=dict(required=False),
            owner=dict(required=False),
            description=dict(required=False),
            message=dict(required=False),
            source_system=dict(required=False, default='ansible'),
            validate_certs=dict(default='yes', type='bool'),
            url=dict(required=False, default='https://api.bigpanda.io'),
        ),
        supports_check_mode=True,
        check_invalid_arguments=False,
    )
    token = module.params['token']
    state = module.params['state']
    url = module.params['url']
    # Build the common request body
    body = dict()
    for k in ('component', 'version', 'hosts'):
        v = module.params[k]
        if v is not None:
            body[k] = v
    # The API expects hosts as a list even when a single name was given.
    if not isinstance(body['hosts'], list):
        body['hosts'] = [body['hosts']]
    # Insert state-specific attributes to body
    if state == 'started':
        for k in ('source_system', 'env', 'owner', 'description'):
            v = module.params[k]
            if v is not None:
                body[k] = v
        request_url = url + '/data/events/deployments/start'
    else:
        message = module.params['message']
        if message is not None:
            body['errorMessage'] = message
        if state == 'finished':
            body['status'] = 'success'
        else:
            body['status'] = 'failure'
        request_url = url + '/data/events/deployments/end'
    # Build the deployment object we return
    deployment = dict(token=token, url=url)
    deployment.update(body)
    # Returned object uses 'message' rather than the API's 'errorMessage'.
    if 'errorMessage' in deployment:
        message = deployment.pop('errorMessage')
        deployment['message'] = message
    # If we're in check mode, just exit pretending like we succeeded
    if module.check_mode:
        module.exit_json(changed=True, **deployment)
    # Send the data to bigpanda
    data = json.dumps(body)
    headers = {'Authorization':'Bearer %s' % token, 'Content-Type':'application/json'}
    try:
        response, info = fetch_url(module, request_url, data=data, headers=headers)
        if info['status'] == 200:
            module.exit_json(changed=True, **deployment)
        else:
            module.fail_json(msg=json.dumps(info))
    except Exception as e:
        module.fail_json(msg=to_native(e), exception=traceback.format_exc())
# Standard Ansible module entry point.
if __name__ == '__main__':
    main()
| e-gob/plataforma-kioscos-autoatencion | scripts/ansible-play/.venv/lib/python2.7/site-packages/ansible/modules/monitoring/bigpanda.py | Python | bsd-3-clause | 5,763 | 0.002776 |
import socket
import random
from PIL import Image
import json
import sys, getopt
import math
import pika
# Screen VARS
offset_x = 80
offset_y = 24
screen_width = 240
screen_height = 240
# Internal options
queueAddress = ''
fileName = ''
workers = 36
Matrix = []
def main(argv):
    """Parse command-line options, validate them, and start processing.

    Options: ``-i/--file`` input image path and ``-w/--workers`` worker
    count. The worker count must be a perfect square, because the image is
    sliced into a sqrt(workers) x sqrt(workers) grid of tiles. Exits the
    process on bad usage or invalid worker count.
    """
    global fileName, workers
    try:
        opts, args = getopt.getopt(argv, "hi:w:", ["file=", "workers="])
    except getopt.GetoptError:
        print('img_to_queue.py -i <inputfile> -w workers')
        sys.exit(2)
    for opt, arg in opts:
        if opt == '-h':
            print('img_to_queue.py -i <inputfile> -w workers')
            sys.exit()
        elif opt in ("-i", "--file"):
            fileName = arg
            print("File to process: " + fileName)
        elif opt in ("-w", "--workers"):
            workers = int(arg)
            # compute the square root once; non-integer root means the image
            # cannot be cut into a square grid of tiles
            root = math.sqrt(float(workers))
            if root != int(root):
                print('The square root of amount of workers is not a whole numbers. GTFO!')
                sys.exit()
            print("Amount of available workers: " + str(workers))
    pompImage()
def addPixelToWorkFile(x, y, r, g, b, index_x, index_y, Matrix):
    # Record one pixel (absolute coordinates plus RRGGBB hex colour) in the
    # work bucket of the tile at (index_x, index_y).
    colour = "%0.2X%0.2X%0.2X" % (r, g, b)
    Matrix[index_x][index_y].append({'x': x, 'y': y, 'rgb': colour})
def pompImage():
    """Load the global ``fileName`` image, scale it to at most 240x240,
    bucket every pixel into a sqrt(workers) x sqrt(workers) tile grid and
    hand the grid to sendToQueue().
    """
    # NOTE(review): "Processiong" typo lives in the runtime message itself.
    print("Processiong image to JSON")
    im = Image.open(fileName).convert('RGB')
    im.thumbnail((240, 240), Image.ANTIALIAS)
    _, _, width, height = im.getbbox()
    # start with x and y index 1
    # pixels per tile edge; assumes workers is a perfect square (validated
    # in main())
    slice_size = int(screen_width / int(math.sqrt(workers)))
    amount_of_keys = int(screen_width / slice_size)
    print(amount_of_keys)
    w, h = amount_of_keys, amount_of_keys
    # Matrix[tile_x][tile_y] collects the pixel dicts for one worker tile.
    Matrix = [[[] for x in range(w)] for y in range(h)]
    # workFile = [[0 for x in range(amount_of_keys)] for y in range(amount_of_keys)]
    for x in range(width):
        index_x = int((x / slice_size))
        for y in range(height):
            r, g, b = im.getpixel((x, y))
            index_y = int((y / slice_size))
            # offsets shift pixel coordinates to the target screen position
            addPixelToWorkFile(x + offset_x, y + offset_y, r, g, b, index_x, index_y, Matrix)
        # print("Current index x:"+str(index_x)+" y: "+str(index_y)+" WORKER:"+str(index_y*index_x))
    sendToQueue(Matrix)
def sendToQueue(arrayOfWorkers):
    """Publish every tile's pixel list as one JSON message on the local
    'pomper' RabbitMQ queue (purging any messages left from a prior run).
    """
    connection = pika.BlockingConnection(pika.ConnectionParameters(host='localhost',
                                                                   credentials=pika.PlainCredentials(username='pomper',
                                                                                                     password='pomper')))
    channel = connection.channel()
    channel.queue_declare(queue='pomper', durable=False,)
    channel.queue_purge(queue='pomper')
    for worker in arrayOfWorkers:
        for pixels in worker:
            # one message per tile: the full list of pixel dicts
            channel.basic_publish(exchange='',
                                  routing_key='pomper',
                                  body=json.dumps(pixels))
# Run the CLI when executed directly (skip the script name in argv).
if __name__ == "__main__":
    main(sys.argv[1:])
| jargij/led-pomper-sha2017 | img_to_queue.py | Python | mit | 3,094 | 0.005171 |
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright (c) 2011 OpenStack, LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from sqlalchemy import Column, Table, MetaData, String
def upgrade(migrate_engine):
    """Rename networks.dns to dns1 and add a second nameserver column dns2."""
    metadata = MetaData()
    metadata.bind = migrate_engine
    networks = Table('networks', metadata, autoload=True)
    networks.c.dns.alter(name='dns1')
    networks.create_column(Column('dns2', String(255)))
def downgrade(migrate_engine):
    """Revert upgrade(): drop dns2 and rename dns1 back to dns."""
    metadata = MetaData()
    metadata.bind = migrate_engine
    networks = Table('networks', metadata, autoload=True)
    networks.c.dns1.alter(name='dns')
    networks.drop_column('dns2')
| sileht/deb-openstack-nova | nova/db/sqlalchemy/migrate_repo/versions/035_secondary_dns.py | Python | apache-2.0 | 1,198 | 0 |
# Generated by Django 2.1.5 on 2019-10-01 19:43
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated migration: extends Dataset.dts_type's choices with a
    # third value ('2' = Reported Issue); the column itself is unchanged.
    dependencies = [
        ('comment', '0005_auto_20191001_1559'),
    ]
    operations = [
        migrations.AlterField(
            model_name='dataset',
            name='dts_type',
            field=models.CharField(choices=[('0', 'User Comment'), ('1', 'Validation History'), ('2', 'Reported Issue')], default='0', help_text='Differentiate user comments from automatic validation or defect comments.', max_length=1, verbose_name='Type'),
        ),
    ]
| linea-it/dri | api/comment/migrations/0006_auto_20191001_1943.py | Python | gpl-3.0 | 596 | 0.001678 |
#!/usr/bin/env python
#coding:utf-8
# Author: mozman --<mozman@gmx.at>
# Purpose: test mixin Clipping
# Created: 31.10.2010
# Copyright (C) 2010, Manfred Moitzi
# License: GPLv3
import unittest
from svgwrite.mixins import Clipping
from svgwrite.base import BaseElement
class SVGMock(BaseElement, Clipping):
    # Minimal concrete element mixing in Clipping so the mixin can be
    # exercised in isolation.
    elementname = 'svg'
class TestClipping(unittest.TestCase):
    """Checks that Clipping.clip_rect renders the CSS ``rect()`` shorthand."""
    def test_clip_rect_numbers(self):
        element = SVGMock(debug=True)
        element.clip_rect(1, 2, 3, 4)
        self.assertEqual(element['clip'], 'rect(1,2,3,4)')
    def test_clip_rect_auto(self):
        element = SVGMock(debug=True)
        element.clip_rect('auto', 'auto', 'auto', 'auto')
        self.assertEqual(element['clip'], 'rect(auto,auto,auto,auto)')
# Allow running this test module directly.
if __name__=='__main__':
    unittest.main()
from typing import Iterable, Callable, Optional, Any, List, Iterator
from dupescan.fs._fileentry import FileEntry
from dupescan.fs._root import Root
from dupescan.types import AnyPath
FSPredicate = Callable[[FileEntry], bool]
ErrorHandler = Callable[[EnvironmentError], Any]
def catch_filter(inner_filter: FSPredicate, error_handler_func: ErrorHandler) -> FSPredicate:
    """Wrap *inner_filter* so EnvironmentErrors are reported, not raised.

    With no filter provided, every entry is accepted -- nothing can raise,
    so *error_handler_func* is never consulted. Otherwise the returned
    predicate delegates to *inner_filter*; on EnvironmentError it notifies
    *error_handler_func* (when one is given) and treats the entry as
    excluded by returning False.
    """
    if inner_filter is None:
        def accept_everything(*args, **kwargs):
            return True
        return accept_everything
    def guarded(*args, **kwargs):
        try:
            return inner_filter(*args, **kwargs)
        except EnvironmentError as env_error:
            if error_handler_func is not None:
                error_handler_func(env_error)
            return False
    return guarded
def noerror(_):
    # Default error handler: deliberately ignore the error.
    pass
class Walker(object):
    """Callable yielding FileEntry objects for a set of root paths.

    The directory/file predicates are wrapped by ``catch_filter`` so an
    EnvironmentError raised while filtering is routed to *onerror* instead
    of aborting the walk.
    """
    def __init__(
        self,
        recursive: bool,
        dir_object_filter: Optional[FSPredicate]=None,
        file_object_filter: Optional[FSPredicate]=None,
        onerror: Optional[ErrorHandler]=None
    ):
        self._recursive = bool(recursive)
        # Fall back to a no-op handler so _onerror can be called blindly.
        self._onerror = noerror if onerror is None else onerror
        self._dir_filter = catch_filter(dir_object_filter, self._onerror)
        self._file_filter = catch_filter(file_object_filter, self._onerror)
    def __call__(self, paths: Iterable[AnyPath]) -> Iterator[FileEntry]:
        """Yield accepted entries for each root path, recursing if enabled."""
        for root_index, root_path in enumerate(paths):
            root_spec = Root(root_path, root_index)
            try:
                root_obj = FileEntry.from_path(root_path, root_spec)
            except EnvironmentError as env_error:
                # Root itself is unreadable: report and skip it.
                self._onerror(env_error)
                continue
            if root_obj.is_dir and self._dir_filter(root_obj):
                if self._recursive:
                    yield from self._recurse_dir(root_obj)
                else:
                    yield root_obj
            elif root_obj.is_file and self._file_filter(root_obj):
                yield root_obj
    def _recurse_dir(self, root_obj: FileEntry):
        """Depth-first walk below *root_obj*, yielding accepted files.

        Uses an explicit stack; subdirectories are pushed in reverse so
        they are visited in their original listing order. Symlinked
        directories are never descended into.
        """
        dir_obj_q: List[FileEntry] = [ root_obj ]
        next_dirs: List[FileEntry] = [ ]
        while len(dir_obj_q) > 0:
            dir_obj = dir_obj_q.pop()
            next_dirs.clear()
            try:
                for child_obj in dir_obj.dir_content():
                    try:
                        if (
                            child_obj.is_dir and
                            not child_obj.is_symlink and
                            self._dir_filter(child_obj)
                        ):
                            next_dirs.append(child_obj)
                        elif (
                            child_obj.is_file and
                            self._file_filter(child_obj)
                        ):
                            yield child_obj
                    except EnvironmentError as query_error:
                        # Failure querying one child: report and skip it.
                        self._onerror(query_error)
            except EnvironmentError as env_error:
                # Failure listing the directory itself: report and move on.
                self._onerror(env_error)
            dir_obj_q.extend(reversed(next_dirs))
def flat_iterator(
    paths: Iterable[AnyPath],
    dir_object_filter: Optional[FSPredicate]=None,
    file_object_filter: Optional[FSPredicate]=None,
    onerror: Optional[ErrorHandler]=None
) -> Iterator[FileEntry]:
    """Yield matching root entries without descending into directories."""
    walker = Walker(False, dir_object_filter, file_object_filter, onerror)
    return walker(paths)
def recurse_iterator(
    paths: Iterable[AnyPath],
    dir_object_filter: Optional[FSPredicate]=None,
    file_object_filter: Optional[FSPredicate]=None,
    onerror: Optional[ErrorHandler]=None
) -> Iterator[FileEntry]:
    """Yield matching entries, recursing into accepted directories."""
    walker = Walker(True, dir_object_filter, file_object_filter, onerror)
    return walker(paths)
| yellcorp/dupescan | dupescan/fs/_walker.py | Python | mit | 4,089 | 0.005625 |
#!/usr/bin/env python3
# Thin launcher script: delegate straight to pyserv's data browser entry point.
from pyserv.databrowse import main
main()
| Vierkantor/PyServ | run.py | Python | gpl-3.0 | 67 | 0 |
import os, sys
up_path = os.path.abspath('..')
sys.path.append(up_path)
from numpy import *
import matplotlib
import matplotlib.pyplot as plt
import matplotlib.patches as mpatches
from matplotlib import rc
from objects import SimObject
from utils import scalar
from covar import draw_ellipsoid, vec2cov, cov2vec,\
project_psd
from kalman_filter import ekf_update
from numpy.random import multivariate_normal as mvn
import time
from math import atan2, atan
import robots
from openravepy import *
from transforms import unscented_transform
from rave_draw import *
#import openravepy as rave
class RaveLocalizerBot(robots.Robot):
    """Wraps another robot and appends a 3D object position to its state.

    The combined state is [bot state (bot.NX entries), object x, y, z].
    The object coordinates are carried unchanged through the dynamics.
    """
    NX = -1
    NU = -1
    def __init__(self, bot, obj):
        # bot: underlying robot; obj: 3-element object position appended to state.
        self.bot = bot
        self.NX = bot.NX + 3 #FIXME (hack for now)
        self.NU = bot.NU
        self.dt = bot.dt
        x = array(zeros((self.NX)))
        for t in range(bot.NX):
            x[t] = bot.x[t]
        x[bot.NX] = obj[0]
        x[bot.NX+1] = obj[1]
        x[bot.NX+2] = obj[2]
        self.EPS = bot.EPS
        robots.Robot.__init__(self, x, dt=self.dt)
    def dynamics(self, X, u):
        # Propagate the wrapped robot's state; object position is static.
        bot_up = self.bot.dynamics(X[0:self.bot.NX], u)
        return vstack((bot_up, X[self.bot.NX:]))
    def collision_penalty_trajectory(self, x, env):
        return 0 #Todo: FIXME
    def camera_obj_state(self,x):
        #Returns the transform of the camera and object
        # z layout: [0:7] camera pose (position + quaternion), [7:10] object xyz.
        camera_transform = self.bot.camera_transform(x[0:self.bot.NX])
        obj_pos = x[self.bot.NX:]
        z = mat(zeros((10,1)))
        z[0:7] = camera_transform
        z[7:10] = obj_pos
        return z
    """
    def fov_state(self, x):
        xy = mat(self.bot.traj_pos(x)).T
        theta = self.bot.orientation(x)
        #print vstack((xy, theta, x[self.bot.NX:]))
        if isinstance(x, tuple) or len(x.shape) == 1:
            x = mat(x).T
        if isinstance(xy, tuple) or xy.shape[0] < xy.shape[1]:
            xy = mat(xy).T
        return vstack((xy, theta, x[self.bot.NX:]))
    """
    def observe(self, scene, x=None):
        # Stack the wrapped robot's observations with this robot's own.
        zs = self.bot.observe(scene, x[0:self.bot.NX])
        return vstack((zs, robots.Robot.observe(self, scene, x)))
    def draw_trajectory(self, xs, mus=None, Sigmas=None, color=array((1.0, 0.0, 0.0, 0.2))):
        # Delegate drawing of the robot-state slice to the wrapped robot.
        bnx = self.bot.NX
        self.bot.draw_trajectory(xs[0:bnx], mus[0:bnx], Sigmas[0:bnx, 0:bnx], color)
    def draw(self, X=None, color=array((1.0, 0.0, 0.0))):
        # NOTE(review): `x` and `bnx` are undefined in this scope -- this
        # probably should use X[0:self.bot.NX]; confirm before relying on it.
        self.bot.draw(x[0:bnx], color)
class BarretWAM(robots.Robot):
    # wrapper for openrave robots
    """7-DOF Barrett WAM arm wrapped for this planner, backed by OpenRAVE.

    State is the vector of the seven named joint angles ('angles'
    representation); dynamics simply add the control to the angles and clamp
    to the joint limits queried from OpenRAVE.
    """
    NX = 7
    NU = 7
    EPS = 1e-3
    def __init__(self, ravebot, env, state_rep='angles', dt=-1):
        self.ravebot = ravebot
        self.env = env # used for drawing purposes
        self.state_rep = state_rep
        self.handles = [ ] # used for drawing purposes
        self.jointnames = ['Shoulder_Yaw', 'Shoulder_Pitch', 'Shoulder_Roll', 'Elbow', 'Wrist_Yaw', 'Wrist_Pitch', 'Wrist_Roll']
        self.jointidxs = [ravebot.GetJoint(name).GetDOFIndex() for name in self.jointnames]
        self.ravebot_manip = self.ravebot.SetActiveManipulator('arm')
        self.lower_limits, self.upper_limits = self.ravebot.GetDOFLimits()
        # Restrict the limit vectors to the seven controlled joints.
        tmp_lower_limits = []
        tmp_upper_limits = []
        for idx in self.jointidxs:
            tmp_lower_limits.append(self.lower_limits[idx])
            tmp_upper_limits.append(self.upper_limits[idx])
        self.lower_limits = mat(array(tmp_lower_limits)).T
        self.upper_limits = mat(array(tmp_upper_limits)).T
        self.ravebot.SetActiveDOFs(self.jointidxs)
        x = [0] * len(self.jointidxs)
        robots.Robot.__init__(self, x, dt=dt)
        self.index = BarretWAM.increment_index()
    def traj_pos(self, x=None):
        # End-effector position (3x1) for joint angles x; defaults to current state.
        if x == None:
            x = self.x
        if self.state_rep == 'angles':
            return mat(self.forward_kinematics(x)[0:3,3])
        else: #state representation = points
            pass
    def orientation(self, x=None):
        # End-effector rotation matrix (3x3) for joint angles x.
        if x == None:
            x = self.x
        if self.state_rep == 'angles':
            return self.forward_kinematics(x)[0:3,0:3]
        else:
            pass
    def __str__(self):
        return 'ravebot[' + str(self.index) + ']'
    def dynamics(self, x, u):
        # Integrate by adding the control to the joint angles, clamped to limits.
        if self.state_rep == 'angles':
            thetas = x + u
            thetas = minimum(thetas, self.upper_limits)
            thetas = maximum(thetas, self.lower_limits)
            """
            for i in range(thetas.shape[0]):
                if thetas[i] > self.upper_limits[i]:
                    thetas[i] = self.upper_limits[i]
                elif thetas[i] < self.lower_limits[i]:
                    thetas[i] = self.lower_limits[i]
            """
            return thetas
        else:
            pass
    def camera_transform(self, x):
        # Pose (position + quaternion, 7x1) of the attached camera at joint angles x.
        camera_rel_transform = self.ravebot.GetAttachedSensor('camera').GetRelativeTransform()
        with self.env:
            self.ravebot.SetDOFValues(x, self.jointidxs)
            link_transform = mat(self.ravebot.GetLink('wam4').GetTransform())
        camera_trans = link_transform * camera_rel_transform
        camera_quat = quatFromRotationMatrix(array(camera_trans[0:3,0:3]))
        camera_vec = mat(zeros((7,1)))
        camera_vec[0:3] = camera_trans[0:3,3]
        camera_vec[3:7] = mat(camera_quat).T
        return camera_vec
    def observe(self, scene, x=None):
        if x==None:
            x = self.x
        zs = robots.Robot.observe(self, scene, x)
        # also give joint angle observations
        #if zs.size > 0:
        #  pass
        #zs = vstack((zs, mat('x[2]')))
        #zs = vstack((zs, mat('x[3]')))
        #else:
        #  zs = mat('x[3]')
        return zs
    def forward_kinematics(self, thetas):
        # Set the joint angles in OpenRAVE and read back the end-effector transform.
        with self.env:
            self.ravebot.SetDOFValues(thetas,self.jointidxs)
            return mat(self.ravebot_manip.GetEndEffectorTransform())
    def inverse_kinematics(self, xyz):
        pass
    def draw_Cspace(self, X=None, color='blue'):
        pass
    def collision_penalty_trajectory(self, x, env):
        return 0 #Todo: FIXME
    def draw_trajectory(self, xs, mus=None, Sigmas=None, color=array((1.0, 0.0, 0.0, 0.2))):
        # Draw the end-effector path for the joint trajectory xs; optionally draw
        # 2-sigma uncertainty ellipsoids obtained via the unscented transform.
        T = xs.shape[1]
        XYZ = mat(zeros((3,T)))
        for t in range(T):
            XYZ[:,t] = self.traj_pos(xs[:,t])
        if mus != None and Sigmas != None:
            for t in range(T):
                mu_y, Sigma_y = unscented_transform(mus[:,t], Sigmas[:,:,t],\
                    lambda x: self.traj_pos(x))
                # padding for positive definiteness
                Sigma_y = Sigma_y + 0.0001 * identity(3)
                self.handles.append(draw_ellipsoid(mu_y, Sigma_y, std_dev=2,\
                    env=self.env, colors=color))
        #self.handles.append(self.env.drawlinestrip(points=array(((xyz[0], xyz[1], xyz[2]),(0.0, 0.0,0.0))),
        #                   linewidth=3.0))
        self.handles.append(self.env.drawlinestrip(points=XYZ.T, linewidth=3.0, colors=color[0:3]))
    def draw(self, X=None, color=array((1.0, 0.0, 0.0))):
        if X == None:
            X = self.x
        xyz = self.traj_pos(X)
        with self.env:
            """
            # works with only a few robots
            newrobot = RaveCreateRobot(self.env,self.ravebot.GetXMLId())
            newrobot.Clone(self.ravebot,0)
            for link in newrobot.GetLinks():
                for geom in link.GetGeometries():
                    geom.SetTransparency(0.6)
            self.env.Add(newrobot,True)
            newrobot.SetActiveDOFs(self.jointidxs)
            newrobot.SetDOFValues(X, self.jointidxs)
            self.handles.append(newrobot)
            """
            self.handles.append(self.env.plot3(points=xyz, pointsize=1.0, colors=color))
| viswimmer1/PythonGenerator | data/python_files/30585323/ravebot.py | Python | gpl-2.0 | 7,909 | 0.013529 |
class Solution:
    def toLowerCase(self, str: str) -> str:
        """Return *str* with ASCII letters 'A'-'Z' lowered, other chars unchanged.

        Implemented without the built-in str.lower() (LeetCode exercise);
        only the ASCII range is mapped. Uses a list + join instead of the
        original quadratic string concatenation.
        """
        # Distance between lowercase and uppercase ASCII letters (32).
        offset = ord("a") - ord("A")
        chars = []
        for ch in str:
            if "A" <= ch <= "Z":
                chars.append(chr(ord(ch) + offset))
            else:
                chars.append(ch)
        return "".join(chars)
# Smoke test: should print "hello".
sol = Solution()
print(sol.toLowerCase("Hello"))
| yleo77/leetcode | To_Lower_Case/answer.py | Python | mit | 372 | 0.002688 |
from .nucleicacidpartitemcontroller import NucleicAcidPartItemController
from .oligoitemcontroller import OligoItemController
from .stranditemcontroller import StrandItemController
from .viewrootcontroller import ViewRootController
from .virtualhelixitemcontroller import VirtualHelixItemController | scholer/cadnano2.5 | cadnano/controllers/__init__.py | Python | mit | 298 | 0.003356 |
r""""DeepHOL large scale reporting in Apache Beam."""
from __future__ import absolute_import
from __future__ import division
# Import Type Annotations
from __future__ import print_function
import io
import os
import apache_beam as beam
from apache_beam.metrics import Metrics
import matplotlib.pyplot as plot
import tensorflow as tf
from tf import gfile
from typing import List
from typing import Text
from google.protobuf import text_format
from deepmath.deephol import deephol_pb2
from deepmath.deephol import io_util
from deepmath.deephol.deephol_loop.missing import recordio
from deepmath.deephol.deephol_loop.missing import runner
from deepmath.deephol.utilities import deephol_stat_pb2
from deepmath.deephol.utilities import stats
# Base names for the files the reporting pipeline writes; shard numbers and
# extensions are appended by the beam writers / in ReportingPipeline.__init__.
STATS_BASENAME = 'proof_stats'
AGGREGATE_STAT_BASENAME = 'aggregate_stat'
PROVEN_GOALS_BASENAME = 'proven_goals_fps'
OPEN_GOALS_BASENAME = 'open_goals_fps'
PROVEN_STATS_BASENAME = 'proven_stats'
PRETTY_STATS_BASENAME = 'pretty_stats'
CACTUS_PLOT_FILE_NAME = 'cactus.pdf'
CACTUS_DATA_FILE_NAME = 'cactus.dat'
class StatDoFn(beam.DoFn):
  """Beam DoFn mapping each ProofLog to a single ProofStat.

  Also increments Beam counters for processed logs, proven/attempted
  theorems, and total nodes.
  """
  def __init__(self):
    self.processed_counter = Metrics.counter(self.__class__, 'processed')
    self.proven_counter = Metrics.counter(self.__class__, 'proven')
    self.attempted_counter = Metrics.counter(self.__class__, 'attempted')
    self.nodes_counter = Metrics.counter(self.__class__, 'nodes')
  def process(self, proof_log: deephol_pb2.ProofLog
             ) -> List[deephol_stat_pb2.ProofStat]:
    self.processed_counter.inc()
    s = stats.proof_log_stats(proof_log)
    self.proven_counter.inc(s.num_theorems_proved)
    self.attempted_counter.inc(s.num_theorems_attempted)
    self.nodes_counter.inc(s.num_nodes)
    return [s]
class AggregateStatsFn(beam.CombineFn):
  """Beam CombineFn folding ProofStat values into one ProofAggregateStat.

  The actual merging logic lives in the stats module.
  """
  def create_accumulator(self):
    return deephol_stat_pb2.ProofAggregateStat()
  def add_input(self, target, source):
    # Fold one per-proof stat into the running aggregate (mutates target).
    stats.merge_stat(target, source)
    return target
  def merge_accumulators(self, aggregate_stats):
    result = deephol_stat_pb2.ProofAggregateStat()
    for s in aggregate_stats:
      stats.merge_aggregate_stat(result, s)
    return result
  def extract_output(self, result):
    return result
class UniqueFn(beam.CombineFn):
  """Beam CombineFn that de-duplicates inputs and emits them newline-joined."""
  def create_accumulator(self):
    return set()
  def add_input(self, acc, element):
    acc.add(element)
    return acc
  def merge_accumulators(self, accumulators):
    merged = set()
    for acc in accumulators:
      merged |= acc
    return merged
  def extract_output(self, acc):
    return '\n'.join(str(item) for item in acc)
def proven_or_open(proof_stat):
  """Tag a proof's theorem fingerprint by outcome.

  Emits the fingerprint (as a decimal string) to the 'proven' tagged output
  when at least one theorem was proved, otherwise to the 'open' output.
  """
  tag = 'proven' if proof_stat.num_theorems_proved > 0 else 'open'
  yield beam.pvalue.TaggedOutput(tag, '%d' % proof_stat.theorem_fingerprint)
def make_proof_logs_collection(root, proof_logs: Text):
  """Read ProofLog protos from the recordio pattern *proof_logs* into a PCollection."""
  return (root | 'Create' >> recordio.ReadFromRecordIO(
      proof_logs, beam.coders.ProtoCoder(deephol_pb2.ProofLog)))
def reporting_pipeline(proof_logs_collection, stats_out: Text,
                       aggregate_stats: Text, proven_goals: Text,
                       open_goals: Text):
  """Attach reporting transforms for statistics and proved theorems.

  Args:
    proof_logs_collection: beam collection of proof logs.
    stats_out: Filename for outputting per proof statistics.
    aggregate_stats: Filename for storing aggregated statistics.
    proven_goals: Filename for the fingerprints of proven goals.
    open_goals: Filename for the fingerprints of open goals.

  Returns:
    None; the transforms are attached to proof_logs_collection's pipeline.
  """
  # Per-proof statistics, written as ProofStat records.
  proof_stats = (proof_logs_collection | 'Stats' >> beam.ParDo(StatDoFn()))
  _ = proof_stats | 'WriteStats' >> recordio.WriteToRecordIO(
      file_path_prefix=stats_out,
      coder=beam.coders.ProtoCoder(deephol_stat_pb2.ProofStat))
  # Global aggregate, written as a single text proto.
  _ = (
      proof_stats
      | 'AggregateStats' >> beam.CombineGlobally(AggregateStatsFn())
      | 'MapProtoToString' >> beam.Map(text_format.MessageToString)
      | 'WriteAggregates' >> beam.io.WriteToText(aggregate_stats, '.pbtxt'))
  # Split fingerprints into 'proven' and 'open', de-duplicate each side.
  results = proof_stats | (
      'ProvenOrOpen' >> beam.FlatMap(proven_or_open).with_outputs())
  _ = (
      results.proven
      | 'UniqueProven' >> beam.CombineGlobally(UniqueFn())
      | 'WriteProven' >> beam.io.WriteToText(proven_goals, '.txt'))
  _ = (
      results.open
      | 'UniqueOpen' >> beam.CombineGlobally(UniqueFn())
      | 'WriteOpen' >> beam.io.WriteToText(open_goals, '.txt'))
def file_lines_set(fname):
  """Return the set of lines in *fname*, trailing whitespace stripped."""
  # Context manager ensures the handle is closed even on error; the
  # original left the gfile handle open.
  with gfile.Open(fname) as f:
    return set(line.rstrip() for line in f)
class ReportingPipeline(object):
  """Top level class to manage a reporting pipeline."""
  def __init__(self, out_dir: Text):
    # Creates out_dir and precomputes every output path used by the pipeline.
    self.out_dir = out_dir
    gfile.MakeDirs(out_dir)
    self.proof_stats_filename = os.path.join(out_dir, STATS_BASENAME)
    self.aggregate_stat_filename = os.path.join(out_dir,
                                                AGGREGATE_STAT_BASENAME)
    self.proven_goals_filename = os.path.join(out_dir, PROVEN_GOALS_BASENAME)
    self.open_goals_filename = os.path.join(out_dir, OPEN_GOALS_BASENAME)
    self.proven_stats_filename = os.path.join(out_dir, PROVEN_STATS_BASENAME)
    self.pretty_stats_filename = os.path.join(out_dir, PRETTY_STATS_BASENAME)
    self.cactus_plot_filename = os.path.join(out_dir, CACTUS_PLOT_FILE_NAME)
    self.cactus_data_filename = os.path.join(out_dir, CACTUS_DATA_FILE_NAME)
  def setup_pipeline(self, proof_logs_collection):
    """Attach the reporting transforms to an existing proof-log collection."""
    reporting_pipeline(proof_logs_collection, self.proof_stats_filename,
                       self.aggregate_stat_filename, self.proven_goals_filename,
                       self.open_goals_filename)
  def write_final_stats(self):
    """Log and write final aggregated statistics to file system."""
    # Filenames below assume the beam writers produced exactly one shard.
    fname = self.aggregate_stat_filename + '-00000-of-00001.pbtxt'
    aggregate_stat = io_util.load_text_proto(
        fname, deephol_stat_pb2.ProofAggregateStat, 'aggregate statistics')
    if aggregate_stat is None:
      tf.logging.warning('Could not read aggregate statistics "%s"', fname)
      return
    tf.logging.info('Stats:\n%s',
                    stats.aggregate_stat_to_string(aggregate_stat))
    open_goals = file_lines_set(self.open_goals_filename +
                                '-00000-of-00001.txt')
    proven_goals = file_lines_set(self.proven_goals_filename +
                                  '-00000-of-00001.txt')
    # A goal counts as open only if no attempt ever proved it.
    never_proven = open_goals - proven_goals
    num_open_goals = len(never_proven)
    num_proven_goals = len(proven_goals)
    tf.logging.info('Open goals: %d', num_open_goals)
    tf.logging.info('Proved goals: %d', num_proven_goals)
    perc_proven = 100.0 * num_proven_goals / float(num_open_goals +
                                                   num_proven_goals)
    tf.logging.info('Percentage proven: %.2f', perc_proven)
    with gfile.Open(self.proven_stats_filename, 'w') as f:
      f.write('%d %d %.2f\n' % (num_open_goals, num_proven_goals, perc_proven))
    with gfile.Open(self.pretty_stats_filename, 'w') as f:
      f.write('%s\n' % stats.detailed_statistics(aggregate_stat))
    # Write cactus plot
    if aggregate_stat.proof_closed_after_millis:
      cactus_data = list(aggregate_stat.proof_closed_after_millis)
      cactus_data.sort()
      with gfile.Open(self.cactus_data_filename, 'w') as f:
        f.write('\n'.join(map(str, cactus_data)))
      fig = plot.figure()
      plot.xlabel('Number of proofs closed')
      plot.ylabel('Wall clock time in s')
      plot.plot([ms * .001 for ms in cactus_data])  # convert to seconds
      buf = io.BytesIO()
      fig.savefig(buf, format='pdf', bbox_inches='tight')
      with gfile.Open(self.cactus_plot_filename, 'wb') as f:
        f.write(buf.getvalue())
  def run_pipeline(self, proof_logs: Text):
    """Run the full reporting pipeline over *proof_logs* and write the stats."""
    def pipeline(root):
      proof_logs_collection = make_proof_logs_collection(root, proof_logs)
      self.setup_pipeline(proof_logs_collection)
    runner.Runner().run(pipeline).wait_until_finish()
    self.write_final_stats()
    tf.logging.info('Finished reporting.')
| tensorflow/deepmath | deepmath/deephol/deephol_loop/report.py | Python | apache-2.0 | 8,295 | 0.005907 |
# Trailing comments are Shed Skin type-inference annotations; left unchanged.
def propagate(la): # la: [list(int)]
    print la, la # [str], [str]
propagate([1]) # []
propagate([2]) # []
| shedskin/shedskin | tests/28.py | Python | gpl-3.0 | 210 | 0.009524 |
from brms.settings.base import *
import dj_database_url
# Production overrides of the base settings.
DEBUG = False
ALLOWED_HOSTS = ['.example.com']
# Use the cached template loader so template is compiled once and read from
# memory instead of reading from disk on each load.
TEMPLATES[0]['OPTIONS']['loaders'] = [
    ('django.template.loaders.cached.Loader', [
        'django.template.loaders.filesystem.Loader',
        'django.template.loaders.app_directories.Loader',
    ]),
]
# Database configured from the DATABASE_URL environment variable.
DATABASES['default'] = dj_database_url.config()
# -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-03-07 19:20
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated migration: alters Post.url to a plain URLField.
    dependencies = [
        ('posts', '0001_initial'),
    ]
    operations = [
        migrations.AlterField(
            model_name='post',
            name='url',
            field=models.URLField(),
        ),
    ]
| rodriguesrl/reddit-clone-udemy | posts/migrations/0002_auto_20170307_1920.py | Python | mit | 419 | 0 |
# -*- coding: utf-8 -*-
import os,math
from qgis.core import NULL
from mole import oeq_global
from mole.project import config
from mole.extensions import OeQExtension
from mole.stat_corr import rb_contemporary_base_uvalue_by_building_age_lookup
def calculation(self=None, parameters={}, feature=None):
    """Compute the EnEV annual heat demand per living area (AHDE).

    Adds a 40 kWh/m2a * 0.8 air-change term to the heat-loss input 'HLAE';
    returns NULL when 'HLAE' is missing. Removed the unused
    `from math import floor, ceil` of the original.
    """
    from PyQt4.QtCore import QVariant

    ahde = NULL
    if not oeq_global.isnull([parameters['HLAE']]):
        # Air Change Heatloss for standard Rooms 40kWh/m2a nach Geiger
        # Lüftung im Wohnungsbau; 20% of the Total Area are used for
        # stairs and floors, hence the 0.8 factor.
        ahde = float(parameters['HLAE']) + 40.0 * 0.8
    return {'AHDE': {'type': QVariant.Double, 'value': ahde}}
# Register this evaluation as an Open eQuarter extension; reads HLAE from the
# data layer and writes AHDE back to the same layer.
extension = OeQExtension(
    extension_id=__name__,
    category='Evaluation',
    subcategory='Building',
    extension_name='AHD Building per Livig Area EnEV',
    layer_name= 'Annual Heat Demand (per Living Area, EnEV)',
    extension_filepath=os.path.join(__file__),
    colortable = os.path.join(os.path.splitext(__file__)[0] + '.qml'),
    field_id='AHDE',
    source_type='none',
    par_in=['HLAE'],
    sourcelayer_name=config.data_layer_name,
    targetlayer_name=config.data_layer_name,
    active=True,
    show_results=['AHDE'],
    description=u"Calculate EnEV Annual Heat Demand per Living Area",
    evaluation_method=calculation)
extension.registerExtension(default=True)
| UdK-VPT/Open_eQuarter | mole/extensions/eval_enev/oeq_AHDE.py | Python | gpl-2.0 | 1,413 | 0.008499 |
"""
Serializers for Video Abstraction Layer
Serialization is usually sent through the VideoSerializer which uses the
EncodedVideoSerializer which uses the profile_name as it's profile field.
"""
from rest_framework import serializers
from django.core.exceptions import ValidationError
from edxval.models import Profile, Video, EncodedVideo, Subtitle, CourseVideo
class EncodedVideoSerializer(serializers.ModelSerializer):
    """
    Serializer for EncodedVideo object.

    Uses the profile_name as its profile value instead of a Profile object.
    """
    profile = serializers.SlugRelatedField(slug_field="profile_name")
    class Meta: # pylint: disable=C1001, C0111
        model = EncodedVideo
        fields = (
            "created",
            "modified",
            "url",
            "file_size",
            "bitrate",
            "profile",
        )
    def get_identity(self, data):
        """
        This hook is required for bulk update.

        We need to override the default, to use the slug (profile name)
        as the identity instead of the primary key.
        """
        return data.get('profile', None)
class SubtitleSerializer(serializers.ModelSerializer):
    """
    Serializer for Subtitle objects.

    `content` is write-only; readers get `content_url` pointing at the
    subtitle's absolute URL instead.
    """
    content_url = serializers.CharField(source='get_absolute_url', read_only=True)
    content = serializers.CharField(write_only=True)
    def validate_content(self, attrs, source):
        """
        Validate that the subtitle is in the correct format.

        For 'sjson' subtitles the content must parse as JSON; it is
        re-serialized in normalized form before saving.
        """
        value = attrs[source]
        if attrs.get('fmt') == 'sjson':
            # Local import keeps json out of module scope for other formats.
            import json
            try:
                loaded = json.loads(value)
            except ValueError:
                raise serializers.ValidationError("Not in JSON format")
            else:
                attrs[source] = json.dumps(loaded)
        return attrs
    class Meta: # pylint: disable=C1001, C0111
        model = Subtitle
        lookup_field = "id"
        fields = (
            "fmt",
            "language",
            "content_url",
            "content",
        )
class CourseSerializer(serializers.RelatedField):
    """
    Field that represents a CourseVideo as its course_id string.
    """
    def to_native(self, value):
        return value.course_id

    def from_native(self, data):
        # Empty/falsy course ids deserialize to None (field left unset).
        if not data:
            return None
        course_video = CourseVideo(course_id=data)
        course_video.full_clean(exclude=["video"])
        return course_video
class VideoSerializer(serializers.ModelSerializer):
    """
    Serializer for Video object.

    encoded_videos takes a list of dicts of EncodedVideo data.
    """
    encoded_videos = EncodedVideoSerializer(many=True, allow_add_remove=True)
    subtitles = SubtitleSerializer(many=True, allow_add_remove=True, required=False)
    courses = CourseSerializer(many=True, read_only=False)
    url = serializers.SerializerMethodField('get_url')
    class Meta: # pylint: disable=C1001, C0111
        model = Video
        lookup_field = "edx_video_id"
        exclude = ('id',)
    def get_url(self, obj):
        """
        Return relative url for the object
        """
        return obj.get_absolute_url()
    def restore_fields(self, data, files):
        """
        Overridden function used to check against duplicate profile names.

        Converts a dictionary of data into a dictionary of deserialized fields. Also
        checks if there are duplicate profile_name(s). If there is, the deserialization
        is rejected.
        """
        reverted_data = {}
        if data is not None and not isinstance(data, dict):
            self._errors['non_field_errors'] = ['Invalid data']
            return None
        # Reject payloads containing the same encoding profile twice.
        try:
            profiles = [ev["profile"] for ev in data.get("encoded_videos", [])]
            if len(profiles) != len(set(profiles)):
                self._errors['non_field_errors'] = ['Invalid data: duplicate profiles']
        except KeyError:
            raise ValidationError("profile required for deserializing")
        except TypeError:
            raise ValidationError("profile field needs to be a profile_name (str)")
        # Standard DRF 2.x field-by-field deserialization, collecting errors.
        for field_name, field in self.fields.items():
            field.initialize(parent=self, field_name=field_name)
            try:
                field.field_from_native(data, files, field_name, reverted_data)
            except ValidationError as err:
                self._errors[field_name] = list(err.messages)
        return reverted_data
| GbalsaC/bitnamiP | edx-val/edxval/serializers.py | Python | agpl-3.0 | 4,408 | 0.001361 |
from django.conf.urls.defaults import patterns, url
# No URLs exposed yet; empty placeholder so the app can still be included.
urlpatterns = ()
| praekelt/jmbo-janrain | janrain/urls.py | Python | bsd-3-clause | 70 | 0 |
# Import a whole load of stuff
from System.IO import *
from System.Drawing import *
from System.Runtime.Remoting import *
from System.Threading import *
from System.Windows.Forms import *
from System.Xml.Serialization import *
from System import *
from Analysis.EDM import *
from DAQ.Environment import *
from EDMConfig import *
def saveBlockConfig(path, config):
	"""Serialize *config* (a BlockConfig) to an XML file at *path*."""
	fs = FileStream(path, FileMode.Create)
	try:
		s = XmlSerializer(BlockConfig)
		s.Serialize(fs,config)
	finally:
		# Close the stream even if serialization throws (original leaked it).
		fs.Close()
def loadBlockConfig(path):
	"""Deserialize and return a BlockConfig from the XML file at *path*."""
	fs = FileStream(path, FileMode.Open)
	try:
		s = XmlSerializer(BlockConfig)
		bc = s.Deserialize(fs)
	finally:
		# Close the stream even if deserialization throws (original leaked it).
		fs.Close()
	return bc
def writeLatestBlockNotificationFile(cluster, blockIndex):
	"""Write the cluster name and block index (tab-separated) to latestBlock.txt.

	EDMGo calls this after saving each block to notify the external
	Mathematica analysis of the newest block.
	"""
	path = Environs.FileSystem.Paths["settingsPath"] + "\\BlockHead\\latestBlock.txt"
	fs = FileStream(path, FileMode.Create)
	sw = StreamWriter(fs)
	try:
		sw.WriteLine(cluster + "\t" + str(blockIndex))
	finally:
		# Close writer and stream even if the write throws (original leaked both).
		sw.Close()
		fs.Close()
def checkYAGAndFix():
	"""Restart the pattern generator if the YAG interlock has tripped."""
	if hc.YAGInterlockFailed:
		bh.StopPattern()
		bh.StartPattern()
def printWaveformCode(bc, name):
	"""Print the waveform code and inversion flag of the named modulation."""
	wave = bc.GetModulationByName(name).Waveform
	print(name + ": " + str(wave.Code) + " -- " + str(wave.Inverted))
def prompt(text):
	"""Display *text* as a prompt and return the user's input line, stripped."""
	# Local import: `sys` is never imported at the top of this file (only
	# wildcard .NET imports), so the original raised NameError here.
	import sys
	sys.stdout.write(text)
	return sys.stdin.readline().strip()
def measureParametersAndMakeBC(cluster, eState, bState, rfState, scramblerV, probePolAngle, pumpPolAngle):
	"""Measure the current hardware parameters and build a BlockConfig.

	Reads monitors from the hardware controller (hc) / block head (bh),
	loads the default BlockConfig template, fills in settings and modulation
	centres/steps, generates fresh waveform codes, and stores the E-switch
	timing. Call order of the hardware updates matters.
	"""
	fileSystem = Environs.FileSystem
	print("Measuring parameters ...")
	bh.StopPattern()
	hc.UpdateRFPowerMonitor()
	hc.UpdateRFFrequencyMonitor()
	bh.StartPattern()
	hc.UpdateBCurrentMonitor()
	hc.UpdateVMonitor()
	hc.UpdateI2AOMFreqMonitor()
	print("V plus: " + str(hc.CPlusMonitorVoltage * hc.CPlusMonitorScale))
	print("V minus: " + str(hc.CMinusMonitorVoltage * hc.CMinusMonitorScale))
	print("Bias: " + str(hc.BiasCurrent))
	print("B step: " + str(abs(hc.FlipStepCurrent)))
	print("DB step: " + str(abs(hc.CalStepCurrent)))
	# load a default BlockConfig and customise it appropriately
	settingsPath = fileSystem.Paths["settingsPath"] + "\\BlockHead\\"
	bc = loadBlockConfig(settingsPath + "default.xml")
	bc.Settings["cluster"] = cluster
	bc.Settings["eState"] = eState
	bc.Settings["bState"] = bState
	bc.Settings["rfState"] = rfState
	bc.Settings["phaseScramblerV"] = scramblerV
	bc.Settings["probePolarizerAngle"] = probePolAngle
	bc.Settings["pumpPolarizerAngle"] = pumpPolAngle
	bc.Settings["ePlus"] = hc.CPlusMonitorVoltage * hc.CPlusMonitorScale
	bc.Settings["eMinus"] = hc.CMinusMonitorVoltage * hc.CMinusMonitorScale
	# currents are in mA at the hardware, stored in A (divide by 1000)
	bc.GetModulationByName("B").Centre = (hc.BiasCurrent)/1000
	bc.GetModulationByName("B").Step = abs(hc.FlipStepCurrent)/1000
	bc.GetModulationByName("DB").Step = abs(hc.CalStepCurrent)/1000
	# these next 3, seemingly redundant, lines are to preserve backward compatibility
	bc.GetModulationByName("B").PhysicalCentre = (hc.BiasCurrent)/1000
	bc.GetModulationByName("B").PhysicalStep = abs(hc.FlipStepCurrent)/1000
	bc.GetModulationByName("DB").PhysicalStep = abs(hc.CalStepCurrent)/1000
	bc.GetModulationByName("RF1A").Centre = hc.RF1AttCentre
	bc.GetModulationByName("RF1A").Step = hc.RF1AttStep
	bc.GetModulationByName("RF1A").PhysicalCentre = hc.RF1PowerCentre
	bc.GetModulationByName("RF1A").PhysicalStep = hc.RF1PowerStep
	bc.GetModulationByName("RF2A").Centre = hc.RF2AttCentre
	bc.GetModulationByName("RF2A").Step = hc.RF2AttStep
	bc.GetModulationByName("RF2A").PhysicalCentre = hc.RF2PowerCentre
	bc.GetModulationByName("RF2A").PhysicalStep = hc.RF2PowerStep
	bc.GetModulationByName("RF1F").Centre = hc.RF1FMCentre
	bc.GetModulationByName("RF1F").Step = hc.RF1FMStep
	bc.GetModulationByName("RF1F").PhysicalCentre = hc.RF1FrequencyCentre
	bc.GetModulationByName("RF1F").PhysicalStep = hc.RF1FrequencyStep
	bc.GetModulationByName("RF2F").Centre = hc.RF2FMCentre
	bc.GetModulationByName("RF2F").Step = hc.RF2FMStep
	bc.GetModulationByName("RF2F").PhysicalCentre = hc.RF2FrequencyCentre
	bc.GetModulationByName("RF2F").PhysicalStep = hc.RF2FrequencyStep
	bc.GetModulationByName("LF1").Centre = hc.FLPZTVoltage
	bc.GetModulationByName("LF1").Step = hc.FLPZTStep
	bc.GetModulationByName("LF1").PhysicalCentre = hc.I2LockAOMFrequencyCentre
	bc.GetModulationByName("LF1").PhysicalStep = hc.I2LockAOMFrequencyStep
	# generate the waveform codes
	print("Generating waveform codes ...")
	eWave = bc.GetModulationByName("E").Waveform
	eWave.Name = "E"
	lf1Wave = bc.GetModulationByName("LF1").Waveform
	lf1Wave.Name = "LF1"
	ws = WaveformSetGenerator.GenerateWaveforms( (eWave, lf1Wave), ("B","DB","PI","RF1A","RF2A","RF1F","RF2F") )
	bc.GetModulationByName("B").Waveform = ws["B"]
	bc.GetModulationByName("DB").Waveform = ws["DB"]
	bc.GetModulationByName("PI").Waveform = ws["PI"]
	bc.GetModulationByName("RF1A").Waveform = ws["RF1A"]
	bc.GetModulationByName("RF2A").Waveform = ws["RF2A"]
	bc.GetModulationByName("RF1F").Waveform = ws["RF1F"]
	bc.GetModulationByName("RF2F").Waveform = ws["RF2F"]
	# change the inversions of the static codes E and LF1
	bc.GetModulationByName("E").Waveform.Inverted = WaveformSetGenerator.RandomBool()
	bc.GetModulationByName("LF1").Waveform.Inverted = WaveformSetGenerator.RandomBool()
	# print the waveform codes
	# printWaveformCode(bc, "E")
	# printWaveformCode(bc, "B")
	# printWaveformCode(bc, "DB")
	# printWaveformCode(bc, "PI")
	# printWaveformCode(bc, "RF1A")
	# printWaveformCode(bc, "RF2A")
	# printWaveformCode(bc, "RF1F")
	# printWaveformCode(bc, "RF2F")
	# printWaveformCode(bc, "LF1")
	# store e-switch info in block config
	print("Storing E switch parameters ...")
	bc.Settings["eRampDownTime"] = hc.ERampDownTime
	bc.Settings["eRampDownDelay"] = hc.ERampDownDelay
	bc.Settings["eBleedTime"] = hc.EBleedTime
	bc.Settings["eSwitchTime"] = hc.ESwitchTime
	bc.Settings["eRampUpTime"] = hc.ERampUpTime
	bc.Settings["eRampUpDelay"] = hc.ERampUpDelay
	# this is for legacy analysis compatibility
	bc.Settings["eDischargeTime"] = hc.ERampDownTime + hc.ERampDownDelay
	bc.Settings["eChargeTime"] = hc.ERampUpTime + hc.ERampUpDelay
	# store the E switch asymmetry in the block
	bc.Settings["E0PlusBoost"] = hc.E0PlusBoost
	return bc
# lock gains used by updateLocks
# microamps of current per volt of control input
kSteppingBiasCurrentPerVolt = 1000.0
# max change in the b-bias voltage per block
kBMaxChange = 0.05
# volts of rf*a input required per cal's worth of offset
kRFAVoltsPerCal = 3.2
# max change in the rf*a control voltage per block
kRFAMaxChange = 0.1
# volts of rf*f input required per cal's worth of offset
kRFFVoltsPerCal = 8
# max change in the rf*f control voltage per block
kRFFMaxChange = 0.1
def updateLocks(bState):
	"""Apply one feedback step of the software locks after a block.

	Reads the analysed channel values from the block head's PMT channel
	set, then nudges the stepping B bias, RF attenuations (RF1A/RF2A), RF
	frequencies (RF1F/RF2F) and the laser PZT (LF1) towards zero error.
	Each correction is limited by the k*MaxChange constants and each
	hardware setpoint is clamped to its valid range via windowValue.
	bState flips the sign of the B-bias feedback.
	"""
	pmtChannelValues = bh.DBlock.ChannelValues[0]
	# note the weird python syntax for a one element list
	sigIndex = pmtChannelValues.GetChannelIndex(("SIG",))
	sigValue = pmtChannelValues.GetValue(sigIndex)
	bIndex = pmtChannelValues.GetChannelIndex(("B",))
	bValue = pmtChannelValues.GetValue(bIndex)
	#bError = pmtChannelValues.GetError(bIndex)
	dbIndex = pmtChannelValues.GetChannelIndex(("DB",))
	dbValue = pmtChannelValues.GetValue(dbIndex)
	#dbError = pmtChannelValues.GetError(dbIndex)
	rf1aIndex = pmtChannelValues.GetChannelIndex(("RF1A","DB"))
	rf1aValue = pmtChannelValues.GetValue(rf1aIndex)
	#rf1aError = pmtChannelValues.GetError(rf1aIndex)
	rf2aIndex = pmtChannelValues.GetChannelIndex(("RF2A","DB"))
	rf2aValue = pmtChannelValues.GetValue(rf2aIndex)
	#rf2aError = pmtChannelValues.GetError(rf2aIndex)
	rf1fIndex = pmtChannelValues.GetChannelIndex(("RF1F","DB"))
	rf1fValue = pmtChannelValues.GetValue(rf1fIndex)
	#rf1fError = pmtChannelValues.GetError(rf1fIndex)
	rf2fIndex = pmtChannelValues.GetChannelIndex(("RF2F","DB"))
	rf2fValue = pmtChannelValues.GetValue(rf2fIndex)
	#rf2fError = pmtChannelValues.GetError(rf2fIndex)
	lf1Index = pmtChannelValues.GetChannelIndex(("LF1",))
	lf1Value = pmtChannelValues.GetValue(lf1Index)
	#lf1Error = pmtChannelValues.GetError(lf1Index)
	lf1dbIndex = pmtChannelValues.GetChannelIndex(("LF1","DB"))
	lf1dbValue = pmtChannelValues.GetValue(lf1dbIndex)
	print "SIG: " + str(sigValue)
	print "B: " + str(bValue) + " DB: " + str(dbValue)
	print "RF1A: " + str(rf1aValue) + " RF2A: " + str(rf2aValue)
	print "RF1F: " + str(rf1fValue) + " RF2F: " + str(rf2fValue)
	print "LF1: " + str(lf1Value) + " LF1.DB: " + str(lf1dbValue)
	# B bias lock
	# the sign of the feedback depends on the b-state
	if bState:
		feedbackSign = 1
	else:
		feedbackSign = -1
	deltaBias = - (1.0/8.0) * feedbackSign * (hc.CalStepCurrent * (bValue / dbValue)) / kSteppingBiasCurrentPerVolt
	deltaBias = windowValue(deltaBias, -kBMaxChange, kBMaxChange)
	print "Attempting to change stepping B bias by " + str(deltaBias) + " V."
	newBiasVoltage = windowValue( hc.SteppingBiasVoltage - deltaBias, 0, 5)
	hc.SetSteppingBBiasVoltage( newBiasVoltage )
	# RFA locks
	deltaRF1A = - (1.0/3.0) * (rf1aValue / dbValue) * kRFAVoltsPerCal
	deltaRF1A = windowValue(deltaRF1A, -kRFAMaxChange, kRFAMaxChange)
	print "Attempting to change RF1A by " + str(deltaRF1A) + " V."
	newRF1A = windowValue( hc.RF1AttCentre - deltaRF1A, hc.RF1AttStep, 5 - hc.RF1AttStep)
	hc.SetRF1AttCentre( newRF1A )
	#
	deltaRF2A = - (1.0/3.0) * (rf2aValue / dbValue) * kRFAVoltsPerCal
	deltaRF2A = windowValue(deltaRF2A, -kRFAMaxChange, kRFAMaxChange)
	print "Attempting to change RF2A by " + str(deltaRF2A) + " V."
	newRF2A = windowValue( hc.RF2AttCentre - deltaRF2A, hc.RF2AttStep, 5 - hc.RF2AttStep )
	hc.SetRF2AttCentre( newRF2A )
	# RFF locks
	deltaRF1F = - (1.0/4.0) * (rf1fValue / dbValue) * kRFFVoltsPerCal
	deltaRF1F = windowValue(deltaRF1F, -kRFFMaxChange, kRFFMaxChange)
	print "Attempting to change RF1F by " + str(deltaRF1F) + " V."
	newRF1F = windowValue( hc.RF1FMCentre - deltaRF1F, hc.RF1FMStep, 5 - hc.RF1FMStep)
	hc.SetRF1FMCentre( newRF1F )
	#
	deltaRF2F = - (1.0/4.0) * (rf2fValue / dbValue) * kRFFVoltsPerCal
	deltaRF2F = windowValue(deltaRF2F, -kRFFMaxChange, kRFFMaxChange)
	print "Attempting to change RF2F by " + str(deltaRF2F) + " V."
	newRF2F = windowValue( hc.RF2FMCentre - deltaRF2F, hc.RF2FMStep, 5 - hc.RF2FMStep )
	hc.SetRF2FMCentre( newRF2F )
	# Laser frequency lock (-ve multiplier in f0 mode and +ve in f1)
	deltaLF1 = 1.25 * (lf1Value / dbValue) # I think this should be +ve (but that doesn't work)
	deltaLF1 = windowValue(deltaLF1, -0.1, 0.1)
	print "Attempting to change LF1 by " + str(deltaLF1) + " V."
	newLF1 = windowValue( hc.FLPZTVoltage - deltaLF1, hc.FLPZTStep, 5 - hc.FLPZTStep )
	hc.SetFLPZTVoltage( newLF1 )
def windowValue(value, minValue, maxValue):
	"""Clamp *value* to the closed interval [minValue, maxValue].

	Fixes a boundary bug in the original: when value == minValue the old
	code fell through to the else-branch and wrongly returned maxValue.
	"""
	return max(minValue, min(value, maxValue))
# Loop bookkeeping constants -- presumably measured in blocks; confirm
# against the (non-visible) parts of the main loop that use them.
kTargetRotationPeriod = 10
kReZeroLeakageMonitorsPeriod = 10
# .NET System.Random; used in EDMGo to randomise the scrambler voltage and
# the pump/probe polarizer angles.
r = Random()
def EDMGo():
	"""Main EDM data-taking loop.

	Prompts for a cluster name, reads the manual E/B/rf machine states,
	randomises the Ramsey-phase scrambler voltage and both polarizer
	angles, then repeatedly: builds a block config, acquires a block,
	saves it, notifies the external (Mathematica) analysis, updates the
	locks and re-randomises before the next block. Runs until
	maxBlockIndex blocks have been taken or the pattern is stopped.

	Relies on module-level hardware/controller objects (hc, bh, sm,
	Environs, prompt, r) provided by the IronPython script host.
	"""
	# Setup
	f = None
	fileSystem = Environs.FileSystem
	dataPath = fileSystem.GetDataDirectory(fileSystem.Paths["edmDataPath"])
	settingsPath = fileSystem.Paths["settingsPath"] + "\\BlockHead\\"
	print("Data directory is : " + dataPath)
	print("")
	suggestedClusterName = fileSystem.GenerateNextDataFileName()
	sm.SelectProfile("Scan B")
	# User inputs data
	cluster = prompt("Cluster name [" + suggestedClusterName +"]: ")
	if cluster == "":
		cluster = suggestedClusterName
		print("Using cluster " + suggestedClusterName)
	eState = hc.EManualState
	print("E-state: " + str(eState))
	bState = hc.BManualState
	print("B-state: " + str(bState))
	rfState = hc.RFManualState
	print("rf-state: " + str(rfState))
	# this is to make sure the B current monitor is in a sensible state
	hc.UpdateBCurrentMonitor()
	# randomise Ramsey phase
	scramblerV = 0.724774 * r.NextDouble()
	hc.SetScramblerVoltage(scramblerV)
	# randomise polarizations
	probePolAngle = 360.0 * r.NextDouble()
	hc.SetProbePolarizerAngle(probePolAngle)
	pumpPolAngle = 360.0 * r.NextDouble()
	hc.SetPumpPolarizerAngle(pumpPolAngle)
	bc = measureParametersAndMakeBC(cluster, eState, bState, rfState, scramblerV, probePolAngle, pumpPolAngle)
	# loop and take data
	blockIndex = 0
	maxBlockIndex = 10000
	while blockIndex < maxBlockIndex:
		print("Acquiring block " + str(blockIndex) + " ...")
		# save the block config and load into blockhead
		print("Saving temp config.")
		bc.Settings["clusterIndex"] = blockIndex
		tempConfigFile ='%(p)stemp%(c)s_%(i)s.xml' % {'p': settingsPath, 'c': cluster, 'i': blockIndex}
		saveBlockConfig(tempConfigFile, bc)
		System.Threading.Thread.Sleep(500)
		print("Loading temp config.")
		bh.LoadConfig(tempConfigFile)
		# take the block and save it
		print("Running ...")
		bh.AcquireAndWait()
		print("Done.")
		blockPath = '%(p)s%(c)s_%(i)s.zip' % {'p': dataPath, 'c': cluster, 'i': blockIndex}
		bh.SaveBlock(blockPath)
		print("Saved block "+ str(blockIndex) + ".")
		# give mma a chance to analyse the block
		print("Notifying Mathematica and waiting ...")
		writeLatestBlockNotificationFile(cluster, blockIndex)
		System.Threading.Thread.Sleep(5000)
		print("Done.")
		# increment and loop
		File.Delete(tempConfigFile)
		checkYAGAndFix()
		blockIndex = blockIndex + 1
		updateLocks(bState)
		# randomise Ramsey phase
		scramblerV = 0.724774 * r.NextDouble()
		hc.SetScramblerVoltage(scramblerV)
		# randomise polarizations
		probePolAngle = 360.0 * r.NextDouble()
		hc.SetProbePolarizerAngle(probePolAngle)
		pumpPolAngle = 360.0 * r.NextDouble()
		hc.SetPumpPolarizerAngle(pumpPolAngle)
		bc = measureParametersAndMakeBC(cluster, eState, bState, rfState, scramblerV, probePolAngle, pumpPolAngle)
		hc.StepTarget(1)
		# do things that need periodically doing
		# if ((blockIndex % kTargetRotationPeriod) == 0):
		# 	print("Rotating target.")
		# 	hc.StepTarget(10)
		# Rotate the target further if the probe signal (DB channel) has
		# dropped, i.e. the current ablation spot looks worn out.
		pmtChannelValues = bh.DBlock.ChannelValues[0]
		dbIndex = pmtChannelValues.GetChannelIndex(("DB",))
		dbValue = pmtChannelValues.GetValue(dbIndex)
		if (dbValue < 8.4):
			print("Dodgy spot target rotation.")
			hc.StepTarget(5)
		# Periodically re-zero the leakage monitors with the E-field off.
		if ((blockIndex % kReZeroLeakageMonitorsPeriod) == 0):
			print("Recalibrating leakage monitors.")
			hc.EnableEField( False )
			System.Threading.Thread.Sleep(10000)
			hc.EnableBleed( True )
			System.Threading.Thread.Sleep(1000)
			hc.EnableBleed( False )
			System.Threading.Thread.Sleep(5000)
			hc.CalibrateIMonitors()
			hc.EnableEField( True )
	bh.StopPattern()
def run_script():
	"""Entry point — presumably invoked by the EDMSuite script host; starts the main loop."""
	EDMGo()
| jstammers/EDMSuite | EDMScripts/EDMLoop_neg_slope.py | Python | mit | 14,423 | 0.026555 |
# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
long_desc = '''
This package contains the ${name} Sphinx extension.
.. add description here ..
'''
requires = ['Sphinx>=0.6']
setup(
name='sphinxcontrib-${name}',
version='0.1',
url='http://bitbucket.org/birkenfeld/sphinx-contrib',
download_url='http://pypi.python.org/pypi/sphinxcontrib-${name}',
license='BSD',
author='${author}',
author_email='${author_email}',
description='Sphinx "${name}" extension',
long_description=long_desc,
zip_safe=False,
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Framework :: Sphinx :: Extension',
#'Framework :: Sphinx :: Theme',
'Topic :: Documentation',
'Topic :: Utilities',
],
platforms='any',
packages=find_packages(),
include_package_data=True,
install_requires=requires,
namespace_packages=['sphinxcontrib'],
)
| Lemma1/MAC-POSTS | doc_builder/sphinx-contrib/_template/setup.py | Python | mit | 1,194 | 0.000838 |
from collections import defaultdict
from datetime import datetime
import gc
import json
import math
import random
import sys
from .config_sample import MAX_ITERATIONS, DEBUG, PRETTY_LOG, MAX_DOCS_PER_QUERY, SERP_SIZE, TRANSFORM_LOG, QUERY_INDEPENDENT_PAGER, DEFAULT_REL
# NOTE(review): this deliberately(?) shadows the built-in NotImplementedError;
# any `raise NotImplementedError` in this module raises this local class.
class NotImplementedError(Exception):
    pass
class ClickModel:
    """
    An abstract click model interface.

    Subclasses implement train() and _get_click_probs(); the generic
    evaluation machinery (log-likelihood, perplexity, click generation)
    is provided here.
    """

    def __init__(self, ignoreIntents=True, ignoreLayout=True, config=None):
        # config overrides the module-level defaults from config_sample
        self.config = config if config is not None else {}
        self.ignoreIntents = ignoreIntents
        self.ignoreLayout = ignoreLayout

    def train(self, sessions):
        """
        Trains the model.
        """
        pass

    def test(self, sessions, reportPositionPerplexity=True):
        """
        Evaluates the prediciton power of the click model for a given sessions.
        Returns the log-likelihood, perplexity, position perplexity
        (perplexity for each rank a.k.a. position in a SERP)
        and separate perplexity values for clicks and non-clicks (skips).
        """
        logLikelihood = 0.0
        positionPerplexity = [0.0] * self.config.get('MAX_DOCS_PER_QUERY', MAX_DOCS_PER_QUERY)
        positionPerplexityClickSkip = [[0.0, 0.0] \
                for i in xrange(self.config.get('MAX_DOCS_PER_QUERY', MAX_DOCS_PER_QUERY))]
        counts = [0] * self.config.get('MAX_DOCS_PER_QUERY', MAX_DOCS_PER_QUERY)
        countsClickSkip = [[0, 0] \
                for i in xrange(self.config.get('MAX_DOCS_PER_QUERY', MAX_DOCS_PER_QUERY))]
        possibleIntents = [False] if self.ignoreIntents else [False, True]
        for s in sessions:
            iw = s.intentWeight
            intentWeight = {False: 1.0} if self.ignoreIntents else {False: 1 - iw, True: iw}
            clickProbs = self._get_click_probs(s, possibleIntents)
            N = min(len(s.clicks), self.config.get('MAX_DOCS_PER_QUERY', MAX_DOCS_PER_QUERY))
            if self.config.get('DEBUG', DEBUG):
                assert N > 1
                # sanity check: P(C_k = 1 | ...) + P(C_k = 0 | ...) == 1
                x = sum(clickProbs[i][N // 2] * intentWeight[i] for i in possibleIntents) / sum(clickProbs[i][N // 2 - 1] * intentWeight[i] for i in possibleIntents)
                s.clicks[N // 2] = 1 if s.clicks[N // 2] == 0 else 0
                clickProbs2 = self._get_click_probs(s, possibleIntents)
                y = sum(clickProbs2[i][N // 2] * intentWeight[i] for i in possibleIntents) / sum(clickProbs2[i][N // 2 - 1] * intentWeight[i] for i in possibleIntents)
                assert abs(x + y - 1) < 0.01, (x, y)
            # Marginalize over possible intents: P(C_1, ..., C_N) = \sum_{i} P(C_1, ..., C_N | I=i) P(I=i)
            logLikelihood += math.log(sum(clickProbs[i][N - 1] * intentWeight[i] for i in possibleIntents)) / N
            correctedRank = 0    # we are going to skip clicks on fake pager urls
            for k, click in enumerate(s.clicks):
                click = 1 if click else 0
                if s.extraclicks.get('TRANSFORMED', False) and \
                        (k + 1) % (self.config.get('SERP_SIZE', SERP_SIZE) + 1) == 0:
                    if self.config.get('DEBUG', DEBUG):
                        assert s.results[k] == 'PAGER'
                    continue
                # P(C_k | C_1, ..., C_{k-1}) = \sum_I P(C_1, ..., C_k | I) P(I) / \sum_I P(C_1, ..., C_{k-1} | I) P(I)
                curClick = dict((i, clickProbs[i][k]) for i in possibleIntents)
                prevClick = dict((i, clickProbs[i][k - 1]) for i in possibleIntents) if k > 0 else dict((i, 1.0) for i in possibleIntents)
                logProb = math.log(sum(curClick[i] * intentWeight[i] for i in possibleIntents), 2) - math.log(sum(prevClick[i] * intentWeight[i] for i in possibleIntents), 2)
                positionPerplexity[correctedRank] += logProb
                positionPerplexityClickSkip[correctedRank][click] += logProb
                counts[correctedRank] += 1
                countsClickSkip[correctedRank][click] += 1
                correctedRank += 1
        positionPerplexity = [2 ** (-x / count if count else x) for (x, count) in zip(positionPerplexity, counts)]
        positionPerplexityClickSkip = [[2 ** (-x[click] / (count[click] if count[click] else 1) if count else x) \
                for (x, count) in zip(positionPerplexityClickSkip, countsClickSkip)] for click in xrange(2)]
        perplexity = sum(positionPerplexity) / len(positionPerplexity)
        if reportPositionPerplexity:
            return logLikelihood / len(sessions), perplexity, positionPerplexity, positionPerplexityClickSkip
        else:
            return logLikelihood / len(sessions), perplexity

    def _get_click_probs(self, s, possible_intents):
        """
        Returns click probabilities list for a given list of s.clicks.
        For each intent $i$ and each rank $k$ we have:
        click_probs[i][k-1] = P(C_1, ..., C_k | I=i)

        Base-class placeholder: a geometric distribution with ratio 0.5.
        """
        click_probs = dict((i, [0.5 ** (k + 1) for k in xrange(len(s.clicks))]) for i in possible_intents)
        return click_probs

    def get_loglikelihood(self, sessions):
        """
        Returns the average log-likelihood of the current model for given sessions.
        This is a lightweight version of the self.test() method.
        """
        return sum(self.get_log_click_probs(s) for s in sessions) / len(sessions)

    def get_log_click_probs(self, session):
        """
        Returns an average log-likelihood for a given session,
        i.e. log-likelihood of all the click events, divided
        by the number of documents in the session.
        """
        possibleIntents = [False] if self.ignoreIntents else [False, True]
        intentWeight = {False: 1.0} if self.ignoreIntents else \
                {False: 1 - session.intentWeight, True: session.intentWeight}
        # BUG FIX: was `self._get_click_probs(s, ...)` with `s` undefined here.
        clickProbs = self._get_click_probs(session, possibleIntents)
        N = min(len(session.clicks), self.config.get('MAX_DOCS_PER_QUERY', MAX_DOCS_PER_QUERY))
        # Marginalize over possible intents: P(C_1, ..., C_N) = \sum_{i} P(C_1, ..., C_N | I=i) P(I=i)
        return math.log(sum(clickProbs[i][N - 1] * intentWeight[i] for i in possibleIntents)) / N

    def get_model_relevances(self, session, intent=False):
        """
        Returns estimated relevance of each document in a given session
        based on a trained click model.
        """
        raise NotImplementedError

    def predict_click_probs(self, session, intent=False):
        """
        Predicts click probabilities for a given session. Does not use session.clicks.
        This is a vector of P(C_k = 1 | E_k = 1) for different ranks $k$.
        """
        raise NotImplementedError

    def predict_stop_probs(self, session, intent=False):
        """
        Predicts stop probabilities (after click) for each document in a session.
        This is often referred to as satisfaction probability.
        This is a vector of P(S_k = 1 | C_k = 1) for different ranks $k$.
        """
        raise NotImplementedError

    def get_abandonment_prob(self, rank, intent=False, layout=None):
        """
        Predicts probability of stopping without click after examining document at rank `rank`.
        """
        return 0.0

    def generate_clicks(self, session):
        """
        Generates clicks for a given session, assuming cascade examination order.
        """
        clicks = [0] * len(session.results)
        # First, randomly select user intent.
        intent = False    # non-vertical intent by default
        if not self.ignoreIntents:
            # BUG FIX: was `random.uniforma(0, 1)` (AttributeError at runtime).
            random_intent_prob = random.uniform(0, 1)
            if random_intent_prob < session.intentWeight:
                intent = True
        predicted_click_probs = self.predict_click_probs(session, intent)
        predicted_stop_probs = self.predict_stop_probs(session, intent)
        for rank, result in enumerate(session.results):
            random_click_prob = random.uniform(0, 1)
            clicks[rank] = 1 if random_click_prob < predicted_click_probs[rank] else 0
            if clicks[rank] == 1:
                # clicked: stop with the (satisfaction) stop probability
                random_stop_prob = random.uniform(0, 1)
                if random_stop_prob < predicted_stop_probs[rank]:
                    break
            else:
                # not clicked: stop with the abandonment probability
                random_stop_prob = random.uniform(0, 1)
                if random_stop_prob < self.get_abandonment_prob(rank, intent, session.layout):
                    break
        return clicks
class DbnModel(ClickModel):
    """Dynamic Bayesian Network click model, trained with EM.

    Per (intent, query, url) it learns an attractiveness probability `a`
    and a satisfaction probability `s`; `gammas` are the four continuation
    probabilities indexed by (layout-of-next-position, intent).
    """

    def __init__(self, gammas, ignoreIntents=True, ignoreLayout=True, config=None):
        self.gammas = gammas
        ClickModel.__init__(self, ignoreIntents, ignoreLayout, config)

    def train(self, sessions):
        """Run EM over `sessions`, filling self.urlRelevances and
        self.queryIntentsWeights (posterior vertical-intent weight per query)."""
        possibleIntents = [False] if self.ignoreIntents else [False, True]
        max_query_id = self.config.get('MAX_QUERY_ID')
        if max_query_id is None:
            print >>sys.stderr, 'WARNING: no MAX_QUERY_ID specified for', self
            max_query_id = 100000
        # intent -> query -> url -> (a_u, s_u)
        self.urlRelevances = dict((i,
                [defaultdict(lambda: {'a': self.config.get('DEFAULT_REL', DEFAULT_REL),
                        's': self.config.get('DEFAULT_REL', DEFAULT_REL)}) \
                                for q in xrange(max_query_id)]) for i in possibleIntents
        )
        # here we store distribution of posterior intent weights given train data
        self.queryIntentsWeights = defaultdict(lambda: [])

        # EM algorithm
        if not self.config.get('PRETTY_LOG', PRETTY_LOG):
            print >>sys.stderr, '-' * 80
            print >>sys.stderr, 'Start. Current time is', datetime.now()
        for iteration_count in xrange(self.config.get('MAX_ITERATIONS', MAX_ITERATIONS)):
            # urlRelFractions[intent][query][url][r][1] --- coefficient before \log r
            # urlRelFractions[intent][query][url][r][0] --- coefficient before \log (1 - r)
            urlRelFractions = dict((i, [defaultdict(lambda: {'a': [1.0, 1.0], 's': [1.0, 1.0]}) for q in xrange(max_query_id)]) for i in [False, True])
            self.queryIntentsWeights = defaultdict(lambda: [])
            # E step
            for s in sessions:
                positionRelevances = {}
                query = s.query
                for intent in possibleIntents:
                    positionRelevances[intent] = {}
                    for r in ['a', 's']:
                        positionRelevances[intent][r] = [self.urlRelevances[intent][query][url][r] for url in s.results]
                layout = [False] * len(s.layout) if self.ignoreLayout else s.layout
                sessionEstimate = dict((intent, self._getSessionEstimate(positionRelevances[intent], layout, s.clicks, intent)) for intent in possibleIntents)
                # P(I | C, G)
                if self.ignoreIntents:
                    p_I__C_G = {False: 1, True: 0}
                else:
                    a = sessionEstimate[False]['C'] * (1 - s.intentWeight)
                    b = sessionEstimate[True]['C'] * s.intentWeight
                    p_I__C_G = {False: a / (a + b), True: b / (a + b)}
                self.queryIntentsWeights[query].append(p_I__C_G[True])
                for k, url in enumerate(s.results):
                    for intent in possibleIntents:
                        # update a
                        urlRelFractions[intent][query][url]['a'][1] += sessionEstimate[intent]['a'][k] * p_I__C_G[intent]
                        urlRelFractions[intent][query][url]['a'][0] += (1 - sessionEstimate[intent]['a'][k]) * p_I__C_G[intent]
                        if s.clicks[k] != 0:
                            # Update s
                            urlRelFractions[intent][query][url]['s'][1] += sessionEstimate[intent]['s'][k] * p_I__C_G[intent]
                            urlRelFractions[intent][query][url]['s'][0] += (1 - sessionEstimate[intent]['s'][k]) * p_I__C_G[intent]
            if not self.config.get('PRETTY_LOG', PRETTY_LOG):
                sys.stderr.write('E')
            # M step
            # update parameters and record mean square error
            sum_square_displacement = 0.0
            Q_functional = 0.0
            for i in possibleIntents:
                for query, d in enumerate(urlRelFractions[i]):
                    if not d:
                        continue
                    for url, relFractions in d.iteritems():
                        a_u_new = relFractions['a'][1] / (relFractions['a'][1] + relFractions['a'][0])
                        sum_square_displacement += (a_u_new - self.urlRelevances[i][query][url]['a']) ** 2
                        self.urlRelevances[i][query][url]['a'] = a_u_new
                        Q_functional += relFractions['a'][1] * math.log(a_u_new) + relFractions['a'][0] * math.log(1 - a_u_new)
                        s_u_new = relFractions['s'][1] / (relFractions['s'][1] + relFractions['s'][0])
                        sum_square_displacement += (s_u_new - self.urlRelevances[i][query][url]['s']) ** 2
                        self.urlRelevances[i][query][url]['s'] = s_u_new
                        Q_functional += relFractions['s'][1] * math.log(s_u_new) + relFractions['s'][0] * math.log(1 - s_u_new)
            if not self.config.get('PRETTY_LOG', PRETTY_LOG):
                sys.stderr.write('M\n')
            rmsd = math.sqrt(sum_square_displacement)
            if self.config.get('PRETTY_LOG', PRETTY_LOG):
                sys.stderr.write('%d..' % (iteration_count + 1))
            else:
                print >>sys.stderr, 'Iteration: %d, ERROR: %f' % (iteration_count + 1, rmsd)
                print >>sys.stderr, 'Q functional: %f' % Q_functional
        if self.config.get('PRETTY_LOG', PRETTY_LOG):
            sys.stderr.write('\n')
        for q, intentWeights in self.queryIntentsWeights.iteritems():
            self.queryIntentsWeights[q] = sum(intentWeights) / len(intentWeights)

    @staticmethod
    def testBackwardForward():
        """Self-test: forward/backward marginals must agree at every position."""
        positionRelevances = {'a': [0.5] * MAX_DOCS_PER_QUERY, 's': [0.5] * MAX_DOCS_PER_QUERY}
        gammas = [0.9] * 4
        layout = [False] * (MAX_DOCS_PER_QUERY + 1)
        clicks = [0] * MAX_DOCS_PER_QUERY
        alpha, beta = DbnModel.getForwardBackwardEstimates(positionRelevances, gammas, layout, clicks, False)
        x = alpha[0][0] * beta[0][0] + alpha[0][1] * beta[0][1]
        assert all(abs((a[0] * b[0] + a[1] * b[1]) / x - 1) < 0.00001 for a, b in zip(alpha, beta))

    @staticmethod
    def getGamma(gammas, k, layout, intent):
        # gamma index: 2 * [next position is vertical] + [vertical intent]
        index = 2 * (1 if layout[k + 1] else 0) + (1 if intent else 0)
        return gammas[index]

    @staticmethod
    def getForwardBackwardEstimates(positionRelevances, gammas, layout, clicks, intent,
                                    debug=False):
        """Forward (alpha) / backward (beta) messages over the hidden
        examination variable E_k given the observed clicks."""
        N = len(clicks)
        if debug:
            assert N + 1 == len(layout)
        alpha = [[0.0, 0.0] for i in xrange(N + 1)]
        beta = [[0.0, 0.0] for i in xrange(N + 1)]
        alpha[0] = [0.0, 1.0]
        beta[N] = [1.0, 1.0]
        # P(E_{k+1} = e, C_k | E_k = e', G, I)
        updateMatrix = [[[0.0 for e1 in [0, 1]] for e in [0, 1]] for i in xrange(N)]
        for k, C_k in enumerate(clicks):
            a_u = positionRelevances['a'][k]
            s_u = positionRelevances['s'][k]
            gamma = DbnModel.getGamma(gammas, k, layout, intent)
            if C_k == 0:
                updateMatrix[k][0][0] = 1
                updateMatrix[k][0][1] = (1 - gamma) * (1 - a_u)
                updateMatrix[k][1][0] = 0
                updateMatrix[k][1][1] = gamma * (1 - a_u)
            else:
                updateMatrix[k][0][0] = 0
                updateMatrix[k][0][1] = (s_u + (1 - gamma) * (1 - s_u)) * a_u
                updateMatrix[k][1][0] = 0
                updateMatrix[k][1][1] = gamma * (1 - s_u) * a_u

        for k in xrange(N):
            for e in [0, 1]:
                alpha[k + 1][e] = sum(alpha[k][e1] * updateMatrix[k][e][e1] for e1 in [0, 1])
                beta[N - 1 - k][e] = sum(beta[N - k][e1] * updateMatrix[N - 1 - k][e1][e] for e1 in [0, 1])
        return alpha, beta

    def _getSessionEstimate(self, positionRelevances, layout, clicks, intent):
        # Returns a dict that represents the following:
        # {'a': P(A_k | I, C, G),
        #  's': P(S_k | I, C, G),
        #  'C': P(C | I, G),
        #  'clicks': P(C_k | C_1, ..., C_{k-1}, I, G)}
        # E.g., sessionEstimate['a'][k] = P(A_k = 1 | I = i, C, G).
        N = len(clicks)
        if self.config.get('DEBUG', DEBUG):
            assert N + 1 == len(layout)
        sessionEstimate = {'a': [0.0] * N, 's': [0.0] * N, 'e': [[0.0, 0.0] for k in xrange(N)], 'C': 0.0, 'clicks': [0.0] * N}
        alpha, beta = self.getForwardBackwardEstimates(positionRelevances,
                self.gammas, layout, clicks, intent,
                debug=self.config.get('DEBUG', DEBUG)
        )
        try:
            # varphi[k] = posterior P(E_k = e | C, G) for e in {0, 1}
            varphi = [((a[0] * b[0]) / (a[0] * b[0] + a[1] * b[1]), (a[1] * b[1]) / (a[0] * b[0] + a[1] * b[1])) for a, b in zip(alpha, beta)]
        except ZeroDivisionError:
            print >>sys.stderr, alpha, beta, [(a[0] * b[0] + a[1] * b[1]) for a, b in zip(alpha, beta)], positionRelevances
            sys.exit(1)
        if self.config.get('DEBUG', DEBUG):
            assert all(ph[0] < 0.01 for ph, c in zip(varphi[:N], clicks) if c != 0), (alpha, beta, varphi, clicks)
        # calculate P(C | I, G) for k = 0
        sessionEstimate['C'] = alpha[0][0] * beta[0][0] + alpha[0][1] * beta[0][1]      # == 0 + 1 * beta[0][1]
        for k, C_k in enumerate(clicks):
            a_u = positionRelevances['a'][k]
            s_u = positionRelevances['s'][k]
            gamma = self.getGamma(self.gammas, k, layout, intent)
            # E_k_multiplier --- P(S_k = 0 | C_k) P(C_k | E_k = 1)
            if C_k == 0:
                sessionEstimate['a'][k] = a_u * varphi[k][0]
                sessionEstimate['s'][k] = 0.0
            else:
                sessionEstimate['a'][k] = 1.0
                sessionEstimate['s'][k] = varphi[k + 1][0] * s_u / (s_u + (1 - gamma) * (1 - s_u))
            # P(C_1, ..., C_k | I)
            sessionEstimate['clicks'][k] = sum(alpha[k + 1])
        return sessionEstimate

    def _get_click_probs(self, s, possibleIntents):
        """
        Returns clickProbs list:
        clickProbs[i][k] = P(C_1, ..., C_k | I=i)
        """
        # TODO: ensure that s.clicks[l] not used to calculate clickProbs[i][k] for l >= k
        positionRelevances = {}
        for intent in possibleIntents:
            positionRelevances[intent] = {}
            for r in ['a', 's']:
                positionRelevances[intent][r] = [self.urlRelevances[intent][s.query][url][r] for url in s.results]
                if self.config.get('QUERY_INDEPENDENT_PAGER', QUERY_INDEPENDENT_PAGER):
                    for k, u in enumerate(s.results):
                        if u == 'PAGER':
                            # use dummy 0 query for all fake pager URLs
                            positionRelevances[intent][r][k] = self.urlRelevances[intent][0][url][r]
        layout = [False] * len(s.layout) if self.ignoreLayout else s.layout
        return dict((i, self._getSessionEstimate(positionRelevances[i], layout, s.clicks, i)['clicks']) for i in possibleIntents)

    def get_model_relevances(self, session, intent=False):
        """
        Returns estimated relevance of each document in a given session
        based on a trained click model.

        You can make use of the fact that model trains different relevances
        for different intents by specifying `intent` argument. If it is set
        to False, simple web relevance is returned, if it is to True, then
        vertical relevance is returned, i.e., how relevant each document
        is to a vertical intent.
        """
        relevances = []
        for rank, result in enumerate(session.results):
            a = self.urlRelevances[intent][session.query][result]['a']
            s = self.urlRelevances[intent][session.query][result]['s']
            relevances.append(a * s)
        return relevances

    def predict_click_probs(self, session, intent=False):
        """
        Predicts click probabilities for a given session. Does not use clicks.
        """
        click_probs = []
        for rank, result in enumerate(session.results):
            a = self.urlRelevances[intent][session.query][result]['a']
            click_probs.append(a)
        return click_probs

    def predict_stop_probs(self, session, intent=False):
        """
        Predicts stop probabilities for each document in a session.
        """
        stop_probs = []
        for rank, result in enumerate(session.results):
            s = self.urlRelevances[intent][session.query][result]['s']
            stop_probs.append(s)
        return stop_probs

    def get_abandonment_prob(self, rank, intent=False, layout=None):
        """
        Predicts probability of stopping without click after examining document at rank `rank`.
        """
        return 1.0 - self.getGamma(self.gammas, rank, layout, intent)
class SimplifiedDbnModel(DbnModel):
    """DBN with all continuation probabilities fixed to 1 (SDBN).

    Under this assumption the a/s parameters have a closed-form MLE, so
    train() counts click/skip fractions directly instead of running EM.
    """

    def __init__(self, ignoreIntents=True, ignoreLayout=True, config=None):
        assert ignoreIntents
        assert ignoreLayout
        DbnModel.__init__(self, (1.0, 1.0, 1.0, 1.0), ignoreIntents, ignoreLayout, config)

    def train(self, sessions):
        max_query_id = self.config.get('MAX_QUERY_ID')
        if max_query_id is None:
            print >>sys.stderr, 'WARNING: no MAX_QUERY_ID specified for', self
            max_query_id = 100000
        urlRelFractions = [defaultdict(lambda: {'a': [1.0, 1.0], 's': [1.0, 1.0]}) for q in xrange(max_query_id)]
        for s in sessions:
            query = s.query
            # under SDBN everything up to the last click was examined
            lastClickedPos = len(s.clicks) - 1
            for k, c in enumerate(s.clicks):
                if c != 0:
                    lastClickedPos = k
            for k, (u, c) in enumerate(zip(s.results, s.clicks[:(lastClickedPos + 1)])):
                tmpQuery = query
                if self.config.get('QUERY_INDEPENDENT_PAGER', QUERY_INDEPENDENT_PAGER) \
                        and u == 'PAGER':
                    assert self.config.get('TRANSFORM_LOG', TRANSFORM_LOG)
                    # the same dummy query for all pagers
                    query = 0
                if c != 0:
                    urlRelFractions[query][u]['a'][1] += 1
                    if k == lastClickedPos:
                        # last click in the session => satisfied
                        urlRelFractions[query][u]['s'][1] += 1
                    else:
                        urlRelFractions[query][u]['s'][0] += 1
                else:
                    urlRelFractions[query][u]['a'][0] += 1
                if self.config.get('QUERY_INDEPENDENT_PAGER', QUERY_INDEPENDENT_PAGER):
                    query = tmpQuery
        self.urlRelevances = dict((i,
                [defaultdict(lambda: {'a': self.config.get('DEFAULT_REL', DEFAULT_REL),
                        's': self.config.get('DEFAULT_REL', DEFAULT_REL)}) \
                                for q in xrange(max_query_id)]) for i in [False])
        for query, d in enumerate(urlRelFractions):
            if not d:
                continue
            for url, relFractions in d.iteritems():
                self.urlRelevances[False][query][url]['a'] = relFractions['a'][1] / (relFractions['a'][1] + relFractions['a'][0])
                self.urlRelevances[False][query][url]['s'] = relFractions['s'][1] / (relFractions['s'][1] + relFractions['s'][0])
class UbmModel(ClickModel):
    """User Browsing Model (UBM), trained with EM.

    Examination depends on (rank, distance from previous click); per
    (intent, query, url) an attractiveness `alpha` is learned. With
    `explorationBias` an extra per-position parameter `e` discounts
    examination of non-vertical results once a vertical has been clicked.
    """

    gammaTypesNum = 4

    def __init__(self, ignoreIntents=True, ignoreLayout=True, explorationBias=False,
                 config=None):
        self.explorationBias = explorationBias
        ClickModel.__init__(self, ignoreIntents, ignoreLayout, config)

    def train(self, sessions):
        max_query_id = self.config.get('MAX_QUERY_ID')
        if max_query_id is None:
            print >>sys.stderr, 'WARNING: no MAX_QUERY_ID specified for', self
            max_query_id = 100000
        possibleIntents = [False] if self.ignoreIntents else [False, True]
        # alpha: intent -> query -> url -> "attractiveness probability"
        self.alpha = dict((i,
                [defaultdict(lambda: self.config.get('DEFAULT_REL', DEFAULT_REL)) \
                        for q in xrange(max_query_id)]) for i in possibleIntents)
        # gamma: freshness of the current result: gammaType -> rank -> "distance from the last click" - 1 -> examination probability
        self.gamma = [[[0.5 \
                for d in xrange(self.config.get('MAX_DOCS_PER_QUERY', MAX_DOCS_PER_QUERY))] \
                        for r in xrange(self.config.get('MAX_DOCS_PER_QUERY', MAX_DOCS_PER_QUERY))] \
                                for g in xrange(self.gammaTypesNum)]
        if self.explorationBias:
            self.e = [0.5 \
                    for p in xrange(self.config.get('MAX_DOCS_PER_QUERY', MAX_DOCS_PER_QUERY))]
        if not self.config.get('PRETTY_LOG', PRETTY_LOG):
            print >>sys.stderr, '-' * 80
            print >>sys.stderr, 'Start. Current time is', datetime.now()
        for iteration_count in xrange(self.config.get('MAX_ITERATIONS', MAX_ITERATIONS)):
            self.queryIntentsWeights = defaultdict(lambda: [])
            # not like in DBN! xxxFractions[0] is a numerator while xxxFraction[1] is a denominator
            alphaFractions = dict((i, [defaultdict(lambda: [1.0, 2.0]) for q in xrange(max_query_id)]) for i in possibleIntents)
            gammaFractions = [[[[1.0, 2.0] \
                    for d in xrange(self.config.get('MAX_DOCS_PER_QUERY', MAX_DOCS_PER_QUERY))] \
                            for r in xrange(self.config.get('MAX_DOCS_PER_QUERY', MAX_DOCS_PER_QUERY))] \
                                    for g in xrange(self.gammaTypesNum)]
            if self.explorationBias:
                eFractions = [[1.0, 2.0] \
                        for p in xrange(self.config.get('MAX_DOCS_PER_QUERY', MAX_DOCS_PER_QUERY))]
            # E-step
            for s in sessions:
                query = s.query
                layout = [False] * len(s.layout) if self.ignoreLayout else s.layout
                if self.explorationBias:
                    explorationBiasPossible = any((l and c for (l, c) in zip(s.layout, s.clicks)))
                    firstVerticalPos = -1 if not any(s.layout[:-1]) else [k for (k, l) in enumerate(s.layout) if l][0]
                if self.ignoreIntents:
                    p_I__C_G = {False: 1.0, True: 0}
                else:
                    a = self._getSessionProb(s) * (1 - s.intentWeight)
                    b = 1 * s.intentWeight
                    p_I__C_G = {False: a / (a + b), True: b / (a + b)}
                self.queryIntentsWeights[query].append(p_I__C_G[True])
                prevClick = -1
                for rank, c in enumerate(s.clicks):
                    url = s.results[rank]
                    for intent in possibleIntents:
                        a = self.alpha[intent][query][url]
                        if self.explorationBias and explorationBiasPossible:
                            e = self.e[firstVerticalPos]
                        if c == 0:
                            g = self.getGamma(self.gamma, rank, prevClick, layout, intent)
                            gCorrection = 1
                            # BUG FIX: the three `s.layout[...]` lookups below used a
                            # stale variable `k` (leaked from the firstVerticalPos list
                            # comprehension) instead of the current `rank`; cf. the
                            # correct use of `rank` in _get_click_probs.
                            if self.explorationBias and explorationBiasPossible and not s.layout[rank]:
                                gCorrection = 1 - e
                                g *= gCorrection
                            alphaFractions[intent][query][url][0] += a * (1 - g) / (1 - a * g) * p_I__C_G[intent]
                            self.getGamma(gammaFractions, rank, prevClick, layout, intent)[0] += g / gCorrection * (1 - a) / (1 - a * g) * p_I__C_G[intent]
                            if self.explorationBias and explorationBiasPossible:
                                eFractions[firstVerticalPos][0] += (e if s.layout[rank] else e / (1 - a * g)) * p_I__C_G[intent]
                        else:
                            alphaFractions[intent][query][url][0] += 1 * p_I__C_G[intent]
                            self.getGamma(gammaFractions, rank, prevClick, layout, intent)[0] += 1 * p_I__C_G[intent]
                            if self.explorationBias and explorationBiasPossible:
                                eFractions[firstVerticalPos][0] += (e if s.layout[rank] else 0) * p_I__C_G[intent]
                        alphaFractions[intent][query][url][1] += 1 * p_I__C_G[intent]
                        self.getGamma(gammaFractions, rank, prevClick, layout, intent)[1] += 1 * p_I__C_G[intent]
                        if self.explorationBias and explorationBiasPossible:
                            eFractions[firstVerticalPos][1] += 1 * p_I__C_G[intent]
                    if c != 0:
                        prevClick = rank
            if not self.config.get('PRETTY_LOG', PRETTY_LOG):
                sys.stderr.write('E')
            # M-step
            sum_square_displacement = 0.0
            for i in possibleIntents:
                for q in xrange(max_query_id):
                    for url, aF in alphaFractions[i][q].iteritems():
                        new_alpha = aF[0] / aF[1]
                        sum_square_displacement += (self.alpha[i][q][url] - new_alpha) ** 2
                        self.alpha[i][q][url] = new_alpha
            for g in xrange(self.gammaTypesNum):
                for r in xrange(self.config.get('MAX_DOCS_PER_QUERY', MAX_DOCS_PER_QUERY)):
                    for d in xrange(self.config.get('MAX_DOCS_PER_QUERY', MAX_DOCS_PER_QUERY)):
                        gF = gammaFractions[g][r][d]
                        new_gamma = gF[0] / gF[1]
                        sum_square_displacement += (self.gamma[g][r][d] - new_gamma) ** 2
                        self.gamma[g][r][d] = new_gamma
            if self.explorationBias:
                for p in xrange(self.config.get('MAX_DOCS_PER_QUERY', MAX_DOCS_PER_QUERY)):
                    new_e = eFractions[p][0] / eFractions[p][1]
                    sum_square_displacement += (self.e[p] - new_e) ** 2
                    self.e[p] = new_e
            if not self.config.get('PRETTY_LOG', PRETTY_LOG):
                sys.stderr.write('M\n')
            rmsd = math.sqrt(sum_square_displacement)
            if self.config.get('PRETTY_LOG', PRETTY_LOG):
                sys.stderr.write('%d..' % (iteration_count + 1))
            else:
                print >>sys.stderr, 'Iteration: %d, ERROR: %f' % (iteration_count + 1, rmsd)
        if self.config.get('PRETTY_LOG', PRETTY_LOG):
            sys.stderr.write('\n')
        for q, intentWeights in self.queryIntentsWeights.iteritems():
            self.queryIntentsWeights[q] = sum(intentWeights) / len(intentWeights)

    def _getSessionProb(self, s):
        # Ratio P(C | I=False) / P(C | I=True), used for the intent posterior.
        clickProbs = self._get_click_probs(s, [False, True])
        N = len(s.clicks)
        return clickProbs[False][N - 1] / clickProbs[True][N - 1]

    @staticmethod
    def getGamma(gammas, k, prevClick, layout, intent):
        # gamma index: 2 * [current position is vertical] + [vertical intent]
        index = (2 if layout[k] else 0) + (1 if intent else 0)
        return gammas[index][k][k - prevClick - 1]

    def _get_click_probs(self, s, possibleIntents):
        """
        Returns clickProbs list
        clickProbs[i][k] = P(C_1, ..., C_k | I=i)
        """
        clickProbs = dict((i, []) for i in possibleIntents)
        firstVerticalPos = -1 if not any(s.layout[:-1]) else [k for (k, l) in enumerate(s.layout) if l][0]
        prevClick = -1
        layout = [False] * len(s.layout) if self.ignoreLayout else s.layout
        for rank, c in enumerate(s.clicks):
            url = s.results[rank]
            prob = {False: 0.0, True: 0.0}
            for i in possibleIntents:
                a = self.alpha[i][s.query][url]
                g = self.getGamma(self.gamma, rank, prevClick, layout, i)
                if self.explorationBias and any(s.layout[k] and s.clicks[k] for k in xrange(rank)) and not s.layout[rank]:
                    g *= 1 - self.e[firstVerticalPos]
                prevProb = 1 if rank == 0 else clickProbs[i][-1]
                if c == 0:
                    clickProbs[i].append(prevProb * (1 - a * g))
                else:
                    clickProbs[i].append(prevProb * a * g)
            if c != 0:
                prevClick = rank
        return clickProbs
class EbUbmModel(UbmModel):
    """UBM with the exploration-bias extension always enabled."""

    def __init__(self, ignoreIntents=True, ignoreLayout=True, config=None):
        UbmModel.__init__(self, ignoreIntents, ignoreLayout, explorationBias=True,
                          config=config)
class DcmModel(ClickModel):
gammaTypesNum = 4
def train(self, sessions):
max_query_id = self.config.get('MAX_QUERY_ID')
if max_query_id is None:
print >>sys.stderr, 'WARNING: no MAX_QUERY_ID specified for', self
max_query_id = 100000
possibleIntents = [False] if self.ignoreIntents else [False, True]
urlRelFractions = dict((i, [defaultdict(lambda: [1.0, 1.0]) for q in xrange(max_query_id)]) for i in possibleIntents)
gammaFractions = [[[1.0, 1.0] for g in xrange(self.gammaTypesNum)] \
for r in xrange(self.config.get('MAX_DOCS_PER_QUERY', MAX_DOCS_PER_QUERY))]
for s in sessions:
query = s.query
layout = [False] * len(s.layout) if self.ignoreLayout else s.layout
lastClickedPos = self.config.get('MAX_DOCS_PER_QUERY', MAX_DOCS_PER_QUERY) - 1
for k, c in enumerate(s.clicks):
if c != 0:
lastClickedPos = k
intentWeights = {False: 1.0} if self.ignoreIntents else {False: 1 - s.intentWeight, True: s.intentWeight}
for k, (u, c) in enumerate(zip(s.results, s.clicks[:(lastClickedPos + 1)])):
for i in possibleIntents:
if c != 0:
urlRelFractions[i][query][u][1] += intentWeights[i]
if k == lastClickedPos:
self.getGamma(gammaFractions[k], k, layout, i)[1] += intentWeights[i]
else:
self.getGamma(gammaFractions[k], k, layout, i)[0] += intentWeights[i]
else:
urlRelFractions[i][query][u][0] += intentWeights[i]
self.urlRelevances = dict((i,
[defaultdict(lambda: self.config.get('DEFAULT_REL', DEFAULT_REL)) \
for q in xrange(max_query_id)]) for i in possibleIntents)
self.gammas = [[0.5 for g in xrange(self.gammaTypesNum)] \
for r in xrange(self.config.get('MAX_DOCS_PER_QUERY', MAX_DOCS_PER_QUERY))]
for i in possibleIntents:
for query, d in enumerate(urlRelFractions[i]):
if not d:
continue
for url, relFractions in d.iteritems():
self.urlRelevances[i][query][url] = relFractions[1] / (relFractions[1] + relFractions[0])
for k in xrange(self.config.get('MAX_DOCS_PER_QUERY', MAX_DOCS_PER_QUERY)):
for g in xrange(self.gammaTypesNum):
self.gammas[k][g] = gammaFractions[k][g][0] / (gammaFractions[k][g][0] + gammaFractions[k][g][1])
def _get_click_probs(self, s, possibleIntents):
    """Return, per intent, the list of joint probabilities P(C_1, ..., C_k)
    for the observed click vector of session *s*, computed with the trained
    urlRelevances and gammas (cascade-style examination model).
    """
    clickProbs = {False: [], True: []}  # P(C_1, ..., C_k)
    query = s.query
    layout = [False] * len(s.layout) if self.ignoreLayout else s.layout
    for i in possibleIntents:
        examinationProb = 1.0  # P(C_1, ..., C_{k - 1}, E_k = 1)
        for k, c in enumerate(s.clicks):
            r = self.urlRelevances[i][query][s.results[k]]
            prevProb = 1 if k == 0 else clickProbs[i][-1]
            if c == 0:
                # P(C_1, ..., C_k = 0) = P(C_1, ..., C_{k-1}) - P(C_1, ..., C_k = 1)
                clickProbs[i].append(prevProb - examinationProb * r)
                # P(C_1, ..., C_k, E_{k+1} = 1) = P(E_{k+1} = 1 | C_k, E_k = 1) * P(C_k | E_k = 1) * P(C_1, ..., C_{k - 1}, E_k = 1)
                # NOTE(review): no gamma factor on the non-click branch here --
                # matches a simplified DBN where examination continues w.p. (1 - r).
                examinationProb *= 1 - r
            else:
                clickProbs[i].append(examinationProb * r)
                # P(C_1, ..., C_k, E_{k+1} = 1) = P(E_{k+1} = 1 | C_k, E_k = 1) * P(C_k | E_k = 1) * P(C_1, ..., C_{k - 1}, E_k = 1)
                examinationProb *= self.getGamma(self.gammas[k], k, layout, i) * r
    return clickProbs
@staticmethod
def getGamma(gammas, k, layout, intent):
    # Delegates to DbnModel.getGamma (defined elsewhere), which picks one of
    # the gammaTypesNum entries of *gammas* based on position/layout/intent.
    return DbnModel.getGamma(gammas, k, layout, intent)
| varepsilon/clickmodels | clickmodels/inference.py | Python | bsd-3-clause | 36,839 | 0.005076 |
# ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2013, Numenta, Inc. Unless you have an agreement
# with Numenta, Inc., for a separate license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU Affero Public License for more details.
#
# You should have received a copy of the GNU Affero Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------
import os
import pickle
from pkg_resources import resource_filename
from nupic.regions.record_sensor import RecordSensor
from nupic.data.file_record_stream import FileRecordStream
"""
Generate column statistics for a StandardSource.
Each entry in statsInfo corresponds to one column, and contains a list
of statistics that should be computed for that column. Known statistics
are:
for floating point or integer values:
number -- min, max, mean
for string or integer values:
category -- list of all unique values and count
The model for a stats object is that you call the constructor with
the first value, and then add values with add().
(The alternative would be no args for the constructor, and
all values would be added with add()).
There are two reasons for this:
- no initialization check required every time we add a value
- getStats() can always return a valid result
"""
class NumberStatsCollector(object):
    """Accumulates min/max/sum/count statistics for a numeric column.

    The first value added validates the type and seeds the statistics;
    subsequent values are folded in without re-validation.
    """

    validTypes = [int, float]

    def __init__(self):
        self.min = 0
        self.max = 0
        self.sum = 0
        self.n = 0
        self.initialized = False

    def _addFirst(self, value):
        # First value: type-check, then seed every running statistic with it.
        if type(value) not in self.validTypes:
            raise RuntimeError(
                "NumberStatsCollector -- value '%s' is not a valid type" % value)
        seed = float(value)
        self.min = seed
        self.max = seed
        self.sum = seed
        self.n = 1
        self.initialized = True

    def add(self, value):
        """Fold one more value into the running statistics."""
        if not self.initialized:
            self._addFirst(value)
            return
        v = float(value)
        self.min = min(self.min, v)
        self.max = max(self.max, v)
        self.sum += v
        self.n += 1

    def getStats(self):
        """Return the collected statistics (average = sum / n)."""
        return dict(min=self.min,
                    max=self.max,
                    sum=self.sum,
                    n=self.n,
                    average=self.sum / self.n)
class CategoryStatsCollector(object):
    """Counts occurrences of each distinct value in a categorical column."""

    def __init__(self):
        self.categories = dict()

    def add(self, value):
        """Increment the occurrence count for *value*."""
        seen = self.categories.get(value, 0)
        self.categories[value] = seen + 1

    def getStats(self):
        """Return the value -> count mapping wrapped in a stats dict."""
        return dict(categories=self.categories)
def getStatsFilename(filename, statsInfo, filters=()):
    """Build the cache-file path for the statistics of *filename*.

    The name encodes the requested stat keys (and filter short names, if
    any) so different requests cache to different files, e.g.
    ``/d/data.csv`` + ``{'consumption': ...}`` -> ``/d/stats_consumption_data.stats``.

    :param filename: absolute path to a ``.csv`` data file
    :param statsInfo: dict of field name -> stats descriptor/collector
    :param filters: optional sequence of filter objects exposing
        ``getShortName()`` (default: empty; a tuple avoids the shared
        mutable-default pitfall while remaining iterable/len()-able)
    :raises RuntimeError: if *filename* is relative or not a ``.csv`` file
    """
    if not os.path.isabs(filename):
        raise RuntimeError("Filename %s is not an absolute path" % filename)
    if not filename.endswith(".csv"):
        raise RuntimeError("generateStats only supports csv files: %s" % filename)
    d = os.path.dirname(filename)
    # Swap only the trailing "csv" for "stats".  The previous
    # str.replace("csv", "stats") also corrupted basenames that merely
    # contained "csv" (e.g. "mycsvdata.csv" -> "mystatsdata.stats").
    basename = os.path.basename(filename)[:-len("csv")] + "stats"
    sstring = "stats"
    for key in statsInfo:
        sstring += "_" + key
    if len(filters) > 0:
        sstring += "_filters"
        for f in filters:  # renamed from "filter" to avoid shadowing the builtin
            sstring += "_" + f.getShortName()
    statsFilename = os.path.join(d, sstring + "_" + basename)
    return statsFilename
def generateStats(filename, statsInfo, maxSamples = None, filters=[], cache=True):
"""Generate requested statistics for a dataset and cache to a file.
If filename is None, then don't cache to a file"""
# Sanity checking
if not isinstance(statsInfo, dict):
raise RuntimeError("statsInfo must be a dict -- "
"found '%s' instead" % type(statsInfo))
filename = resource_filename("nupic.datafiles", filename)
if cache:
statsFilename = getStatsFilename(filename, statsInfo, filters)
# Use cached stats if found AND if it has the right data
if os.path.exists(statsFilename):
try:
r = pickle.load(open(statsFilename, "rb"))
except:
# Ok to ignore errors -- we will just re-generate the file
print "Warning: unable to load stats for %s -- " \
"will regenerate" % filename
r = dict()
requestedKeys = set([s for s in statsInfo])
availableKeys = set(r.keys())
unavailableKeys = requestedKeys.difference(availableKeys)
if len(unavailableKeys ) == 0:
return r
else:
print "generateStats: re-generating stats file %s because " \
"keys %s are not available" % \
(filename, str(unavailableKeys))
os.remove(filename)
print "Generating statistics for file '%s' with filters '%s'" % (filename, filters)
sensor = RecordSensor()
sensor.dataSource = FileRecordStream(filename)
sensor.preEncodingFilters = filters
# Convert collector description to collector object
stats = []
for field in statsInfo:
# field = key from statsInfo
if statsInfo[field] == "number":
# This wants a field name e.g. consumption and the field type as the value
statsInfo[field] = NumberStatsCollector()
elif statsInfo[field] == "category":
statsInfo[field] = CategoryStatsCollector()
else:
raise RuntimeError("Unknown stats type '%s' for field '%s'" % (statsInfo[field], field))
# Now collect the stats
if maxSamples is None:
maxSamples = 500000
for i in xrange(maxSamples):
try:
record = sensor.getNextRecord()
except StopIteration:
break
for (name, collector) in statsInfo.items():
collector.add(record[name])
del sensor
# Assemble the results and return
r = dict()
for (field, collector) in statsInfo.items():
stats = collector.getStats()
if field not in r:
r[field] = stats
else:
r[field].update(stats)
if cache:
f = open(statsFilename, "wb")
pickle.dump(r, f)
f.close()
# caller may need to know name of cached file
r["_filename"] = statsFilename
return r
| ywcui1990/nupic | src/nupic/data/stats.py | Python | agpl-3.0 | 6,351 | 0.013384 |
# -*- coding: utf-8 -*-
# Generated by Django 1.11.1 on 2017-08-02 21:54
from __future__ import unicode_literals
from django.db import migrations, models
def copy_to_question_page(apps, schema_editor):
    """Backfill new_position/new_section/new_sites on every QuestionPage
    from the corresponding legacy fields, on the migration's database."""
    db_alias = schema_editor.connection.alias
    question_page = apps.get_model('wizard_builder.QuestionPage')
    for page in question_page.objects.using(db_alias):
        page.new_position = page.position
        page.new_section = page.section
        for site in page.sites.all():
            page.new_sites.add(site)
        page.save()
class Migration(migrations.Migration):
    """Add new_position/new_section/new_sites fields to QuestionPage and
    backfill them from the legacy fields via copy_to_question_page."""

    dependencies = [
        ('sites', '0002_alter_domain_unique'),
        ('wizard_builder', '0008_remove_textpage'),
    ]

    operations = [
        migrations.AddField(
            model_name='questionpage',
            name='new_position',
            field=models.PositiveSmallIntegerField(default=0, verbose_name='position'),
        ),
        migrations.AddField(
            model_name='questionpage',
            name='new_section',
            field=models.IntegerField(choices=[(1, 'When'), (2, 'Where'), (3, 'What'), (4, 'Who')], default=1),
        ),
        migrations.AddField(
            model_name='questionpage',
            name='new_sites',
            field=models.ManyToManyField(to='sites.Site'),
        ),
        # Data migration; reverse is a no-op (fields are simply dropped if
        # the schema migration itself is reversed).
        migrations.RunPython(
            copy_to_question_page,
            reverse_code=migrations.RunPython.noop,
        ),
    ]
| scattermagic/django-wizard-builder | wizard_builder/migrations/0009_pagebase_to_questionpage.py | Python | bsd-3-clause | 1,476 | 0.001355 |
#!/bin/python
# -*- coding: utf-8 -*-
# Fenrir TTY screen reader
# By Chrys, Storm Dragon, and contributers.
import os, struct, sys, pty, tty, termios, shlex, signal, pyte, time, fcntl ,getpass
from select import select
from fenrirscreenreader.core import debug
from fenrirscreenreader.core.eventData import fenrirEventType
from fenrirscreenreader.core.screenDriver import screenDriver
from fenrirscreenreader.utils import screen_utils
class fenrirScreen(pyte.Screen):
    """pyte.Screen subclass tolerating the extra "private" set_margins kwarg."""
    def set_margins(self, *args, **kwargs):
        # Some escape sequences hand pyte a "private" marker it does not
        # accept as a keyword; drop it before delegating.
        kwargs.pop("private", None)
        super(fenrirScreen, self).set_margins(*args, **kwargs)
class Terminal:
    """Wraps a pyte screen + byte stream for one pty session and exposes
    the rendered text, per-cell attributes and cursor as a plain dict."""

    def __init__(self, columns, lines, p_in):
        self.text = ''
        self.attributes = None
        self.screen = fenrirScreen(columns, lines)
        # Anything the emulated application asks to send back (e.g. report
        # requests) is written to the process's input end.
        self.screen.write_process_input = \
            lambda data: p_in.write(data.encode())
        self.stream = pyte.ByteStream()
        self.stream.attach(self.screen)

    def feed(self, data):
        """Feed raw bytes from the child process into the emulator."""
        self.stream.feed(data)

    def updateAttributes(self, initialize = False):
        """Rebuild self.attributes (per-line lists of per-cell attribute
        lists) from the pyte buffer; only dirty lines are re-padded unless
        *initialize* is set."""
        buffer = self.screen.buffer
        lines = None
        if not initialize:
            lines = self.screen.dirty
        else:
            lines = range(self.screen.lines)
        # Each cell attribute drops the leading char and gains three filler
        # entries [False, 'default', 'default'] for downstream consumers.
        self.attributes = [[list(attribute[1:]) + [False, 'default', 'default'] for attribute in line.values()] for line in buffer.values()]
        for y in lines:
            try:
                # Probe: raises if line y is missing from the rebuilt list.
                t = self.attributes[y]
            except:
                self.attributes.append([])
                self.attributes[y] = [list(attribute[1:]) + [False, 'default', 'default'] for attribute in (buffer[y].values())]
            if len(self.attributes[y]) < self.screen.columns:
                # Pad short lines with default-attribute cells up to width.
                diff = self.screen.columns - len(self.attributes[y])
                self.attributes[y] += [['default', 'default', False, False, False, False, False, False, 'default', 'default']] * diff

    def resize(self, lines, columns):
        """Resize the emulated screen, clamp the cursor and rebuild attributes."""
        self.screen.resize(lines, columns)
        self.setCursor()
        self.updateAttributes(True)

    def setCursor(self, x = -1, y = -1):
        # NOTE(review): xPos/yPos are computed but unused -- the method only
        # clamps the current cursor into the new screen bounds.
        xPos = x
        yPos = y
        if xPos == -1:
            xPos = self.screen.cursor.x
        if yPos == -1:
            yPos = self.screen.cursor.y
        self.screen.cursor.x = min(self.screen.cursor.x, self.screen.columns - 1)
        self.screen.cursor.y = min(self.screen.cursor.y, self.screen.lines - 1)

    def GetScreenContent(self):
        """Snapshot the current screen as a dict and clear pyte's dirty set."""
        cursor = self.screen.cursor
        self.text = '\n'.join(self.screen.display)
        self.updateAttributes(self.attributes == None)
        self.screen.dirty.clear()
        return {"cursor": (cursor.x, cursor.y),
                'lines': self.screen.lines,
                'columns': self.screen.columns,
                "text": self.text,
                'attributes': self.attributes.copy(),
                'screen': 'pty',
                'screenUpdateTime': time.time(),
                }.copy()
class driver(screenDriver):
    """Screen driver that runs the user's shell inside a pty.

    Child output is fed both to a pyte-backed Terminal (for the screen
    reader) and echoed to the real stdout; the user's keystrokes are
    forwarded to the child.  SIGWINCH is bridged into the select() loop via
    a self-pipe so terminal resizes propagate to the pty.
    """
    def __init__(self):
        screenDriver.__init__(self)
        # Self-pipe trick: the SIGWINCH handler writes one byte so the
        # select() loop below wakes up and resizes the pty.
        self.signalPipe = os.pipe()
        self.p_out = None
        self.terminal = None
        self.p_pid = -1
        signal.signal(signal.SIGWINCH, self.handleSigwinch)
    def initialize(self, environment):
        """Store the runtime environment and register the pty thread."""
        self.env = environment
        self.command = self.env['runtime']['settingsManager'].getSetting('general','shell')
        self.shortcutType = self.env['runtime']['inputManager'].getShortcutType()
        self.env['runtime']['processManager'].addCustomEventThread(self.terminalEmulation)
    def getCurrScreen(self):
        # There is only ever one "screen" in pty mode.
        self.env['screen']['oldTTY'] = 'pty'
        self.env['screen']['newTTY'] = 'pty'
    def injectTextToScreen(self, msgBytes, screen = None):
        """Write *msgBytes* (str is UTF-8 encoded) to *screen* or the pty."""
        if not screen:
            screen = self.p_out.fileno()
        if isinstance(msgBytes, str):
            msgBytes = bytes(msgBytes, 'UTF-8')
        os.write(screen, msgBytes)
    def getSessionInformation(self):
        self.env['screen']['autoIgnoreScreens'] = []
        self.env['general']['prevUser'] = getpass.getuser()
        self.env['general']['currUser'] = getpass.getuser()
    def readAll(self, fd, timeout = 0.3, interruptFd = None, len = 65536):
        """Drain *fd* until it is empty, *interruptFd* has data, or *timeout*
        elapses.  Raises EOFError on end of stream.

        NOTE(review): the parameter "len" shadows the builtin; kept for
        backward compatibility (called with len=4096 below).
        """
        msgBytes = b''
        fdList = []
        fdList += [fd]
        if interruptFd:
            fdList += [interruptFd]
        starttime = time.time()
        while True:
            r = screen_utils.hasMoreWhat(fdList, 0.0001)
            # nothing more to read
            if not fd in r:
                break
            data = os.read(fd, len)
            if data == b'':
                raise EOFError
            msgBytes += data
            # exit on interrupt available
            if interruptFd in r:
                break
            # respect timeout but wait a little bit of time to see if something more is here
            if (time.time() - starttime) >= timeout:
                break
        return msgBytes
    def openTerminal(self, columns, lines, command):
        """Fork a child running *command* on a new pty; return the Terminal
        wrapper, the child's pid and the master-side file object."""
        p_pid, master_fd = pty.fork()
        if p_pid == 0:  # Child.
            argv = shlex.split(command)
            env = os.environ.copy()
            #values are VT100,xterm-256color,linux
            try:
                if env["TERM"] == '':
                    env["TERM"] = 'linux'
            except:
                env["TERM"] = 'linux'
            os.execvpe(argv[0], argv, env)
        # File-like object for I/O with the child process aka command.
        p_out = os.fdopen(master_fd, "w+b", 0)
        return Terminal(columns, lines, p_out), p_pid, p_out
    def resizeTerminal(self,fd):
        """Copy the controlling terminal's window size onto the pty *fd*."""
        s = struct.pack('HHHH', 0, 0, 0, 0)
        s = fcntl.ioctl(0, termios.TIOCGWINSZ, s)
        fcntl.ioctl(fd, termios.TIOCSWINSZ, s)
        lines, columns, _, _ = struct.unpack('hhhh', s)
        return lines, columns
    def getTerminalSize(self, fd):
        """Return (lines, columns) of the terminal behind *fd*."""
        s = struct.pack('HHHH', 0, 0, 0, 0)
        lines, columns, _, _ = struct.unpack('HHHH', fcntl.ioctl(fd, termios.TIOCGWINSZ, s))
        return lines, columns
    def handleSigwinch(self, *args):
        # Async-signal-safe: just poke the select() loop via the pipe.
        os.write(self.signalPipe[1], b'w')
    def terminalEmulation(self,active , eventQueue):
        """Main pty loop: multiplex stdin, child output and resize signals
        until *active* is cleared or the child dies, posting fenrir events."""
        try:
            old_attr = termios.tcgetattr(sys.stdin)
            tty.setraw(0)
            lines, columns = self.getTerminalSize(0)
            if self.command == '':
                self.command = screen_utils.getShell()
            self.terminal, self.p_pid, self.p_out = self.openTerminal(columns, lines, self.command)
            lines, columns = self.resizeTerminal(self.p_out)
            self.terminal.resize(lines, columns)
            fdList = [sys.stdin, self.p_out, self.signalPipe[0]]
            while active.value:
                r, _, _ = select(fdList, [], [], 1)
                # none
                if r == []:
                    continue
                # signals
                if self.signalPipe[0] in r:
                    os.read(self.signalPipe[0], 1)
                    lines, columns = self.resizeTerminal(self.p_out)
                    self.terminal.resize(lines, columns)
                # input
                if sys.stdin in r:
                    try:
                        msgBytes = self.readAll(sys.stdin.fileno(), len=4096)
                    except (EOFError, OSError):
                        eventQueue.put({"Type":fenrirEventType.StopMainLoop,"Data":None})
                        break
                    if self.shortcutType == 'KEY':
                        try:
                            self.injectTextToScreen(msgBytes)
                        except:
                            eventQueue.put({"Type":fenrirEventType.StopMainLoop,"Data":None})
                            break
                    else:
                        eventQueue.put({"Type":fenrirEventType.ByteInput,
                                        "Data":msgBytes })
                # output
                if self.p_out in r:
                    try:
                        msgBytes = self.readAll(self.p_out.fileno(), interruptFd=sys.stdin.fileno())
                    except (EOFError, OSError):
                        eventQueue.put({"Type":fenrirEventType.StopMainLoop,"Data":None})
                        break
                    # feed and send event before write; pyte already has the right state
                    # so fenrir can progress before os.write, which should give better reaction time
                    self.terminal.feed(msgBytes)
                    eventQueue.put({"Type":fenrirEventType.ScreenUpdate,
                                    "Data":screen_utils.createScreenEventData(self.terminal.GetScreenContent())
                                    })
                    self.injectTextToScreen(msgBytes, screen=sys.stdout.fileno())
        except Exception as e:  # Process died?
            print(e)
            eventQueue.put({"Type":fenrirEventType.StopMainLoop,"Data":None})
        finally:
            # Tear down the child and restore the user's terminal settings.
            os.kill(self.p_pid, signal.SIGTERM)
            self.p_out.close()
            termios.tcsetattr(sys.stdin, termios.TCSADRAIN, old_attr)
            eventQueue.put({"Type":fenrirEventType.StopMainLoop,"Data":None})
            sys.exit(0)
    def getCurrApplication(self):
        # No-op in pty mode.
        pass
| chrys87/fenrir | src/fenrirscreenreader/screenDriver/ptyDriver.py | Python | lgpl-3.0 | 9,221 | 0.010845 |
# -*- coding: utf-8 -*-
# Generated by Django 1.10.1 on 2016-12-17 20:50
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
    """Set human-readable verbose names on the wochen_menge model."""

    dependencies = [
        ('jordbruksmark', '0002_auto_20161217_2140'),
    ]

    operations = [
        # Metadata-only change; no database schema is altered.
        migrations.AlterModelOptions(
            name='wochen_menge',
            options={'verbose_name': 'Wochen Menge', 'verbose_name_plural': 'Wochen Mengen'},
        ),
    ]
| ortoloco/jordbruksmark | jordbruksmark/migrations/0003_auto_20161217_2150.py | Python | gpl-3.0 | 472 | 0.002119 |
# Copyright 2020 Cloudbase Solutions Srl
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import netaddr
from oslo_log import log as oslo_logging
from cloudbaseinit import conf as cloudbaseinit_conf
from cloudbaseinit import exception
from cloudbaseinit.metadata.services import base
from cloudbaseinit.metadata.services import baseconfigdrive
from cloudbaseinit.models import network as network_model
from cloudbaseinit.utils import debiface
from cloudbaseinit.utils import network as network_utils
from cloudbaseinit.utils import serialization
# Global configuration handle and module logger (oslo conventions).
CONF = cloudbaseinit_conf.CONF
LOG = oslo_logging.getLogger(__name__)
class NoCloudNetworkConfigV1Parser(object):
    """Parses cloud-init "network config v1" data into NetworkDetailsV2.

    Each supported config item type (physical NIC, bond, vlan, nameserver)
    has a dedicated ``_parse_*_config_item`` method returning a partial
    NetworkDetailsV2; ``parse`` merges them.
    """
    NETWORK_LINK_TYPE_PHY = 'physical'
    NETWORK_LINK_TYPE_BOND = 'bond'
    NETWORK_LINK_TYPE_VLAN = 'vlan'
    NETWORK_SERVICE_NAMESERVER = 'nameserver'
    SUPPORTED_NETWORK_CONFIG_TYPES = [
        NETWORK_LINK_TYPE_PHY,
        NETWORK_LINK_TYPE_BOND,
        NETWORK_LINK_TYPE_VLAN,
        NETWORK_SERVICE_NAMESERVER
    ]

    def _parse_subnets(self, subnets, link_name):
        """Convert a config item's "subnets" list into network_model.Network
        objects bound to *link_name*; dhcp/dhcp6 subnets are skipped."""
        networks = []
        if not subnets or not isinstance(subnets, list):
            LOG.warning("Subnets '%s' is empty or not a list.",
                        subnets)
            return networks
        for subnet in subnets:
            if not isinstance(subnet, dict):
                LOG.warning("Subnet '%s' is not a dictionary",
                            subnet)
                continue
            if subnet.get("type") in ["dhcp", "dhcp6"]:
                continue
            routes = []
            for route_data in subnet.get("routes", []):
                route_netmask = route_data.get("netmask")
                route_network = route_data.get("network")
                route_network_cidr = network_utils.ip_netmask_to_cidr(
                    route_network, route_netmask)
                route_gateway = route_data.get("gateway")
                route = network_model.Route(
                    network_cidr=route_network_cidr,
                    gateway=route_gateway
                )
                routes.append(route)
            address_cidr = subnet.get("address")
            netmask = subnet.get("netmask")
            if netmask:
                # "address" + "netmask" pair -> CIDR notation.
                address_cidr = network_utils.ip_netmask_to_cidr(
                    address_cidr, netmask)
            gateway = subnet.get("gateway")
            if gateway:
                # Map the gateway as a default route, depending on the
                # IP family / version (4 or 6)
                gateway_net_cidr = "0.0.0.0/0"
                if netaddr.valid_ipv6(gateway):
                    gateway_net_cidr = "::/0"
                routes.append(
                    network_model.Route(
                        network_cidr=gateway_net_cidr,
                        gateway=gateway
                    )
                )
            networks.append(network_model.Network(
                link=link_name,
                address_cidr=address_cidr,
                dns_nameservers=subnet.get("dns_nameservers"),
                routes=routes
            ))
        return networks

    def _parse_physical_config_item(self, item):
        """Parse one "physical" item into a single-link NetworkDetailsV2."""
        if not item.get('name'):
            LOG.warning("Physical NIC does not have a name.")
            return
        link = network_model.Link(
            id=item.get('name'),
            name=item.get('name'),
            type=network_model.LINK_TYPE_PHYSICAL,
            enabled=True,
            mac_address=item.get('mac_address'),
            mtu=item.get('mtu'),
            bond=None,
            vlan_link=None,
            vlan_id=None
        )
        return network_model.NetworkDetailsV2(
            links=[link],
            networks=self._parse_subnets(item.get("subnets"), link.name),
            services=[]
        )

    def _parse_bond_config_item(self, item):
        """Parse one "bond" item; validates mode, lacp rate and hash policy
        against the network_model's supported sets."""
        if not item.get('name'):
            LOG.warning("Bond does not have a name.")
            return
        bond_params = item.get('params')
        if not bond_params:
            LOG.warning("Bond does not have parameters")
            return
        bond_mode = bond_params.get('bond-mode')
        if bond_mode not in network_model.AVAILABLE_BOND_TYPES:
            raise exception.CloudbaseInitException(
                "Unsupported bond mode: %s" % bond_mode)
        bond_lacp_rate = None
        if bond_mode == network_model.BOND_TYPE_8023AD:
            # LACP rate is only meaningful for 802.3ad bonds.
            bond_lacp_rate = bond_params.get('bond-lacp-rate')
            if (bond_lacp_rate and bond_lacp_rate not in
                    network_model.AVAILABLE_BOND_LACP_RATES):
                raise exception.CloudbaseInitException(
                    "Unsupported bond lacp rate: %s" % bond_lacp_rate)
        # NOTE(review): key 'xmit_hash_policy' (underscores) -- verify the
        # NoCloud/cloud-init payload does not use 'bond-xmit-hash-policy'.
        bond_xmit_hash_policy = bond_params.get('xmit_hash_policy')
        if (bond_xmit_hash_policy and bond_xmit_hash_policy not in
                network_model.AVAILABLE_BOND_LB_ALGORITHMS):
            raise exception.CloudbaseInitException(
                "Unsupported bond hash policy: %s" %
                bond_xmit_hash_policy)
        bond_interfaces = item.get('bond_interfaces')
        bond = network_model.Bond(
            members=bond_interfaces,
            type=bond_mode,
            lb_algorithm=bond_xmit_hash_policy,
            lacp_rate=bond_lacp_rate,
        )
        link = network_model.Link(
            id=item.get('name'),
            name=item.get('name'),
            type=network_model.LINK_TYPE_BOND,
            enabled=True,
            mac_address=item.get('mac_address'),
            mtu=item.get('mtu'),
            bond=bond,
            vlan_link=None,
            vlan_id=None
        )
        return network_model.NetworkDetailsV2(
            links=[link],
            networks=self._parse_subnets(item.get("subnets"), link.name),
            services=[]
        )

    def _parse_vlan_config_item(self, item):
        """Parse one "vlan" item into a single-link NetworkDetailsV2."""
        if not item.get('name'):
            LOG.warning("VLAN NIC does not have a name.")
            return
        link = network_model.Link(
            id=item.get('name'),
            name=item.get('name'),
            type=network_model.LINK_TYPE_VLAN,
            enabled=True,
            mac_address=item.get('mac_address'),
            mtu=item.get('mtu'),
            bond=None,
            vlan_link=item.get('vlan_link'),
            vlan_id=item.get('vlan_id')
        )
        return network_model.NetworkDetailsV2(
            links=[link],
            networks=self._parse_subnets(item.get("subnets"), link.name),
            services=[]
        )

    def _parse_nameserver_config_item(self, item):
        """Parse one "nameserver" item into a services-only NetworkDetailsV2."""
        return network_model.NetworkDetailsV2(
            links=[],
            networks=[],
            services=[network_model.NameServerService(
                addresses=item.get('address', []),
                search=item.get('search')
            )]
        )

    def _get_network_config_parser(self, parser_type):
        """Return the ``_parse_*`` method for *parser_type* or raise."""
        parsers = {
            self.NETWORK_LINK_TYPE_PHY: self._parse_physical_config_item,
            self.NETWORK_LINK_TYPE_BOND: self._parse_bond_config_item,
            self.NETWORK_LINK_TYPE_VLAN: self._parse_vlan_config_item,
            self.NETWORK_SERVICE_NAMESERVER: self._parse_nameserver_config_item
        }
        parser = parsers.get(parser_type)
        if not parser:
            # NOTE(review): the message is passed with a comma, unlike the
            # '%'-interpolation used by the other raises in this class --
            # '%s' will likely not be substituted here; confirm intended.
            raise exception.CloudbaseInitException(
                "Network config parser '%s' does not exist",
                parser_type)
        return parser

    def parse(self, network_config):
        """Parse the full v1 "config" list; returns a merged
        NetworkDetailsV2, or None when the input is empty/invalid."""
        links = []
        networks = []
        services = []
        if not network_config:
            LOG.warning("Network configuration is empty")
            return
        if not isinstance(network_config, list):
            LOG.warning("Network config '%s' is not a list.",
                        network_config)
            return
        for network_config_item in network_config:
            if not isinstance(network_config_item, dict):
                LOG.warning("Network config item '%s' is not a dictionary",
                            network_config_item)
                continue
            net_conf_type = network_config_item.get("type")
            if net_conf_type not in self.SUPPORTED_NETWORK_CONFIG_TYPES:
                LOG.warning("Network config type '%s' is not supported",
                            net_conf_type)
                continue
            net_details = (
                self._get_network_config_parser(net_conf_type)
                (network_config_item))
            if net_details:
                links += net_details.links
                networks += net_details.networks
                services += net_details.services
        return network_model.NetworkDetailsV2(
            links=links,
            networks=networks,
            services=services
        )
class NoCloudConfigDriveService(baseconfigdrive.BaseConfigDriveService):
    """NoCloud config-drive metadata service (volume label "cidata").

    Reads the cloud-init NoCloud artifacts ("meta-data", "user-data",
    "network-config") through the base class's cached-data helpers.
    """
    def __init__(self):
        super(NoCloudConfigDriveService, self).__init__(
            'cidata', 'meta-data')
        self._meta_data = {}  # lazily parsed "meta-data" cache

    def get_user_data(self):
        """Return the raw "user-data" blob."""
        return self._get_cache_data("user-data")

    def _get_meta_data(self):
        """Parse and memoize the "meta-data" file (JSON or YAML)."""
        if self._meta_data:
            return self._meta_data
        raw_meta_data = self._get_cache_data("meta-data", decode=True)
        try:
            self._meta_data = (
                serialization.parse_json_yaml(raw_meta_data))
        except serialization.YamlParserConfigError as ex:
            # On parse failure the empty cache stays in place, so callers
            # see missing keys rather than an exception.
            LOG.error("Metadata could not be parsed")
            LOG.exception(ex)
        return self._meta_data

    def get_host_name(self):
        return self._get_meta_data().get('local-hostname')

    def get_instance_id(self):
        return self._get_meta_data().get('instance-id')

    def get_public_keys(self):
        """Return the OpenSSH public keys listed under "public-keys"."""
        raw_ssh_keys = self._get_meta_data().get('public-keys')
        if not raw_ssh_keys:
            return []
        # NOTE(review): assumes 'public-keys' maps key names to dicts with
        # an 'openssh-key' entry -- confirm against the NoCloud format.
        return [raw_ssh_keys[key].get('openssh-key') for key in raw_ssh_keys]

    def get_network_details(self):
        """Return parsed Debian-style network config, or None if absent."""
        debian_net_config = self._get_meta_data().get('network-interfaces')
        if not debian_net_config:
            return None
        return debiface.parse(debian_net_config)

    def get_network_details_v2(self):
        """Return NetworkDetailsV2 from "network-config" (v1 schema only);
        None on any missing/invalid/unsupported input."""
        try:
            raw_network_data = self._get_cache_data("network-config",
                                                    decode=True)
            network_data = serialization.parse_json_yaml(raw_network_data)
            if not network_data:
                LOG.info("V2 network metadata is empty")
                return
            if not isinstance(network_data, dict):
                LOG.warning("V2 network metadata is not a dictionary")
                return
        except base.NotExistingMetadataException:
            LOG.info("V2 network metadata not found")
            return
        except serialization.YamlParserConfigError:
            LOG.exception("V2 network metadata could not be deserialized")
            return
        network_data_version = network_data.get("version")
        if network_data_version != 1:
            LOG.error("Network data version '%s' is not supported",
                      network_data_version)
            return
        network_config_parser = NoCloudNetworkConfigV1Parser()
        return network_config_parser.parse(network_data.get("config"))
| stackforge/cloudbase-init | cloudbaseinit/metadata/services/nocloudservice.py | Python | apache-2.0 | 11,949 | 0 |
#!/usr/bin/env python
import sys
def inv(s):
    """Return the string form of the negated signed number *s*.

    "-5" -> "5", "+5" -> "-5", "5" -> "-5".
    """
    sign, rest = s[0], s[1:]
    if sign == '-':
        return rest
    if sign == '+':
        return '-' + rest
    # Plain (unsigned) number.
    return '-' + s
# Filter: rotate points and segments read from stdin by 90 degrees,
# mapping each coordinate pair (x, y) -> (-y, x).
if len(sys.argv) != 1:
    print 'Usage:', sys.argv[0]
    sys.exit(1)
for line in sys.stdin:
    linesplit = line.strip().split()
    if len(linesplit) == 3:
        # Point record: "p x y" -> "p -y x"
        assert(linesplit[0] == 'p')
        print('p ' + inv(linesplit[2]) + ' ' + linesplit[1])
    elif len(linesplit) == 5:
        # Segment record: "s x1 y1 x2 y2" -> "s -y1 x1 -y2 x2"
        assert(linesplit[0] == 's')
        print('s ' + \
              inv(linesplit[2]) + ' ' + linesplit[1] + ' ' + \
              inv(linesplit[4]) + ' ' + linesplit[3] )
    elif len(linesplit) == 0:
        # Preserve blank lines.
        print
| hlzz/dotfiles | graphics/cgal/Segment_Delaunay_graph_Linf_2/developer_scripts/lsprotate90.py | Python | bsd-3-clause | 636 | 0.023585 |
# -*- coding: utf-8 -*-
"""
anparser - an Open Source Android Artifact Parser
Copyright (C) 2015 Preston Miller
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
__author__ = 'prmiller91'
__license__ = 'GPLv3'
__date__ = '20150129'
__version__ = '0.00'
from collections import OrderedDict
import logging
import yara
import pandas as pd
# Module-level state shared between yara_parser and yara_processor.
path = None    # NOTE(review): only shadowed by locals/params below; appears unused
match = None   # NOTE(review): shadowed by the local in yara_parser; appears unused
yara_list = []  # accumulates one OrderedDict per matched string; becomes the DataFrame
def yara_parser(file_list, rules_path):
    """
    Parses files for Malware signatures with Yara

    :param file_list: List of all files
    :param rules_path: Path to custom Yara rules
    :return: Pandas DataFrame of matches (one row per matched string)
    """
    try:
        rules = yara.compile(rules_path)
    except (yara.libyara_wrapper.YaraSyntaxError, IOError) as exception:
        msg = 'Yara Rule Compilation Error: {0:s}'.format(rules_path + ' > ' + str(exception))
        print(msg)
        logging.error(msg)
        raise IOError
    for file_path in file_list:
        # BUGFIX: reset per file.  Previously `match` was only assigned
        # inside the try block, so a YaraMatchError on the first file made
        # `if match:` raise UnboundLocalError, and an error on a later file
        # re-processed the previous file's stale match.
        match = None
        try:
            match = rules.match(file_path)
        except yara.libyara_wrapper.YaraMatchError as exception:
            msg = 'Yara Match Error: {0:s}'.format(file_path + ' > ' + str(exception))
            logging.error(msg)
        if match:
            yara_processor(match, file_path)
    return pd.DataFrame(yara_list)
def yara_processor(match, path):
    """Flatten one Yara match into per-string rows appended to yara_list.

    :param match: A single yara match (rule name -> list of match dicts)
    :param path: File path the match came from
    :return: None; rows are accumulated on the module-level yara_list
    """
    for key in match.keys():
        hit = match[key][0]
        rule = hit['rule']
        matches = hit['matches']
        strings = hit['strings']
        meta = hit['meta']
        tags = hit['tags']
        # One output row per matched string.
        for string in strings:
            row = OrderedDict()
            row['File Path'] = path
            row['Rule'] = rule
            row['Matches'] = str(matches)
            if meta != {}:
                row['Author'] = meta.get('author', '')
                row['Description'] = meta.get('description', '')
            else:
                row['Author'] = ''
                row['Description'] = ''
            row['Flag'] = string['flags']
            row['Identifier'] = string['identifier']
            row['Data'] = string['data']
            row['Offset'] = string['offset']
            row['Tags'] = '' if tags == [] else tags
            yara_list.append(row)
yara_data = OrderedDict() | anparser/anparser | anparser/plugins/other_plugins/yara_parser.py | Python | gpl-3.0 | 3,312 | 0.001208 |
from dart.model.base import BaseModel, dictable
@dictable
class ApiKey(BaseModel):
    """Credential record binding an api_key/api_secret pair to a user."""
    def __init__(self, id, user_id, api_key, api_secret):
        """
        :param id: unique identifier of this record
        :type user_id: str
        :type api_key: str
        :type api_secret: str
        """
        self.id = id
        self.user_id = user_id
        self.api_key = api_key
        self.api_secret = api_secret
| RetailMeNotSandbox/dart | src/python/dart/model/api_key.py | Python | mit | 370 | 0.002703 |
import time
import recordlib
if __name__ == "__main__":
    # Wire two GPIO push buttons (TASTER = German for "button") to start and
    # stop recording, then idle until interrupted.
    recordlib.initialize()
    print("waiting for input")
    recordlib.logging.info("waiting for input")
    try:
        # define interrupt, get rising signal, debounce pin
        recordlib.GPIO.add_event_detect(
            recordlib.TASTER_1,
            recordlib.GPIO.RISING,
            callback=recordlib.start_recording,
            bouncetime=1000
        )
        recordlib.GPIO.add_event_detect(
            recordlib.TASTER_2,
            recordlib.GPIO.RISING,
            callback=recordlib.stop_recording,
            bouncetime=1000
        )
        # keep script running
        while True:
            time.sleep(0.5)
    finally:
        # Always release the GPIO pins, even on Ctrl-C.
        recordlib.GPIO.cleanup()
        print("\nQuit\n")
| benjaminhabbel/motion_recorder | old/button_loop.py | Python | gpl-3.0 | 770 | 0 |
from __future__ import print_function
from imports import *
import common
class Base( common.Base ):
    """Shared fixture base for the fix_fastq tests (behavior in common.Base)."""
    pass
class TestUnitMiSeqToNewbler(Base):
    """Unit tests for miseq_to_newbler_id read-id conversion."""

    def _C(self, *args, **kwargs):
        # Local import mirrors the original test style.
        from bactpipeline.fix_fastq import miseq_to_newbler_id
        return miseq_to_newbler_id(*args, **kwargs)

    def test_r1_correct(self):
        result = self._C('abcd 1')
        eq_('abcd#0/1 (abcd 1)', result)

    def test_r2_correct(self):
        result = self._C('abcd 2')
        eq_('abcd#0/2 (abcd 2)', result)
class TestUnitModFqRead(Base):
    """Unit tests for mod_fq_read fastq-record rewriting."""

    def _C(self, *args, **kwargs):
        from bactpipeline.fix_fastq import mod_fq_read
        return mod_fq_read(*args, **kwargs)

    def test_mods_correctly(self):
        from bactpipeline.fix_fastq import miseq_to_newbler_id as mtni
        read_id = 'abcd 1'
        seq = 'ATGC'
        qual = 'IIII'
        expected = '{0}\n{1}\n+\n{2}\n'.format(mtni(read_id), seq, qual)
        eq_(expected, self._C(read_id, seq, qual))
class TestUnitParseFq(Base):
    """Unit tests for parse_fq record iteration."""

    def _C(self, *args, **kwargs):
        from bactpipeline.fix_fastq import parse_fq
        return parse_fq(*args, **kwargs)

    def fake_fq(self):
        # Write 100 records; the read number alternates 2,1,2,... via (i % 2) + 1.
        with open('fake.fq', 'w') as handle:
            for i in range(1, 101):
                handle.write('@abcd:{0} {1}\n'.format(i, (i % 2) + 1))
                handle.write('ACGT\n')
                handle.write('+\n')
                handle.write('IIII\n')
        return 'fake.fq'

    def test_parses(self):
        fastq_path = self.fake_fq()
        for read_id, seq, qual in self._C(fastq_path):
            id_prefix = read_id.split()[0].split(':')[0]
            eq_('@abcd', id_prefix)
            eq_('ACGT', seq)
            eq_('IIII', qual)
class TestFunctional( Base ):
    """End-to-end test: runs the installed fix_fastq console script."""
    def sample_files( self ):
        # All sample fastq inputs shipped under test/fixtures/fix_fastq.
        fixdir = join( dirname(__file__), 'fixtures', 'fix_fastq' )
        return glob( join( fixdir, '*.fastq' ) )
    def _C( self, *args, **kwargs ):
        """Invoke the fix_fastq script; returns its exit status."""
        script = 'fix_fastq'
        cmd = [script]
        if kwargs.get('outdir',False):
            cmd += ['-o', kwargs.get('outdir')]
        # args is a 1-tuple holding the fastq list, so list(*args) == list(args[0]).
        cmd += list(*args)
        print(cmd)
        return subprocess.call( cmd )
    def test_runs_correctly( self ):
        fastqs = self.sample_files()
        r = self._C( fastqs )
        eq_( 0, r )
        ok_( exists( 'outdir' ), 'did not create outdir by default' )
        fqs = os.listdir( 'outdir' )
        # Every input fastq must have a same-named output file in outdir.
        eq_( set([]), set([basename(fq) for fq in fastqs]) - set(fqs) )
| VDBWRAIR/bactpipeline | test/test_fix_fastq.py | Python | gpl-2.0 | 2,468 | 0.040519 |
"""
@brief test log(time=200s)
"""
import os
import unittest
import math
import warnings
from pyquickhelper.loghelper import fLOG
from pyquickhelper.pycode import get_temp_folder, is_travis_or_appveyor
from ensae_teaching_cs.special.image.image_synthese_base import Vecteur, Couleur, Source, Repere
from ensae_teaching_cs.special.image.image_synthese_sphere import Sphere
from ensae_teaching_cs.special.image.image_synthese_phong import ScenePhong
from ensae_teaching_cs.special.image.image_synthese_facette import Rectangle
from ensae_teaching_cs.special.image.image_synthese_facette_image import RectangleImage, SphereReflet
class TestImageSyntheseImage(unittest.TestCase):
    """Renders a Phong ray-traced scene containing a textured rectangle
    (bette_davis.png) and saves the result; skipped on travis (no pygame)."""

    def test_scene_image(self):
        fLOG(
            __file__,
            self._testMethodName,
            OutputPrint=__name__ == "__main__")
        temp = get_temp_folder(__file__, "temp_scene_bette")
        # Texture image lives next to the temp folder in the data directory.
        image = os.path.join(temp, "..", "data", "bette_davis.png")
        # Scene: camera at default repere, ~120 degree field, 400x200 pixels.
        s = ScenePhong(Repere(), math.pi / 1.5, 400, 200)
        s.ajoute_source(Source(Vecteur(0, 8, 8), Couleur(0.4, 0.4, 0.4)))
        s.ajoute_source(Source(Vecteur(10, 0, 0), Couleur(0.4, 0.4, 0.4)))
        s.ajoute_source(Source(Vecteur(8, 8, 4.5), Couleur(0.4, 0.4, 0.4)))
        s.ajoute_objet(Sphere(Vecteur(3, -4, 7), 1, Couleur(1, 0, 0)))
        # Huge low-placed reflective sphere acts as the floor.
        s.ajoute_objet(SphereReflet(Vecteur(0, -400, 12),
                                    396, Couleur(0.5, 0.5, 0.5), 0.5))
        s.ajoute_source(Source(Vecteur(7, 2, 8), Couleur(0.2, 0.2, 0.2)))
        s.ajoute_source(Source(Vecteur(12.5, 3, 5), Couleur(0.2, 0.2, 0.2)))
        s.ajoute_source(Source(Vecteur(-12.5, 1, 6), Couleur(0.2, 0.2, 0.2)))
        s.ajoute_objet(Rectangle(Vecteur(-12.4, 0.99, 5.9), Vecteur(-12.6, 0.99, 5.9),
                                 Vecteur(-12.6, 0.99, 6.1), None, Couleur(0, 0, 0)))
        if is_travis_or_appveyor() == "travis":
            # No display/pygame on travis: bail out instead of failing.
            warnings.warn("pygame is not available")
            return
        import pygame
        s.ajoute_objet(RectangleImage(Vecteur(8, -3.5, 9), Vecteur(2, -3.5, 8),
                                      Vecteur(2, 3.8, 8), None, image, invertx=True, pygame=pygame))
        from ensae_teaching_cs.helpers.pygame_helper import wait_event
        screen = pygame.display.set_mode(s.dim)
        screen.fill((255, 255, 255))
        s.construit_image(screen, pygame=pygame, fLOG=fLOG)
        pygame.image.save(screen, os.path.join(temp, "scene_bette.png"))
        if __name__ == "__main__":
            # Keep the window open only when run interactively.
            wait_event(pygame)
# Allow running this test module directly as a script.
if __name__ == "__main__":
    unittest.main()
| sdpython/ensae_teaching_cs | _unittests/ut_special/test_LONG_image2.py | Python | mit | 2,586 | 0.001933 |
# ===========================================================================
# Copyright 2013 University of Limerick
#
# This file is part of DREAM.
#
# DREAM is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# DREAM is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with DREAM. If not, see <http://www.gnu.org/licenses/>.
# ===========================================================================
'''
Created on 18 Feb 2013
@author: George
'''
'''
models a frame entity. This can flow through the system and carry parts
'''
from simpy import Resource
from Globals import G
from Entity import Entity
# A carrier entity: a frame that flows through the system holding parts.
class Frame(Entity):
    """Frame entity that can carry up to `capacity` parts."""
    type="Frame"
    capacity=4 #the number of parts that the frame can take
    def __init__(self, id=None, name=None,**kw):
        Entity.__init__(self,id=id,name = name)
        # Resource slots model the parts currently loaded on the frame.
        # NOTE(review): modern simpy.Resource takes an Environment as its
        # first argument - confirm this matches the simpy version in use.
        self.Res=Resource(self.capacity)
        #dimension data
        self.width=2.0
        self.height=2.0
        self.lenght=2.0  # sic: misspelling of "length" kept, callers may rely on the attribute name
    def getFrameQueue(self):
        # Parts currently occupying the frame's resource slots.
        return self.Res.users
| bchiroma/dreamproject | dream/simulation/Frame.py | Python | gpl-3.0 | 1,515 | 0.014521 |
#
# This is a parser that generates the document tree for you.
#
# To use this parser, create an instance of XElementParser:
# parser = saxexts.make_parser()
# xp = XElementParser(parser)
#
# If you have defined classes in the current environment, you might want ot
# pass this environment *to* the parser, so your classes will be created as
# tree nodes instead of the default (base) XElement class instances:
#
#
# def MyElementClass1(XElement): ...
# def MyElementClass2(XElement): ...
# ...
#
# parser = saxexts.make_parser()
# xp = XElementParser(parser, vars())
#
# Once your parser is constructed, you can parse one or more documents as
# follows:
# doc_list = ['f1','f2','f3']
# -or-
# doc_list = ['url1','url2','url3']
#
# for doc in doc_list:
# doc_tree = xp.process(doc)
# print doc_tree.toXML()
import string
import sys
import types
from xml.sax import saxexts
from xml.sax import saxlib
from xelement import XElement, XTreeHandler
class XElementParser:
def __init__(self, outer_env={}, parser=None):
if parser == None:
self.parser = saxexts.XMLValParserFactory.make_parser()
else:
self.parser = parser
self.parser_error_handler = ErrorPrinter()
self.parser.setErrorHandler(self.parser_error_handler)
self.xth = XTreeHandler(IgnoreWhiteSpace='yes',
RemoveWhiteSpace='yes',
CreateElementMap='yes',
RequireUserClasses='yes')
for x in outer_env.keys():
if type(outer_env[x]) == types.ClassType or isinstance(x, object):
self.xth.registerElementClass(outer_env[x], x)
self.parser.setDocumentHandler(self.xth)
def process(self, document_uri):
Ok=None
try:
self.parser_error_handler.reset()
self.parser.parse(document_uri)
if self.parser_error_handler.has_errors():
raise "validation failed"
return self.xth.getDocument().getChild()
except IOError,e:
print "\nI/O Error: " + document_uri + ": " + str(e)
except saxlib.SAXException,e:
print "\nParse Error: " + document_uri + ": " + str(e)
class ErrorPrinter:
    "A simple class that just prints error messages to standard out."
    def __init__(self):
        # Number of error/fatalError callbacks since the last reset().
        self.error_count = 0
    def reset(self):
        # Forget errors from any previous parse run.
        self.error_count = 0
    def has_errors(self):
        # Truthy (the count) when any error or fatal error was reported.
        return self.error_count
    def warning(self, exception):
        # NOTE(review): a *warning* aborts the whole process here while
        # errors merely count - looks inverted; confirm this is intended.
        print "Warning: %s %s" % (str(exception), exception.getMessage())
        sys.exit(1)
    def error(self, exception):
        self.error_count = self.error_count + 1
        print "Error: %s %s" % (str(exception), exception.getMessage())
    def fatalError(self, exception):
        self.error_count = self.error_count + 1
        print "Fatal Error: %s %s" % (str(exception), exception.getMessage())
| aarestad/gradschool-stuff | xml-class/python-xml/JobMarkupLanguage/xparser.py | Python | gpl-2.0 | 2,877 | 0.014599 |
import os
import re
# Celery broker/result configuration derived from the environment
# (CloudAMQP + MongoLab add-ons on Heroku).

# AMQP broker; local default when CLOUDAMQP_URL is not set.
BROKER_URL = os.getenv("CLOUDAMQP_URL", 'amqp://')
# BROKER_POOL_LIMIT = None

MONGOLAB_URI = None
MONGOLAB_DB = None
URI_WITH_AUTH = None

_mongolab_url = os.getenv("MONGOLAB_URI")
if _mongolab_url is not None:
    # MongoLab publishes one URL with credentials embedded; split it so
    # the host/port URI and database name can be used separately.
    _match = re.match(r"mongodb://([^:]+):([^@]+)@([^:]+):(\d+)/(.+)",
                      _mongolab_url)
    user, passwd, host, port, db = _match.groups()
    MONGOLAB_URI = "mongodb://{}:{}".format(host, port)
    MONGOLAB_DB = db
    # Mongo is not used as the Celery result backend (kept for reference):
    # CELERY_RESULT_BACKEND = MONGOLAB_URI
    # CELERY_MONGODB_BACKEND_SETTINGS = {
    #     'database': db, 'user': user, 'password': passwd
    # }

# Results travel back over the broker rather than Mongo.
CELERY_RESULT_BACKEND = BROKER_URL

# JSON everywhere: safe, language-neutral serialization.
CELERY_TASK_SERIALIZER = 'json'
CELERY_RESULT_SERIALIZER = 'json'
CELERY_ACCEPT_CONTENT = ['json']
| susurrant-audio/scdown | scdown/celeryconfig.py | Python | mit | 747 | 0 |
# -*- coding: utf-8 -*-
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import json
import re
from django.conf import settings
from django.db.models import Q
from django.http import (
Http404, HttpResponsePermanentRedirect, HttpResponseRedirect)
from django.shortcuts import get_object_or_404
from django.views.decorators.csrf import csrf_exempt, csrf_protect
from django.views.decorators.vary import vary_on_headers
from django.views.generic.base import TemplateView
import basket
from funfactory.urlresolvers import reverse
from jingo_minify.helpers import BUILD_ID_JS, BUNDLE_HASHES
from lib import l10n_utils
from rna.models import Release
from bedrock.firefox import version_re
from bedrock.firefox.forms import SMSSendForm
from bedrock.mozorg.context_processors import funnelcake_param
from bedrock.mozorg.decorators import cache_control_expires
from bedrock.mozorg.views import process_partnership_form
from bedrock.mozorg.helpers.misc import releasenotes_url
from bedrock.firefox.utils import is_current_or_newer
from bedrock.firefox.firefox_details import firefox_details, mobile_details
from lib.l10n_utils.dotlang import _
# Matches the version token in a Firefox UA string, e.g. "Firefox/29.0".
UA_REGEXP = re.compile(r"Firefox/(%s)" % version_re)
LANG_FILES = ['firefox/partners/index']
# Localized press-release headline/link for the partners page; views fall
# back to the 'en-US' entry for locales not listed here.
LOCALE_FXOS_HEADLINES = {
    'de': {
        'title': u"Firefox OS ist richtungsweisend für die Zukunft des "
                 u"mobilen Marktes",
        'url': 'http://blog.mozilla.org/press-de/2014/02/23/'
               'firefox-os-ist-richtungsweisend-fur-die-zukunft-des-mobilen-'
               'marktes',
    },
    'en-GB': {
        'title': u'Firefox OS Unleashes the Future of Mobile',
        'url': 'http://blog.mozilla.org/press-uk/2014/02/23/'
               'firefox-os-unleashes-the-future-of-mobile'
    },
    'en-US': {
        'title': _('Firefox OS Unleashes the Future of Mobile'),
        'url': 'https://blog.mozilla.org/press/2014/02/firefox-os-future-2/',
    },
    'es-AR': {
        'title': u'Firefox OS te desvela el futuro de lo móvil',
        'url': 'http://blog.mozilla.org/press-latam/2014/02/23/'
               'firefox-os-te-desvela-el-futuro-de-lo-movil/',
    },
    'es-CL': {
        'title': u'Firefox OS te desvela el futuro de lo móvil',
        'url': 'http://blog.mozilla.org/press-latam/2014/02/23/'
               'firefox-os-te-desvela-el-futuro-de-lo-movil/',
    },
    'es-ES': {
        'title': u'Firefox OS te desvela el futuro de lo móvil',
        'url': 'https://blog.mozilla.org/press/2014/02/firefox-os-future-2/',
    },
    'es-MX': {
        'title': u'Firefox OS te desvela el futuro de lo móvil',
        'url': 'http://blog.mozilla.org/press-latam/2014/02/23/'
               'firefox-os-te-desvela-el-futuro-de-lo-movil/',
    },
    'fr': {
        'title': u'Firefox OS chamboule le futur du mobile',
        'url': 'http://blog.mozilla.org/press-fr/2014/02/23/'
               'firefox-os-chamboule-le-futur-du-mobile',
    },
    'it': {
        'title': u'Firefox OS svela il futuro del mobile',
        'url': 'http://blog.mozilla.org/press-it/2014/02/23/'
               'firefox-os-svela-il-futuro-del-mobile',
    },
    'pl': {
        'title': u'Firefox OS uwalnia przyszłość technologii mobilnej',
        'url': 'http://blog.mozilla.org/press-pl/2014/02/23/'
               'firefox-os-uwalnia-przyszlosc-technologii-mobilnej',
    },
    'pt-BR': {
        'title': u'Firefox OS apresenta o futuro dos dispositivos móveis',
        'url': 'https://blog.mozilla.org/press-br/2014/02/23/'
               'firefox-os-apresenta-o-futuro-dos-dispositivos-moveis/',
    },
}
# Channels offering a downloadable installer. Note: historical typo in the
# name (trailing lowercase 'l') kept because installer_help() references it.
INSTALLER_CHANNElS = [
    'release',
    'beta',
    'aurora',
    # 'nightly',  # soon
]
def get_js_bundle_files(bundle):
    """Return a JSON array (as a string) of media URLs for the JS files
    in *bundle*, suitable for lazy loading.

    In DEBUG the individual source files are listed; in production a
    single minified, cache-busted bundle file is used instead.
    (Logic mirrors jingo_minify.helpers.js.)
    """
    if settings.DEBUG:
        items = settings.MINIFY_BUNDLES['js'][bundle]
    else:
        # Prefer the per-bundle hash; fall back to the global build id.
        build_id = BUNDLE_HASHES.get("js:%s" % bundle, BUILD_ID_JS)
        items = ("js/%s-min.js?build=%s" % (bundle, build_id,),)
    urls = [settings.MEDIA_URL + item for item in items]
    return json.dumps(urls)
# Pre-serialized JS bundle lists used by the partners page templates.
JS_COMMON = get_js_bundle_files('partners_common')
JS_MOBILE = get_js_bundle_files('partners_mobile')
JS_DESKTOP = get_js_bundle_files('partners_desktop')
def get_latest_version(product='firefox', channel='release'):
    """Look up the newest version string for *product* on *channel*.

    'organizations' is an alias for the ESR channel.
    """
    if channel == 'organizations':
        channel = 'esr'
    details = mobile_details if product == 'mobile' else firefox_details
    return details.latest_version(channel)
def installer_help(request):
    """Render the installer help page, echoing back the requesting
    installer's language/channel only when they are recognized values."""
    context = {
        'installer_lang': None,
        'installer_channel': None,
    }
    lang = request.GET.get('installer_lang', None)
    if lang and lang in firefox_details.languages:
        context['installer_lang'] = lang
    channel = request.GET.get('channel', None)
    if channel and channel in INSTALLER_CHANNElS:
        context['installer_channel'] = channel
    return l10n_utils.render(request, 'firefox/installer-help.html', context)
@csrf_exempt
def sms_send(request):
    """Form view that texts a Firefox-for-Android download link via basket.

    GET renders the empty form; a valid POST triggers the SMS and
    redirects to the thank-you page. Basket failures are surfaced as a
    form-wide error instead of a 500.
    """
    form = SMSSendForm(request.POST or None)
    if request.method == 'POST' and form.is_valid():
        try:
            basket.send_sms(form.cleaned_data['number'],
                            'SMS_Android',
                            form.cleaned_data['optin'])
        except basket.BasketException:
            # Attach the failure as a non-field error so the template
            # shows it above the form.
            msg = form.error_class(
                [_('An error occurred in our system. '
                   'Please try again later.')]
            )
            form.errors['__all__'] = msg
        else:
            return HttpResponseRedirect(
                reverse('firefox.android.sms-thankyou'))
    return l10n_utils.render(request, 'firefox/android/sms-send.html',
                             {'sms_form': form})
def windows_billboards(req):
    """Unsupported-Windows landing page.

    Windows NT 5.1 (XP) gets a dedicated page; anything else - including
    missing or malformed version query parameters - falls back to the
    Win2k page.
    """
    major_version = req.GET.get('majorVersion')
    minor_version = req.GET.get('minorVersion')
    if major_version and minor_version:
        try:
            major_version = float(major_version)
            minor_version = float(minor_version)
        except ValueError:
            # User-supplied query params may be garbage; previously this
            # raised and produced a 500. Treat as unknown version instead.
            major_version = minor_version = None
        if major_version == 5 and minor_version == 1:
            return l10n_utils.render(req, 'firefox/unsupported/winxp.html')
    return l10n_utils.render(req, 'firefox/unsupported/win2k.html')
def fx_home_redirect(request):
    # Legacy Firefox home URL: send visitors to the download page.
    return HttpResponseRedirect(reverse('firefox.new'))
def dnt(request):
    # Page content varies with the Do Not Track header; tell caches so.
    response = l10n_utils.render(request, 'firefox/dnt.html')
    response['Vary'] = 'DNT'
    return response
def all_downloads(request, channel):
    """List every localized Firefox build for *channel*.

    A missing channel means 'release'; 'organizations' is served as ESR.
    An optional ?q= query filters the build lists.
    """
    if channel is None:
        channel = 'release'
    if channel == 'organizations':
        channel = 'esr'
    latest = get_latest_version('firefox', channel)
    search = request.GET.get('q')
    channel_names = {
        'release': _('Firefox'),
        'beta': _('Firefox Beta'),
        'aurora': _('Firefox Aurora'),
        'esr': _('Firefox Extended Support Release'),
    }
    context = {
        'full_builds': firefox_details.get_filtered_full_builds(latest, search),
        'test_builds': firefox_details.get_filtered_test_builds(latest, search),
        'query': search,
        'channel': channel,
        'channel_name': channel_names[channel],
    }
    return l10n_utils.render(request, 'firefox/all.html', context)
@csrf_protect
def firefox_partners(request):
    """Partners landing page with a localized FxOS press headline and the
    partnership contact form (delegated to process_partnership_form)."""
    # If the current locale isn't in our list, return the en-US value
    press_locale = request.locale if (
        request.locale in LOCALE_FXOS_HEADLINES) else 'en-US'
    template_vars = {
        'locale_headline_url': LOCALE_FXOS_HEADLINES[press_locale]['url'],
        'locale_headline_title': LOCALE_FXOS_HEADLINES[press_locale]['title'],
        'js_common': JS_COMMON,
        'js_mobile': JS_MOBILE,
        'js_desktop': JS_DESKTOP,
    }
    # Values forwarded into the CRM lead created by the form handler.
    form_kwargs = {
        'interest_set': 'fx',
        'lead_source': 'www.mozilla.org/firefox/partners/'}
    return process_partnership_form(
        request, 'firefox/partners/index.html', 'firefox.partners.index', template_vars, form_kwargs)
def releases_index(request):
    """Render the index of all Firefox releases, grouping each major
    release with its stability (minor) releases, newest first."""
    releases = {}
    major_releases = firefox_details.firefox_history_major_releases
    minor_releases = firefox_details.firefox_history_stability_releases
    for release in major_releases:
        major_version = float(re.findall(r'^\d+\.\d+', release)[0])
        # Firefox's numbering scheme changed over time: the second number
        # mattered before Firefox 4 ('1.5'), but from 4 onward the major
        # is displayed bare ('24'), so e.g. 24.1.x groups under 24.0.
        if major_version < 4:
            prefix = '%s' % round(major_version, 1)
        else:
            prefix = '%g' % round(major_version, 1)
        major_pattern = r'^' + re.escape(prefix)
        minors = [m for m in minor_releases if re.findall(major_pattern, m)]
        releases[major_version] = {
            'major': release,
            'minor': sorted(minors,
                            key=lambda x: int(re.findall(r'\d+$', x)[0])),
        }
    return l10n_utils.render(request, 'firefox/releases/index.html',
                             {'releases': sorted(releases.items(), reverse=True)})
def latest_notes(request, product='firefox', channel='release'):
    """Redirect to the release notes of the newest version on *channel*."""
    version = get_latest_version(product, channel)
    # Normalize channel-specific suffixes in the version URL segment.
    if channel == 'beta':
        version = re.sub(r'b\d+$', 'beta', version)
    if channel == 'organizations':
        version = re.sub(r'esr$', '', version)
    notes_dir = 'auroranotes' if channel == 'aurora' else 'releasenotes'
    segments = [product, version, notes_dir]
    locale = getattr(request, 'locale', None)
    if locale:
        segments = [locale] + segments
    return HttpResponseRedirect('/%s/' % '/'.join(segments))
def latest_sysreq(request, channel='release'):
    """Redirect to the system requirements page of the newest Firefox on
    *channel*."""
    version = get_latest_version('firefox', channel)
    if channel == 'beta':
        version = re.sub(r'b\d+$', 'beta', version)
    if channel == 'organizations':
        # ESR URLs use only the major version, e.g. '24.0'.
        version = re.sub(r'^(\d+).+', r'\1.0', version)
    segments = ['firefox', version, 'system-requirements']
    locale = getattr(request, 'locale', None)
    if locale:
        segments = [locale] + segments
    return HttpResponseRedirect('/%s/' % '/'.join(segments))
def show_whatsnew_tour(oldversion):
    """True when *oldversion* (the version upgraded from) predates
    Firefox 29, i.e. the user has not yet seen the Australis tour."""
    match = re.match(r'\d{1,2}', oldversion)
    return bool(match) and int(match.group(0)) < 29
class LatestFxView(TemplateView):
    """
    Base class to be extended by views that require visitor to be
    using latest version of Firefox. Classes extending this class must
    implement either `get_template_names` function or provide
    `template_name` class attribute.
    """
    @vary_on_headers('User-Agent')
    def dispatch(self, *args, **kwargs):
        # Responses depend on the UA sniff below, so caches must vary on it.
        return super(LatestFxView, self).dispatch(*args, **kwargs)
    def post(self, request, *args, **kwargs):
        # required for newsletter form post that is handled in
        # newsletter/helpers.py
        return self.get(request, *args, **kwargs)
    def redirect_to(self):
        """
        Redirect visitors based on their user-agent.
        - Up-to-date Firefox users pass through.
        - Other Firefox users go to the new page.
        - Non Firefox users go to the new page.

        Returns the redirect URL, or None to serve the page normally.
        """
        # Preserve the original query string across the redirect.
        query = self.request.META.get('QUERY_STRING')
        query = '?' + query if query else ''
        user_agent = self.request.META.get('HTTP_USER_AGENT', '')
        if 'Firefox' not in user_agent:
            return reverse('firefox.new') + query
        # TODO : Where to redirect bug 757206
        # '0' guarantees is_current_or_newer() fails when no version
        # could be parsed out of the UA string.
        user_version = '0'
        match = UA_REGEXP.search(user_agent)
        if match:
            user_version = match.group(1)
        if not is_current_or_newer(user_version):
            return reverse('firefox.new') + query
        return None
    def render_to_response(self, context, **response_kwargs):
        # Either redirect out-of-date/non-Firefox visitors, or render.
        redirect_url = self.redirect_to()
        if redirect_url is not None:
            return HttpResponsePermanentRedirect(redirect_url)
        else:
            return l10n_utils.render(self.request,
                                     self.get_template_names(),
                                     context,
                                     **response_kwargs)
class FirstrunView(LatestFxView):
    """First-run page shown after installing Firefox; forced to HTTPS."""
    def get(self, request, *args, **kwargs):
        # Outside dev, permanently upgrade plain-HTTP requests to HTTPS.
        if not settings.DEV and not request.is_secure():
            uri = 'https://{host}{path}'.format(
                host=request.get_host(),
                path=request.get_full_path(),
            )
            return HttpResponsePermanentRedirect(uri)
        return super(FirstrunView, self).get(request, *args, **kwargs)
    def get_template_names(self):
        locale = l10n_utils.get_locale(self.request)
        fc_ctx = funnelcake_param(self.request)
        f = fc_ctx.get('funnelcake_id', 0)
        # Funnelcake build 30 (en-US) is the no-tour experiment arm.
        if f == '30' and locale == 'en-US':
            template = 'firefox/australis/firstrun-no-tour.html'
        else:
            template = 'firefox/australis/firstrun-tour.html'
        # return a list to conform with original intention
        return [template]
class WhatsnewView(LatestFxView):
    """What's-new page shown after a Firefox update; forced to HTTPS.

    Template choice depends on the new version, the locale, funnelcake
    experiment arms, and whether the previous version predates the
    Australis (29) tour.
    """
    # Locales targeted for FxOS
    fxos_locales = []
    # Locales that have a localized promo video, mapped to its file stem.
    locales_with_video = {
        'en-US': 'american',
        'en-GB': 'british',
        'de': 'german_final',
        'it': 'italian_final',
        'ja': 'japanese_final',
        'es-AR': 'spanish_final',
        'es-CL': 'spanish_final',
        'es-ES': 'spanish_final',
        'es-MX': 'spanish_final',
    }
    def get(self, request, *args, **kwargs):
        # Outside dev, permanently upgrade plain-HTTP requests to HTTPS.
        if not settings.DEV and not request.is_secure():
            uri = 'https://{host}{path}'.format(
                host=request.get_host(),
                path=request.get_full_path(),
            )
            return HttpResponsePermanentRedirect(uri)
        return super(WhatsnewView, self).get(request, *args, **kwargs)
    def get_context_data(self, **kwargs):
        ctx = super(WhatsnewView, self).get_context_data(**kwargs)
        locale = l10n_utils.get_locale(self.request)
        # FxOS-targeted locales get a different page without the video.
        if locale not in self.fxos_locales:
            ctx['locales_with_video'] = self.locales_with_video
        return ctx
    def get_template_names(self):
        version = self.kwargs.get('fx_version') or ''
        locale = l10n_utils.get_locale(self.request)
        fc_ctx = funnelcake_param(self.request)
        f = fc_ctx.get('funnelcake_id', 0)
        oldversion = self.request.GET.get('oldversion', '')
        # old versions of Firefox sent a prefixed version
        if oldversion.startswith('rv:'):
            oldversion = oldversion[3:]
        versions = ('29.', '30.', '31.')
        if version == '29.0a1':
            template = 'firefox/whatsnew-nightly-29.html'
        elif version.startswith(versions):
            if locale == 'en-US' and f == '31':
                # funnelcake build 31 should always get the tour
                template = 'firefox/australis/whatsnew-tour.html'
            elif locale == 'en-US' and f == '30':
                # funnelcake build 30 should not get the tour
                template = 'firefox/australis/whatsnew-no-tour.html'
            elif show_whatsnew_tour(oldversion):
                # updating from pre-29 version
                template = 'firefox/australis/whatsnew-tour.html'
            else:
                # default is no tour
                template = 'firefox/australis/whatsnew-no-tour.html'
        elif locale in self.fxos_locales:
            template = 'firefox/whatsnew-fxos.html'
        else:
            template = 'firefox/whatsnew.html'
        # return a list to conform with original intention
        return [template]
class TourView(LatestFxView):
    """Help-menu tour page (Australis); forced to HTTPS."""
    template_name = 'firefox/australis/help-menu-tour.html'
    def get(self, request, *args, **kwargs):
        # Outside dev, permanently upgrade plain-HTTP requests to HTTPS.
        if not settings.DEV and not request.is_secure():
            uri = 'https://{host}{path}'.format(
                host=request.get_host(),
                path=request.get_full_path(),
            )
            return HttpResponsePermanentRedirect(uri)
        return super(TourView, self).get(request, *args, **kwargs)
def release_notes_template(channel, product):
    """Pick the release-notes template for a product/channel pair."""
    if product == 'Firefox OS':
        return 'firefox/releases/os-notes.html'
    # Known channels map to their lowercased name; anything unrecognized
    # falls back to the release template.
    prefix = {c: c.lower() for c in Release.CHANNELS}
    return 'firefox/releases/%s-notes.html' % prefix.get(channel, 'release')
def equivalent_release_url(release):
    """URL of the matching Android/desktop release's notes, or None.

    The Android equivalent wins when both exist.
    """
    other = release.equivalent_android_release()
    if not other:
        other = release.equivalent_desktop_release()
    if not other:
        return None
    return releasenotes_url(other)
def get_release_or_404(version, product):
    """Fetch the Release row for *version*/*product* or raise Http404.

    Non-public releases 404 outside dev mode.
    """
    # Three-part Firefox versions (e.g. 24.1.0) may belong to either the
    # regular or the ESR product, so query both.
    if product == 'Firefox' and len(version.split('.')) == 3:
        product_query = Q(product='Firefox') | Q(
            product='Firefox Extended Support Release')
    else:
        product_query = Q(product=product)
    release = get_object_or_404(Release, product_query, version=version)
    if not release.is_public and not settings.DEV:
        raise Http404
    return release
def get_download_url(channel='Release'):
    """URL of the page where a build from *channel* can be downloaded."""
    anchors = {'Aurora': '#aurora', 'Beta': '#beta'}
    if channel in anchors:
        # Pre-release channels live as anchors on the channel page.
        return reverse('firefox.channel') + anchors[channel]
    return reverse('firefox')
@cache_control_expires(1)
def release_notes(request, fx_version, product='Firefox'):
    """Render the release notes for one version of a product.

    Early FxOS versions have static templates. If the bare version is
    unknown, a matching beta release (e.g. '29.0beta') is tried and the
    visitor is redirected to its canonical URL; Http404 propagates when
    neither exists.
    """
    if product == 'Firefox OS' and fx_version in ('1.0.1', '1.1', '1.2'):
        return l10n_utils.render(
            request, 'firefox/os/notes-%s.html' % fx_version)
    try:
        release = get_release_or_404(fx_version, product)
    except Http404:
        release = get_release_or_404(fx_version + 'beta', product)
        return HttpResponseRedirect(releasenotes_url(release))
    # Non-public notes are only visible in dev mode.
    new_features, known_issues = release.notes(public_only=not settings.DEV)
    return l10n_utils.render(
        request, release_notes_template(release.channel, product), {
            'version': fx_version,
            'download_url': get_download_url(release.channel),
            'release': release,
            'equivalent_release_url': equivalent_release_url(release),
            'new_features': new_features,
            'known_issues': known_issues})
@cache_control_expires(1)
def system_requirements(request, fx_version, product='Firefox'):
    """Render the system requirements page for one release (404s via
    get_release_or_404 when the version is unknown or non-public)."""
    release = get_release_or_404(fx_version, product)
    return l10n_utils.render(
        request, 'firefox/releases/system_requirements.html',
        {'release': release, 'version': fx_version})
| kewisch/bedrock | bedrock/firefox/views.py | Python | mpl-2.0 | 18,915 | 0.000212 |
# -*- coding: utf-8 -*-
#
# This file is part of REANA.
# Copyright (C) 2019 CERN.
#
# REANA is free software; you can redistribute it and/or modify it
# under the terms of the MIT License; see LICENSE file for more details.
"""Job Manager."""
import json
import shlex
from flask import current_app
from reana_commons.utils import calculate_file_access_time
from reana_db.database import Session
from reana_db.models import Job as JobTable
from reana_db.models import JobCache, JobStatus, Workflow
class JobManager:
    """Job management interface.

    Subclasses implement the backend-specific operations (``execute``,
    ``get_status``, ``get_logs``, ``stop``); this base class handles DB
    bookkeeping and job caching via the ``execution_hook`` decorator.
    """

    def __init__(
        self,
        docker_img="",
        cmd=None,
        prettified_cmd="",
        env_vars=None,
        workflow_uuid=None,
        workflow_workspace=None,
        job_name=None,
    ):
        """Instanciates basic job.

        :param docker_img: Docker image.
        :type docker_img: str
        :param cmd: Command to execute; a string is tokenized with shlex.
        :type cmd: list | str
        :param prettified_cmd: pretified version of command to execute.
        :type prettified_cmd: str
        :param env_vars: Environment variables.
        :type env_vars: dict
        :param workflow_uuid: Unique workflow id.
        :type workflow_uuid: str
        :param workflow_workspace: Absolute path to workspace
        :type workflow_workspace: str
        :param job_name: Name of the job.
        :type job_name: str
        """
        self.docker_img = docker_img or ""
        # BUGFIX: `cmd` and `env_vars` previously used mutable defaults
        # ([] / {}); the shared default dict was mutated by
        # _extend_env_vars, so every instance created without env_vars
        # shared one dict and clobbered the others' REANA_* values.
        if isinstance(cmd, str):
            self.cmd = shlex.split(cmd)
        else:
            self.cmd = cmd or []
        self.prettified_cmd = prettified_cmd
        self.workflow_uuid = workflow_uuid
        self.workflow_workspace = workflow_workspace
        self.job_name = job_name
        # Copy before extending so the caller's dict is never mutated.
        self.env_vars = self._extend_env_vars(dict(env_vars or {}))

    def execution_hook(fn):
        """Add before execution hooks and DB operations.

        Plain function used as a decorator at class-body evaluation time;
        it is not an instance method.
        """

        def wrapper(inst, *args, **kwargs):
            inst.before_execution()
            backend_job_id = fn(inst, *args, **kwargs)
            inst.create_job_in_db(backend_job_id)
            inst.cache_job()
            return backend_job_id

        return wrapper

    def before_execution(self):
        """Before job submission hook."""
        pass

    def after_execution(self):
        """After job submission hook."""
        pass

    @execution_hook
    def execute(self):
        """Execute a job.

        :returns: Job ID.
        :rtype: str
        """
        raise NotImplementedError

    def get_status(self):
        """Get job status.

        :returns: job status.
        :rtype: str
        """
        raise NotImplementedError

    def get_logs(self):
        """Get job log.

        :returns: stderr, stdout of a job.
        :rtype: dict
        """
        raise NotImplementedError

    def stop(self):
        """Stop a job."""
        raise NotImplementedError

    def create_job_in_db(self, backend_job_id):
        """Create job in db and remember its id as ``self.job_id``."""
        # NOTE(review): compute_backend, cvmfs_mounts and
        # shared_file_system are expected to be set by subclasses; they
        # are not initialized here - confirm.
        job_db_entry = JobTable(
            backend_job_id=backend_job_id,
            workflow_uuid=self.workflow_uuid,
            status=JobStatus.created.name,
            compute_backend=self.compute_backend,
            cvmfs_mounts=self.cvmfs_mounts or "",
            shared_file_system=self.shared_file_system or False,
            docker_img=self.docker_img,
            cmd=json.dumps(self.cmd),
            env_vars=json.dumps(self.env_vars),
            deleted=False,
            job_name=self.job_name,
            prettified_cmd=self.prettified_cmd,
        )
        Session.add(job_db_entry)
        Session.commit()
        self.job_id = str(job_db_entry.id_)

    def cache_job(self):
        """Cache a job by snapshotting workspace file access times."""
        workflow = (
            Session.query(Workflow).filter_by(id_=self.workflow_uuid).one_or_none()
        )
        access_times = calculate_file_access_time(workflow.workspace_path)
        prepared_job_cache = JobCache()
        prepared_job_cache.job_id = self.job_id
        prepared_job_cache.access_times = access_times
        Session.add(prepared_job_cache)
        Session.commit()

    def update_job_status(self):
        """Update job status in DB."""
        pass

    def _extend_env_vars(self, env_vars):
        """Extend environment variables with REANA specific ones."""
        prefix = "REANA"
        env_vars[prefix + "_WORKSPACE"] = self.workflow_workspace
        env_vars[prefix + "_WORKFLOW_UUID"] = str(self.workflow_uuid)
        return env_vars
| diegodelemos/reana-job-controller | reana_job_controller/job_manager.py | Python | mit | 4,466 | 0.000224 |
#!/usr/bin/env python
#encode=utf-8
#vim: tabstop=4 shiftwidth=4 softtabstop=4
#Created on 2013-6-24
#Copyright 2013 nuoqingyun xuqifeng
from bson.code import Code
# Server-side JavaScript map/reduce snippets for aggregating per-domain
# web traffic stored in MongoDB.
# Map: emit each log document's byte count keyed by its domain.
traffic_map = Code("function () {"
               "emit(this.domain, this.bytes);"
               "}")
# Reduce: total bytes plus a document count per domain.
# NOTE(review): this returns {sum, count} while the map emits bare
# numbers; MongoDB may re-reduce partial results, which requires reduce
# output to match the emitted value shape - confirm `count` is reliable.
traffic_reduce = Code("function (key, values) {"
                " var sum = 0;"
                " var count = 0;"
                " values.forEach(function(byte){"
                " sum += byte;"
                " count ++;"
                "});"
                " return {'sum':sum, 'count':count};"
                "}")
# Reduce variant: plain sum of bytes (re-reduce safe).
traffic_reduce1 = Code("function (keyDomain, valuesBytes) {"
                " return Array.sum(valuesBytes);"
                "}")
# Map variant: emit bytes plus visit/hit counters per domain.
traffic_map_test = Code("function () {"
                "emit(this.domain, {bytes:this.bytes, visit:1, hits:this.code});"
                "}")
# Reduce pairing with traffic_map_test: accumulate all three counters.
traffic_reduce_test = Code("function (key, values) {"
                " var sum = 0;"
                " var count = 0;"
                " var visits = 0;"
                " values.forEach(function(vals){"
                " sum += vals.bytes;"
                " count += vals.hits;"
                " visits += vals.visit;"
                "});"
                " return {bytes:sum, visit:visits, hits:count};"
                "}")
from pandac.PandaModules import Vec3
from direct.interval.IntervalGlobal import Sequence, Parallel, Wait, Func
from direct.interval.IntervalGlobal import LerpScaleInterval
from direct.interval.IntervalGlobal import WaitInterval, ActorInterval, FunctionInterval
from direct.task.Task import Task
from direct.directnotify import DirectNotifyGlobal
from direct.fsm import StateData
from toontown.minigame.OrthoWalk import OrthoWalk
from toontown.minigame.MinigameRulesPanel import MinigameRulesPanel
from toontown.parties import PartyGlobals
from direct.fsm import ClassicFSM, State
class PartyCatchActivityToonSD(StateData.StateData):
notify = DirectNotifyGlobal.directNotify.newCategory('PartyCatchActivityToonSD')
FallBackAnim = 'slip-backward'
FallFwdAnim = 'slip-forward'
CatchNeutralAnim = 'catch-neutral'
CatchRunAnim = 'catch-run'
EatNeutralAnim = 'catch-eatneutral'
EatNRunAnim = 'catch-eatnrun'
animList = [FallBackAnim,
FallFwdAnim,
CatchNeutralAnim,
CatchRunAnim,
EatNeutralAnim,
EatNRunAnim]
    def __init__(self, avId, activity):
        """Track one toon's animation/FSM state during the catch activity."""
        PartyCatchActivityToonSD.notify.debug('init : avId = %s, activity = %s ' % (avId, activity))
        self.avId = avId
        self.activity = activity
        # Only the local toon drives input (ortho walk) and the rules panel.
        self.isLocal = avId == base.localAvatar.doId
        self.toon = self.activity.getAvatar(self.avId)
        self.unexpectedExit = False
        # States mirror gameplay events: rules panel, normal play, eating
        # a caught fruit, and the two slip reactions.
        self.fsm = ClassicFSM.ClassicFSM('CatchActivityAnimFSM-%s' % self.avId, [State.State('init', self.enterInit, self.exitInit, ['notPlaying', 'normal', 'rules']),
         State.State('notPlaying', self.enterNotPlaying, self.exitNotPlaying, ['normal', 'rules', 'cleanup']),
         State.State('rules', self.enterRules, self.exitRules, ['normal', 'cleanup']),
         State.State('normal', self.enterNormal, self.exitNormal, ['eatFruit',
          'fallBack',
          'fallForward',
          'notPlaying']),
         State.State('eatFruit', self.enterEatFruit, self.exitEatFruit, ['normal',
          'fallBack',
          'fallForward',
          'eatFruit',
          'notPlaying']),
         State.State('fallBack', self.enterFallBack, self.exitFallBack, ['normal', 'notPlaying']),
         State.State('fallForward', self.enterFallForward, self.exitFallForward, ['normal', 'notPlaying']),
         State.State('cleanup', self.enterCleanup, self.exitCleanup, [])], 'init', 'cleanup')
        self.enteredAlready = False
    def load(self):
        """Pre-pose every catch animation so later playback starts instantly."""
        # NOTE(review): setAnimState is presumably defined later in this
        # class (not visible in this chunk) - confirm.
        self.setAnimState('off', 1.0)
        for anim in self.animList:
            self.toon.pose(anim, 0)
    def unload(self):
        del self.fsm
    def enter(self):
        # Guard: enter() may be called more than once; start the FSM once.
        if not self.enteredAlready:
            self.enteredAlready = True
            self.fsm.enterInitialState()
        self._exiting = False
    def exit(self, unexpectedExit = False):
        # Re-entrancy guard; on a clean exit drive the FSM to 'cleanup'.
        if self._exiting:
            return
        self._exiting = True
        self.unexpectedExit = unexpectedExit
        if not self.unexpectedExit:
            self.fsm.requestFinalState()
        del self._exiting
    def enterInit(self):
        """Prepare the toon: steady gaze, translucent drop shadow."""
        self.notify.debug('enterInit')
        self.toon.startBlink()
        self.toon.stopLookAround()
        if self.isLocal:
            self.activity.initOrthoWalk()
        # Remember the original shadow color so it can be restored later.
        self.dropShadow = self.toon.dropShadow
        self.origDropShadowColor = self.dropShadow.getColor()
        c = self.origDropShadowColor
        alpha = 0.35
        self.dropShadow.setColor(c[0], c[1], c[2], alpha)
    def exitInit(self):
        pass
    def enterNotPlaying(self):
        """Idle stance between play sessions; restore the shadow."""
        self.toon.stopBlink()
        self.toon.startLookAround()
        self.setAnimState('neutral', 1.0)
        if self.isLocal:
            self.activity.orthoWalk.stop()
        self.dropShadow.setColor(self.origDropShadowColor)
    def exitNotPlaying(self):
        # Re-fade the shadow when play resumes.
        self.dropShadow = self.toon.dropShadow
        self.origDropShadowColor = self.dropShadow.getColor()
        c = self.origDropShadowColor
        alpha = 0.35
        self.dropShadow.setColor(c[0], c[1], c[2], alpha)
    def enterRules(self):
        """Show the rules panel (local toon only); remote toons skip it."""
        if self.isLocal:
            self.notify.debug('enterNormal')
            self.setAnimState('Catching', 1.0)
            self.activity.orthoWalk.stop()
            self.accept(self.activity.rulesDoneEvent, self.handleRulesDone)
            self.rulesPanel = MinigameRulesPanel('PartyRulesPanel', self.activity.getTitle(), self.activity.getInstructions(), self.activity.rulesDoneEvent, PartyGlobals.DefaultRulesTimeout)
            # Free up screen cells so the panel has room.
            base.setCellsAvailable(base.bottomCells + [base.leftCells[0], base.rightCells[1]], False)
            self.rulesPanel.load()
            self.rulesPanel.enter()
        else:
            self.fsm.request('normal')
    def handleRulesDone(self):
        self.fsm.request('normal')
    def exitRules(self):
        self.setAnimState('off', 1.0)
        self.ignore(self.activity.rulesDoneEvent)
        # The panel exists only on the local toon's path through enterRules.
        if hasattr(self, 'rulesPanel'):
            self.rulesPanel.exit()
            self.rulesPanel.unload()
            del self.rulesPanel
            base.setCellsAvailable(base.bottomCells + [base.leftCells[0], base.rightCells[1]], True)
def enterNormal(self):
self.notify.debug('enterNormal')
self.setAnimState('Catching', 1.0)
if self.isLocal:
self.activity.orthoWalk.start()
self.toon.lerpLookAt(Vec3.forward() + Vec3.up(), time=0.2, blink=0)
def exitNormal(self):
self.setAnimState('off', 1.0)
if self.isLocal:
self.activity.orthoWalk.stop()
self.toon.lerpLookAt(Vec3.forward(), time=0.2, blink=0)
def eatFruit(self, fruitModel, handNode):
if self.fsm.getCurrentState().getName() == 'eatFruit':
self.fsm.request('normal')
self.fsm.request('eatFruit', [fruitModel, handNode])
def enterEatFruit(self, fruitModel, handNode):
self.notify.debug('enterEatFruit')
self.setAnimState('CatchEating', 1.0)
if self.isLocal:
self.activity.orthoWalk.start()
self.fruitModel = fruitModel
renderScale = fruitModel.getScale(render)
fruitModel.reparentTo(handNode)
fruitModel.setScale(render, renderScale)
duration = self.toon.getDuration('catch-eatneutral')
self.eatIval = Sequence(Parallel(WaitInterval(duration), Sequence(LerpScaleInterval(fruitModel, duration / 2.0, fruitModel.getScale() * 0.5, blendType='easeInOut'), Func(fruitModel.hide))), Func(self.fsm.request, 'normal'), name=self.toon.uniqueName('eatingIval'))
self.eatIval.start()
    def exitEatFruit(self):
        """Stop the eating interval and dispose of the fruit model."""
        self.eatIval.pause()
        del self.eatIval
        self.fruitModel.reparentTo(hidden)
        self.fruitModel.removeNode()
        del self.fruitModel
        self.setAnimState('off', 1.0)
        if self.isLocal:
            self.activity.orthoWalk.stop()
    def enterFallBack(self):
        """Play the fall-backward reaction, resized to fit a fixed duration."""
        self.notify.debug('enterFallBack')
        if self.isLocal:
            base.playSfx(self.activity.sndOof)
        duration = 1.0
        animName = self.FallBackAnim
        startFrame = 12
        totalFrames = self.toon.getNumFrames(animName)
        frames = totalFrames - 1 - startFrame
        frameRate = self.toon.getFrameRate(animName)
        # newRate is the effective frames-per-second needed to fit the chosen
        # frame span into `duration`; playRate rescales the anim's native rate.
        newRate = frames / duration
        playRate = newRate / frameRate

        # Default-argument binding captures self for the deferred callback.
        def resume(self = self):
            self.fsm.request('normal')

        self.fallBackIval = Sequence(ActorInterval(self.toon, animName, startTime=startFrame / newRate, endTime=totalFrames / newRate, playRate=playRate), FunctionInterval(resume))
        self.fallBackIval.start()
    def exitFallBack(self):
        """Halt the fall-back interval if it is still playing."""
        self.fallBackIval.pause()
        del self.fallBackIval
    def enterFallForward(self):
        """Play the fall-forward reaction with a pause on the ground midway."""
        self.notify.debug('enterFallForward')
        if self.isLocal:
            base.playSfx(self.activity.sndOof)
        duration = 2.0
        animName = self.FallFwdAnim
        startFrame = 12
        totalFrames = self.toon.getNumFrames(animName)
        frames = totalFrames - 1 - startFrame
        # Frame at which the toon lies flat; the sequence holds here.
        pauseFrame = 19
        frameRate = self.toon.getFrameRate(animName)
        # Each animated half (before/after the pause) gets half the duration.
        newRate = frames / (duration * 0.5)
        playRate = newRate / frameRate

        # Default-argument binding captures self for the deferred callback.
        def resume(self = self):
            self.fsm.request('normal')

        self.fallFwdIval = Sequence(ActorInterval(self.toon, animName, startTime=startFrame / newRate, endTime=pauseFrame / newRate, playRate=playRate), WaitInterval(duration / 2.0), ActorInterval(self.toon, animName, startTime=pauseFrame / newRate, endTime=totalFrames / newRate, playRate=playRate), FunctionInterval(resume))
        self.fallFwdIval.start()
    def exitFallForward(self):
        """Halt the fall-forward interval if it is still playing."""
        self.fallFwdIval.pause()
        del self.fallFwdIval
    def enterCleanup(self):
        """Final state: undo everything this state machine set up on the toon."""
        self.notify.debug('enterCleanup')
        self.toon.stopBlink()
        self.toon.startLookAround()
        if self.isLocal:
            self.activity.orthoWalk.stop()
            self.activity.destroyOrthoWalk()
        # Restore the shadow color saved before the activity recolored it.
        self.dropShadow.setColor(self.origDropShadowColor)
    def exitCleanup(self):
        """Nothing to undo; cleanup is terminal."""
        pass
def setAnimState(self, newState, playRate):
if not self.unexpectedExit:
self.toon.setAnimState(newState, playRate)
else:
self.notify.debug('setAnimState(): Toon unexpectedExit flag is set.')
| ksmit799/Toontown-Source | toontown/parties/PartyCatchActivityToonSD.py | Python | mit | 9,299 | 0.003979 |
import datetime
try:
import cPickle as pickle
except ImportError:
import pickle
from django.db import models
from django.db.models.query import QuerySet
from django.conf import settings
from django.core.urlresolvers import reverse
from django.template import Context
from django.template.loader import render_to_string
from django.core.exceptions import ImproperlyConfigured
from django.contrib.sites.models import Site
from django.contrib.auth.models import User
from django.contrib.auth.models import AnonymousUser
from django.contrib.contenttypes.models import ContentType
from django.contrib.contenttypes import generic
from django.utils.translation import ugettext_lazy as _
from django.utils.translation import ugettext, get_language, activate
from django.core.mail import EmailMultiAlternatives
QUEUE_ALL = getattr(settings, "NOTIFICATION_QUEUE_ALL", False)
class LanguageStoreNotAvailable(Exception):
    """Raised when no per-user notification language can be determined."""
    pass
class NoticeType(models.Model):
    """A category of notice that users can receive (e.g. a friend invite)."""

    label = models.CharField(_('label'), max_length=40)
    display = models.CharField(_('display'), max_length=50)
    description = models.CharField(_('description'), max_length=100)

    # by default only on for media with sensitivity less than or equal to this number
    default = models.IntegerField(_('default'))

    def __unicode__(self):
        return self.label

    class Meta:
        verbose_name = _("notice type")
        verbose_name_plural = _("notice types")
# Delivery media, keyed by a one-character id used in NoticeSetting.medium.
# if this gets updated, the create() method below needs to be as well...
NOTICE_MEDIA = (
    ("1", _("Email")),
)

# how spam-sensitive is the medium (compared against NoticeType.default)
NOTICE_MEDIA_DEFAULTS = {
    "1": 2  # email
}
class NoticeSetting(models.Model):
    """
    Indicates, for a given user, whether to send notifications
    of a given type to a given medium.
    """
    user = models.ForeignKey(User, verbose_name=_('user'))
    notice_type = models.ForeignKey(NoticeType, verbose_name=_('notice type'))
    medium = models.CharField(_('medium'), max_length=1, choices=NOTICE_MEDIA)
    send = models.BooleanField(_('send'))

    class Meta:
        verbose_name = _("notice setting")
        verbose_name_plural = _("notice settings")
        unique_together = ("user", "notice_type", "medium")
def get_notification_setting(user, notice_type, medium):
    """
    Return the user's NoticeSetting for (notice_type, medium), creating it
    with the medium's default on first access.
    """
    try:
        return NoticeSetting.objects.get(
            user=user, notice_type=notice_type, medium=medium)
    except NoticeSetting.DoesNotExist:
        default = (NOTICE_MEDIA_DEFAULTS[medium] <= notice_type.default)
        # get_or_create tolerates another thread creating the row between the
        # failed get() above and this point, and it already persists a newly
        # created row itself, so the extra unconditional save() the old code
        # did was a redundant UPDATE.
        setting, created = NoticeSetting.objects.get_or_create(
            user=user, notice_type=notice_type, medium=medium, send=default)
        return setting
def should_send(user, notice_type, medium):
    """Return True when this medium is enabled for the user and notice type.

    Inactive users never receive notices.
    """
    if not user.is_active:
        return False
    setting = get_notification_setting(user, notice_type, medium)
    return setting.send
class NoticeManager(models.Manager):

    def notices_for(self, user, archived=False, unseen=None, on_site=None):
        """
        Return the Notice queryset for *user*.

        archived=False (the default) excludes archived notices, while
        archived=True includes them.  unseen and on_site filter on those
        flags when given; None leaves them unfiltered.
        """
        qs = self.filter(user=user)
        if not archived:
            qs = qs.filter(archived=False)
        if unseen is not None:
            qs = qs.filter(unseen=unseen)
        if on_site is not None:
            qs = qs.filter(on_site=on_site)
        return qs

    def unseen_count_for(self, user, **kwargs):
        """
        Return how many unseen notices *user* has, without marking them seen.
        """
        return self.notices_for(user, unseen=True, **kwargs).count()
class Notice(models.Model):
    """A single notification delivered to one user."""

    user = models.ForeignKey(User, verbose_name=_('user'))
    message = models.TextField(_('message'))
    notice_type = models.ForeignKey(NoticeType, verbose_name=_('notice type'))
    added = models.DateTimeField(_('added'), default=datetime.datetime.now)
    unseen = models.BooleanField(_('unseen'), default=True)
    archived = models.BooleanField(_('archived'), default=False)
    on_site = models.BooleanField(_('on site'))

    objects = NoticeManager()

    def __unicode__(self):
        return self.message

    def archive(self):
        # Mark archived and persist immediately.
        self.archived = True
        self.save()

    def is_unseen(self):
        """
        returns value of self.unseen but also changes it to false.
        Use this in a template to mark an unseen notice differently the first
        time it is shown.
        """
        unseen = self.unseen
        if unseen:
            self.unseen = False
            self.save()
        return unseen

    class Meta:
        ordering = ["-added"]
        verbose_name = _("notice")
        verbose_name_plural = _("notices")

    def get_absolute_url(self):
        # Named-URL form consumed by models.permalink below.
        return ("notification_notice", [str(self.pk)])
    get_absolute_url = models.permalink(get_absolute_url)
class NoticeQueueBatch(models.Model):
    """
    A queued notice.
    Denormalized data for a notice.
    """
    # base64-encoded pickle of a list of (user_pk, label, extra_context,
    # on_site) tuples — see queue() below, which produces this format.
    pickled_data = models.TextField()
def create_notice_type(label, display, description, default=2, verbosity=1):
    """
    Creates a new NoticeType.
    This is intended to be used by other apps as a post_syncdb manangement step.

    Existing types are updated in place when display, description or default
    changed; verbosity > 1 prints what was done.
    """
    try:
        notice_type = NoticeType.objects.get(label=label)
        updated = False
        if display != notice_type.display:
            notice_type.display = display
            updated = True
        if description != notice_type.description:
            notice_type.description = description
            updated = True
        if default != notice_type.default:
            notice_type.default = default
            updated = True
        if updated:
            notice_type.save()
            if verbosity > 1:
                print "Updated %s NoticeType" % label
    except NoticeType.DoesNotExist:
        NoticeType(label=label, display=display, description=description, default=default).save()
        if verbosity > 1:
            print "Created %s NoticeType" % label
def get_notification_language(user):
    """
    Returns site-specific notification language for this user. Raises
    LanguageStoreNotAvailable if this site does not use translated
    notifications.
    """
    if getattr(settings, 'NOTIFICATION_LANGUAGE_MODULE', False):
        try:
            app_label, model_name = settings.NOTIFICATION_LANGUAGE_MODULE.split('.')
            model = models.get_model(app_label, model_name)
        except (ImportError, ImproperlyConfigured, ValueError):
            # ValueError covers a malformed "app.Model" setting string.
            raise LanguageStoreNotAvailable
        # get_model returns None for an unknown model; the old code then
        # crashed with AttributeError (and referenced `model` in its except
        # clause before it was known to be bound).
        if model is None:
            raise LanguageStoreNotAvailable
        try:
            language_model = model._default_manager.get(user__id__exact=user.id)
        except model.DoesNotExist:
            raise LanguageStoreNotAvailable
        if hasattr(language_model, 'language'):
            return language_model.language
    raise LanguageStoreNotAvailable
def get_formatted_messages(formats, label, context):
    """
    Return a dict mapping each format identifier to its fully rendered
    template, using the given context.
    """
    rendered = {}
    for fmt in formats:
        # Plain-text templates must not be HTML-escaped.
        context.autoescape = not fmt.endswith(".txt")
        rendered[fmt] = render_to_string((
            'notification/%s/%s' % (label, fmt),
            'notification/%s' % fmt), context_instance=context)
    return rendered
def send_now(users, label, extra_context=None, on_site=True):
    """
    Creates a new notice.
    This is intended to be how other apps create new notices.
    notification.send(user, 'friends_invite_sent', {
        'spam': 'eggs',
        'foo': 'bar',
    )
    You can pass in on_site=False to prevent the notice emitted from being
    displayed on the site.
    """
    if extra_context is None:
        extra_context = {}
    notice_type = NoticeType.objects.get(label=label)
    current_site = Site.objects.get_current()
    notices_url = u"http://%s%s" % (
        unicode(current_site),
        reverse("notification_notices"),
    )
    current_language = get_language()
    formats = (
        'short.txt',
        'full.txt',
        'notice.html',
        'full.html',
        'email_full.html',
    )  # TODO make formats configurable
    for user in users:
        recipients = []
        # get user language for user from language store defined in
        # NOTIFICATION_LANGUAGE_MODULE setting
        try:
            language = get_notification_language(user)
        except LanguageStoreNotAvailable:
            language = None
        if language is not None:
            # activate the user's language
            activate(language)
        # update context with user specific translations
        context = Context({
            "user": user,
            "notice": ugettext(notice_type.display),
            "notices_url": notices_url,
            "current_site": current_site,
            'MEDIA_URL': settings.MEDIA_URL,
        })
        context.update(extra_context)
        # get prerendered format messages
        messages = get_formatted_messages(formats, label, context)
        # Strip newlines from subject
        subject = ''.join(render_to_string('notification/email_subject.txt', {
            'message': messages['short.txt'],
        }, context).splitlines())
        body = render_to_string('notification/email_body.txt', {
            'message': messages['full.txt'],
        }, context)
        html = render_to_string('notification/email_body.html', {
            'message': messages['email_full.html'],
        }, context)
        #notice = Notice.objects.create(user=user, message=messages['notice.html'], notice_type=notice_type, on_site=on_site)
        if should_send(user, notice_type, "1") and user.email \
            and user.is_active: # Email
            recipients.append(user.email)
        # NOTE(review): when the user opted out, recipients is empty and
        # msg.send() should be a no-op — confirm against the mail backend.
        msg = EmailMultiAlternatives(subject, body,
                                     settings.DEFAULT_FROM_EMAIL,
                                     recipients)
        msg.attach_alternative(html, "text/html")
        msg.send()
    # reset environment to original language
    activate(current_language)
def send(*args, **kwargs):
    """
    A basic interface around both queue and send_now. This honors a global
    flag NOTIFICATION_QUEUE_ALL that helps determine whether all calls should
    be queued or not. A per call ``queue`` or ``now`` keyword argument can be
    used to always override the default global behavior.
    """
    queue_flag = kwargs.pop("queue", False)
    now_flag = kwargs.pop("now", False)
    assert not (queue_flag and now_flag), "'queue' and 'now' cannot both be True."
    # Per-call overrides win; otherwise fall back to the global default.
    if queue_flag:
        return queue(*args, **kwargs)
    if now_flag:
        return send_now(*args, **kwargs)
    if QUEUE_ALL:
        return queue(*args, **kwargs)
    return send_now(*args, **kwargs)
def queue(users, label, extra_context=None, on_site=True):
    """
    Queue the notification in NoticeQueueBatch. This allows for large amounts
    of user notifications to be deferred to a seperate process running outside
    the webserver.
    """
    if extra_context is None:
        extra_context = {}
    # Store primary keys only, so the batch can be unpickled later without
    # holding on to model instances.
    if isinstance(users, QuerySet):
        user_pks = [row["pk"] for row in users.values("pk")]
    else:
        user_pks = [user.pk for user in users]
    notices = [(pk, label, extra_context, on_site) for pk in user_pks]
    NoticeQueueBatch(pickled_data=pickle.dumps(notices).encode("base64")).save()
class ObservedItemManager(models.Manager):

    def all_for(self, observed, signal):
        """
        Returns all ObservedItems for an observed object,
        to be sent when a signal is emited.
        """
        content_type = ContentType.objects.get_for_model(observed)
        observed_items = self.filter(content_type=content_type, object_id=observed.id, signal=signal)
        return observed_items

    def get_for(self, observed, observer, signal):
        # Single registration for (observed, observer, signal); raises
        # DoesNotExist / MultipleObjectsReturned like QuerySet.get().
        content_type = ContentType.objects.get_for_model(observed)
        observed_item = self.get(content_type=content_type, object_id=observed.id, user=observer, signal=signal)
        return observed_item
class ObservedItem(models.Model):
    """Registration of one user watching one object for a given signal."""

    user = models.ForeignKey(User, verbose_name=_('user'))
    content_type = models.ForeignKey(ContentType)
    object_id = models.PositiveIntegerField()
    observed_object = generic.GenericForeignKey('content_type', 'object_id')
    notice_type = models.ForeignKey(NoticeType, verbose_name=_('notice type'))
    added = models.DateTimeField(_('added'), default=datetime.datetime.now)

    # the signal that will be listened to send the notice
    signal = models.TextField(verbose_name=_('signal'))

    objects = ObservedItemManager()

    class Meta:
        ordering = ['-added']
        verbose_name = _('observed item')
        verbose_name_plural = _('observed items')

    def send_notice(self):
        # Deliver this observer's notice about the watched object.
        send([self.user], self.notice_type.label,
             {'observed': self.observed_object})
def observe(observed, observer, notice_type_label, signal='post_save'):
    """
    Create a new ObservedItem.
    To be used by applications to register a user as an observer for some object.

    Returns the saved ObservedItem.
    """
    item = ObservedItem(
        user=observer,
        observed_object=observed,
        notice_type=NoticeType.objects.get(label=notice_type_label),
        signal=signal)
    item.save()
    return item
def stop_observing(observed, observer, signal='post_save'):
    """Remove the registration for (observed, observer, signal)."""
    ObservedItem.objects.get_for(observed, observer, signal).delete()
def send_observation_notices_for(observed, signal='post_save'):
    """
    Send a notice to every registered observer of *observed* and return the
    matching ObservedItems.
    """
    items = ObservedItem.objects.all_for(observed, signal)
    for item in items:
        item.send_notice()
    return items
def is_observing(observed, observer, signal='post_save'):
    """
    Return True when *observer* has at least one registration on *observed*
    for the given signal. Anonymous users never observe anything.
    """
    if isinstance(observer, AnonymousUser):
        return False
    try:
        # Only existence matters; the fetched item itself was an unused
        # local in the original code.
        ObservedItem.objects.get_for(observed, observer, signal)
        return True
    except ObservedItem.DoesNotExist:
        return False
    except ObservedItem.MultipleObjectsReturned:
        # Duplicate registrations still mean "observing".
        return True
def handle_observations(sender, instance, *args, **kw):
    """Signal receiver: fan notices for *instance* out to its observers."""
    send_observation_notices_for(instance)
| soad241/django-notification | notification/models.py | Python | mit | 14,929 | 0.002612 |
# coding=utf-8
# Copyright 2018 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for eager_pg.trajectory_batch_stats.
Note that the explicit .numpy() casting also implicitly checks that the methods
all return tensors and not numpy arrays.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from absl.testing import parameterized
import numpy as np
from eager_pg import trajectory_batch_stats
import tensorflow as tf
tbs = trajectory_batch_stats
# Rows are timesteps, columns are trajectories; the per-column sums give
# trajectory lengths of 3, 5, 2 and 1 (see expected_lengths in the tests).
TEST_MASK = [[1, 1, 1, 1],
             [1, 1, 1, 0],
             [1, 1, 0, 0],
             [0, 1, 0, 0],
             [0, 1, 0, 0]]  # pyformat: disable

# Generally masks will be floats so we can easily multiply tensors.
NP_TEST_MASK = np.array(TEST_MASK, dtype=np.float32)
class TrajectoryBatchStatsTest(tf.test.TestCase, parameterized.TestCase):
  """Tests to ensure that statistics on batches of trajectory are correct."""

  @property
  def expected_lengths(self):
    # Per-trajectory lengths matching the column sums of TEST_MASK.
    return tf.constant([3, 5, 2, 1], dtype=tf.float32)

  def test_get_trajectory_lengths(self):
    """Checks if the length of each trajectory in the batch is correct."""
    # The same mask is exercised as tf/np tensors in float32 and float64.
    # pylint: disable=invalid-name
    TF_TEST_MASK = tf.constant(NP_TEST_MASK)
    TF_TEST_MASK_TF_F64 = tf.cast(TF_TEST_MASK, tf.float64)
    NP_TEST_MASK_NP_F64 = NP_TEST_MASK.astype(np.float64)
    ALL_MASKS = [
        TF_TEST_MASK, NP_TEST_MASK, TF_TEST_MASK_TF_F64, NP_TEST_MASK_NP_F64
    ]
    # pylint: enable=invalid-name
    for mask in ALL_MASKS:
      computed_lengths = tbs.get_trajectory_lengths(mask)
      self.assertTrue(np.allclose(computed_lengths, self.expected_lengths))

  def run_without_lengths(self, stats_function, args):
    """Helper function to run stats."""
    return stats_function(*args)

  def run_with_lengths(self, stats_function, args):
    """Helper function to run stats with precomputed lengths."""
    return stats_function(*args, trajectory_lengths=self.expected_lengths)

  @parameterized.named_parameters(
      dict(
          testcase_name='rewards',
          raw_batch=np.array([[1, 2, 3, 4]] * 5).astype(np.float32),
          statistic_function=tbs.reward_summaries,
          # Hand-computed from TEST_MASK column sums 3, 5, 2, 1.
          expected_results_with_traj={
              'mean_step_reward': (3. / 3 + 10. / 5 + 6. / 2 + 4. / 1) / 4.0,
              'mean_trajectory_reward': (3. + 10. + 6. + 4.) / 4.0,
              'stderr_trajectory_reward': np.sqrt(np.sum(
                  (np.array([3., 10., 6., 4.]) -
                   (3. + 10. + 6. + 4.) / 4.0)**2 / 3) / 4)
          },
          expected_results_no_traj={
              'mean_trajectory_reward': (5 + 10 + 15 + 20) / 4.0,
              'mean_step_reward': (1 + 2 + 3 + 4) / 4.0
          }),
      dict(
          testcase_name='entropies',
          raw_batch=np.array([[1, 2, 3, 4]] * 5).astype(np.float32),
          statistic_function=tbs.entropy_summaries,
          expected_results_with_traj={
              'mean_step_entropy': (3. / 3 + 10. / 5 + 6. / 2 + 4. / 1) / 4.0,
              'mean_trajectory_entropy': (3. + 10. + 6. + 4.) / 4.0
          }),
  )
  def test_calculations(self,
                        raw_batch,
                        statistic_function,
                        expected_results_with_traj,
                        expected_results_no_traj=None):  # pylint: disable=g-doc-args
    """Test calculations of statistc_name on raw_batch using statistic_function.
    """
    stats = []
    stats.append(
        self.run_with_lengths(statistic_function, (raw_batch, NP_TEST_MASK)))
    stats.append(
        self.run_without_lengths(statistic_function, (raw_batch, NP_TEST_MASK)))
    for stat in stats:
      for expected_key in expected_results_with_traj.keys():
        self.assertAllClose(stat[expected_key].numpy(),
                            expected_results_with_traj[expected_key])
    if expected_results_no_traj is not None:
      # Without a mask the statistic is computed over the full batch.
      stat = self.run_without_lengths(statistic_function, (raw_batch,))
      for expected_key in expected_results_no_traj.keys():
        self.assertAllClose(stat[expected_key].numpy(),
                            expected_results_no_traj[expected_key])

  def test_reward_calculations_errors(self):
    """Ensures that the reward calculations return the correct errors."""
    rewards_as_list = [[1, 2, 3, 4]] * 5
    self.assertRaises(TypeError, tbs.reward_summaries, rewards_as_list, None)
    rewards_as_numpy = np.array(rewards_as_list)
    rewards_as_numpy_wrong_shape = np.expand_dims(rewards_as_numpy, 1)
    self.assertRaises(ValueError, tbs.reward_summaries,
                      rewards_as_numpy_wrong_shape, None)

  # TODO(zaf): Find a way to @parameterized this?
  def test_returns_calculations(self):
    """Checks mean and standard error of returns for a hand-built batch."""
    test_returns = np.array([[0.125, 1.875, 0.25, 1.5], [0.25, 1.75, 0.5, 1.0],
                             [0.5, 1.5, 1.0, 0.0]])
    stats = tbs.return_summaries(test_returns)
    expected_mean_return = (0.125 + 1.875 + 0.25 + 1.5) / 4.0
    self.assertEqual(stats['mean_trajectory_return'].numpy(),
                     expected_mean_return)
    # Sample variance over the first row (ddof=1), then standard error.
    pop_variance = np.sum((test_returns[0] - expected_mean_return)**2 / 3)
    standard_error = np.sqrt(pop_variance) / np.sqrt(4)
    self.assertTrue(
        np.allclose(stats['stderr_trajectory_return'].numpy(), standard_error))
if __name__ == '__main__':
  # The statistics under test return eager tensors (.numpy() is called above).
  tf.enable_eager_execution()
  tf.test.main()
| google-research/policy-learning-landscape | eager_pg/trajectory_batch_stats_test.py | Python | apache-2.0 | 5,906 | 0.003725 |
#!/usr/bin/env python
# This file is part of nexdatas - Tango Server for NeXus data writer
#
# Copyright (C) 2012-2017 DESY, Jan Kotanski <jkotan@mail.desy.de>
#
# nexdatas is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# nexdatas is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with nexdatas. If not, see <http://www.gnu.org/licenses/>.
# \package test nexdatas
# \file DefinitionDlgTest.py
# unittests for field Tags running Tango Server
#
import unittest
import os
import sys
import random
import struct
import binascii
import time
from PyQt5.QtTest import QTest
from PyQt5.QtWidgets import (QApplication, QMessageBox, QTableWidgetItem,
QPushButton)
from PyQt5.QtCore import Qt, QTimer
from PyQt5.QtXml import QDomDocument
from nxsconfigtool.DefinitionDlg import DefinitionDlg
from nxsconfigtool.ComponentModel import ComponentModel
from nxsconfigtool.AttributeDlg import AttributeDlg
from nxsconfigtool.NodeDlg import NodeDlg
# from nxsconfigtool.ui.ui_definitiondlg import Ui_DefinitionDlg
from nxsconfigtool.DomTools import DomTools
# Qt-application (created lazily by the test runner)
app = None

# Python 2/3 compatibility aliases used by the tests below.
if sys.version_info > (3,):
    unicode = str
    long = int

# if 64-bit machine
IS64BIT = (struct.calcsize("P") == 8)
class TestView(object):
    """Minimal stand-in for a Qt view: it records expand() calls and hands
    back whatever index/model the test configured."""

    def __init__(self, model):
        # index returned by currentIndex(); tests assign it directly
        self.testIndex = None
        # model returned by model()
        self.testModel = model
        # flat log of calls performed on this view
        self.stack = []

    def currentIndex(self):
        """Return the index previously set by the test (None by default)."""
        return self.testIndex

    def model(self):
        """Return the model handed in at construction."""
        return self.testModel

    def expand(self, index):
        """Record an expand request instead of touching any widget."""
        self.stack.extend(["expand", index])
# test fixture
class DefinitionDlgTest(unittest.TestCase):
# constructor
# \param methodName name of the test method
    def __init__(self, methodName):
        unittest.TestCase.__init__(self, methodName)
        # platform-dependent default integer/float type names
        self._bint = "int64" if IS64BIT else "int32"
        self._buint = "uint64" if IS64BIT else "uint32"
        self._bfloat = "float64" if IS64BIT else "float32"
        # MessageBox text
        self.text = None
        # MessageBox title
        self.title = None
        # attribute name
        self.aname = "myname"
        # attribute value
        self.avalue = "myentry"
        # action status
        self.performed = False

        # Seed the per-test RNG from OS entropy; fall back to the clock when
        # os.urandom is unavailable.
        try:
            self.__seed = long(binascii.hexlify(os.urandom(16)), 16)
        except NotImplementedError:
            self.__seed = long(time.time() * 256)
        self.__rnd = random.Random(self.__seed)
# test starter
# \brief Common set up
    def setUp(self):
        """Print the seed so failing runs can be reproduced."""
        print("\nsetting up...")
        print("SEED = %s" % self.__seed)
# test closer
# \brief Common tear down
    def tearDown(self):
        """No per-test cleanup needed; just log."""
        print("tearing down ...")
    def checkMessageBox(self):
        """Timer callback: capture the active QMessageBox's text/title, close it."""
        # self.assertEqual(QApplication.activeWindow(), None)
        mb = QApplication.activeModalWidget()
        self.assertTrue(isinstance(mb, QMessageBox))
        # print mb.text()
        self.text = mb.text()
        self.title = mb.windowTitle()
        mb.close()
    def rmAttributeWidget(self):
        """Timer callback: confirm the remove-attribute box by clicking Yes."""
        # aw =
        QApplication.activeWindow()
        mb = QApplication.activeModalWidget()
        # print "CLASS", mb
        # print "CLASS2", aw
        self.assertTrue(isinstance(mb, QMessageBox))
        self.text = mb.text()
        self.title = mb.windowTitle()
        QTest.mouseClick(mb.button(QMessageBox.Yes), Qt.LeftButton)
    def rmAttributeWidgetClose(self):
        """Timer callback: dismiss the remove-attribute box by clicking No."""
        # aw =
        QApplication.activeWindow()
        mb = QApplication.activeModalWidget()
        self.assertTrue(isinstance(mb, QMessageBox))
        self.text = mb.text()
        self.title = mb.windowTitle()
        QTest.mouseClick(mb.button(QMessageBox.No), Qt.LeftButton)
    def attributeWidget(self):
        """Timer callback: fill the AttributeDlg with aname/avalue and accept."""
        # aw = QApplication.activeWindow()
        mb = QApplication.activeModalWidget()
        self.assertTrue(isinstance(mb, AttributeDlg))

        QTest.keyClicks(mb.ui.nameLineEdit, self.aname)
        self.assertEqual(mb.ui.nameLineEdit.text(), self.aname)
        QTest.keyClicks(mb.ui.valueLineEdit, self.avalue)
        self.assertEqual(mb.ui.valueLineEdit.text(), self.avalue)
        mb.accept()
    def attributeWidgetClose(self):
        """Timer callback: fill the AttributeDlg but reject it instead."""
        # aw =
        QApplication.activeWindow()
        mb = QApplication.activeModalWidget()
        self.assertTrue(isinstance(mb, AttributeDlg))

        QTest.keyClicks(mb.ui.nameLineEdit, self.aname)
        self.assertEqual(mb.ui.nameLineEdit.text(), self.aname)
        QTest.keyClicks(mb.ui.valueLineEdit, self.avalue)
        self.assertEqual(mb.ui.valueLineEdit.text(), self.avalue)

        # mb.close()
        mb.reject()
        # mb.accept()
# constructor test
# \brief It tests default settings
    def test_constructor(self):
        """A fresh DefinitionDlg starts empty and inherits NodeDlg behavior."""
        fun = sys._getframe().f_code.co_name
        print("Run: %s.%s() " % (self.__class__.__name__, fun))
        form = DefinitionDlg()
        form.show()
        self.assertEqual(form.name, '')
        self.assertEqual(form.content, '')
        self.assertEqual(form.doc, '')
        self.assertEqual(form.attributes, {})
        self.assertEqual(form.node, None)
        self.assertEqual(form.root, None)
        self.assertEqual(form.view, None)
        self.assertEqual(form.subItems,
                         ["group", "field", "attribute", "link", "component",
                          "doc", "symbols"])
        self.assertEqual(form.ui.__class__.__name__, "Ui_DefinitionDlg")
        self.assertTrue(isinstance(form, NodeDlg))
        self.assertEqual(form.externalApply, None)
        self.assertEqual(form.externalDSLink, None)
        # The element-manipulation hooks must be the inherited (unoverridden)
        # NodeDlg implementations.
        self.assertEqual(form.replaceText,
                         super(DefinitionDlg, form).replaceText)
        self.assertEqual(form.removeElement,
                         super(DefinitionDlg, form).removeElement)
        self.assertEqual(form.replaceElement,
                         super(DefinitionDlg, form).replaceElement)
        self.assertEqual(form.appendElement,
                         super(DefinitionDlg, form).appendElement)
        self.assertEqual(form.reset, super(DefinitionDlg, form).reset)
# constructor test
# \brief It tests default settings
    def test_constructor_accept(self):
        """Typing into the GUI and clicking Apply keeps the dialog open."""
        fun = sys._getframe().f_code.co_name
        print("Run: %s.%s() " % (self.__class__.__name__, fun))
        form = DefinitionDlg()
        form.show()
        self.assertEqual(form.name, '')
        self.assertEqual(form.content, '')
        self.assertEqual(form.doc, '')
        self.assertEqual(form.attributes, {})
        self.assertEqual(form.subItems,
                         ["group", "field", "attribute",
                          "link", "component", "doc", "symbols"])
        self.assertEqual(form.ui.__class__.__name__, "Ui_DefinitionDlg")
        form.createGUI()
        self.assertTrue(not form.ui.nameLineEdit.text())
        self.assertTrue(not form.ui.contentTextEdit.toPlainText())
        self.assertTrue(form.ui.applyPushButton.isEnabled())
        self.assertTrue(form.ui.resetPushButton.isEnabled())

        name = "myname"
        content = "$components.default"
        QTest.keyClicks(form.ui.nameLineEdit, name)
        self.assertEqual(form.ui.nameLineEdit.text(), name)
        QTest.keyClicks(form.ui.contentTextEdit, content)
        self.assertEqual(form.ui.contentTextEdit.toPlainText(), content)

        self.assertTrue(bool(form.ui.nameLineEdit.text()))
        self.assertTrue(bool(form.ui.contentTextEdit.toPlainText()))

        QTest.mouseClick(form.ui.applyPushButton, Qt.LeftButton)
        # form.apply()
        # self.assertEqual(form.name, name)
        # result() == 0: Apply does not close/accept the dialog.
        self.assertEqual(form.result(), 0)
# constructor test
# \brief It tests default settings
    def test_updateForm(self):
        """updateForm() copies the dialog's attributes into its widgets."""
        fun = sys._getframe().f_code.co_name
        print("Run: %s.%s() " % (self.__class__.__name__, fun))
        form = DefinitionDlg()
        form.show()
        self.assertEqual(form.name, '')
        self.assertEqual(form.content, '')
        self.assertEqual(form.doc, '')
        self.assertEqual(form.attributes, {})
        self.assertEqual(form.subItems,
                         ["group", "field", "attribute",
                          "link", "component", "doc", "symbols"])
        self.assertEqual(form.ui.__class__.__name__, "Ui_DefinitionDlg")
        form.createGUI()
        self.assertTrue(not form.ui.nameLineEdit.text())
        self.assertTrue(not form.ui.contentTextEdit.toPlainText())
        self.assertTrue(not form.ui.docTextEdit.toPlainText())

        name = "myname"
        content = "NXEntry"
        doc = "My documentation: \n ble ble ble "
        attributes = {"myattr": "myvalue", "myattr2": "myvalue2",
                      "myattr3": "myvalue3"}

        # With everything empty updateForm is a no-op.
        self.assertEqual(form.updateForm(), None)
        self.assertTrue(not form.ui.nameLineEdit.text())
        self.assertTrue(not form.ui.contentTextEdit.toPlainText())
        self.assertTrue(not form.ui.docTextEdit.toPlainText())

        # Setting an attribute alone does not touch the widgets until
        # updateForm() is called.
        form.name = name
        self.assertTrue(not form.ui.nameLineEdit.text())
        self.assertTrue(not form.ui.contentTextEdit.toPlainText())
        self.assertTrue(not form.ui.docTextEdit.toPlainText())
        self.assertEqual(form.updateForm(), None)
        self.assertEqual(form.ui.nameLineEdit.text(), name)
        self.assertTrue(not form.ui.contentTextEdit.toPlainText())
        self.assertTrue(not form.ui.docTextEdit.toPlainText())
        form.ui.nameLineEdit.setText("")
        form.name = ""

        form.content = content
        self.assertTrue(not form.ui.nameLineEdit.text())
        self.assertTrue(not form.ui.contentTextEdit.toPlainText())
        self.assertTrue(not form.ui.docTextEdit.toPlainText())
        self.assertEqual(form.updateForm(), None)
        self.assertTrue(not form.ui.nameLineEdit.text())
        self.assertEqual(form.ui.contentTextEdit.toPlainText(), content)
        self.assertTrue(not form.ui.docTextEdit.toPlainText())
        form.ui.contentTextEdit.setText("")

        form.doc = doc
        form.content = ""
        self.assertTrue(not form.ui.nameLineEdit.text())
        self.assertTrue(not form.ui.contentTextEdit.toPlainText())
        self.assertTrue(not form.ui.docTextEdit.toPlainText())
        self.assertEqual(form.updateForm(), None)
        self.assertTrue(not form.ui.nameLineEdit.text())
        self.assertTrue(not form.ui.contentTextEdit.toPlainText())
        self.assertEqual(form.ui.docTextEdit.toPlainText(), doc)
        form.ui.docTextEdit.setText("")

        # All fields together, including the attribute table.
        form.name = name
        form.doc = doc
        form.content = content
        form.attributes = attributes
        self.assertTrue(not form.ui.nameLineEdit.text())
        self.assertTrue(not form.ui.contentTextEdit.toPlainText())
        self.assertTrue(not form.ui.docTextEdit.toPlainText())
        self.assertEqual(form.updateForm(), None)
        self.assertEqual(form.ui.contentTextEdit.toPlainText(), content)
        self.assertEqual(form.ui.nameLineEdit.text(), name)
        self.assertEqual(form.ui.docTextEdit.toPlainText(), doc)

        self.assertEqual(form.ui.attributeTableWidget.columnCount(), 2)
        self.assertEqual(
            form.ui.attributeTableWidget.rowCount(), len(attributes))
        for i in range(len(attributes)):
            it = form.ui.attributeTableWidget.item(i, 0)
            k = str(it.text())
            self.assertTrue(k in attributes.keys())
            it2 = form.ui.attributeTableWidget.item(i, 1)
            self.assertEqual(it2.text(), attributes[k])
        self.assertEqual(form.ui.docTextEdit.toPlainText(), doc)

        QTest.mouseClick(form.ui.applyPushButton, Qt.LeftButton)
        self.assertEqual(form.result(), 0)
# constructor test
# \brief It tests default settings
    def test_getState(self):
        """getState() snapshots (name, content, doc, attributes) without
        reading or touching the widgets."""
        fun = sys._getframe().f_code.co_name
        print("Run: %s.%s() " % (self.__class__.__name__, fun))
        form = DefinitionDlg()
        form.show()
        self.assertEqual(form.name, '')
        self.assertEqual(form.content, '')
        self.assertEqual(form.doc, '')
        self.assertEqual(form.attributes, {})
        self.assertEqual(
            form.subItems,
            ["group", "field", "attribute", "link", "component", "doc",
             "symbols"])
        self.assertEqual(form.ui.__class__.__name__, "Ui_DefinitionDlg")
        form.createGUI()

        name = "myname"
        content = "NXEntry"
        doc = "My documentation: \n ble ble ble "
        attributes = {"myattr": "myvalue", "myattr2": "myvalue2",
                      "myattr3": "myvalue3"}

        self.assertEqual(form.getState(), ('', '', '', {}))
        self.assertTrue(not form.ui.nameLineEdit.text())
        self.assertTrue(not form.ui.contentTextEdit.toPlainText())
        self.assertTrue(not form.ui.docTextEdit.toPlainText())

        form.name = name
        self.assertEqual(form.getState(), (name, '', '', {}))
        self.assertTrue(not form.ui.nameLineEdit.text())
        self.assertTrue(not form.ui.contentTextEdit.toPlainText())
        self.assertTrue(not form.ui.docTextEdit.toPlainText())

        form.name = ""
        form.content = content
        self.assertEqual(form.getState(), ('', content, '', {}))
        self.assertTrue(not form.ui.nameLineEdit.text())
        self.assertTrue(not form.ui.contentTextEdit.toPlainText())
        self.assertTrue(not form.ui.docTextEdit.toPlainText())

        form.doc = doc
        form.content = ""
        self.assertEqual(form.getState(), ('', '', doc, {}))
        self.assertTrue(not form.ui.nameLineEdit.text())
        self.assertTrue(not form.ui.contentTextEdit.toPlainText())
        self.assertTrue(not form.ui.docTextEdit.toPlainText())

        # Attributes are returned as an equal dict...
        form.doc = ""
        form.content = ""
        form.attributes = attributes
        state = form.getState()
        self.assertEqual(state[0], '')
        self.assertEqual(state[1], '')
        self.assertEqual(state[2], '')
        self.assertEqual(len(state), 4)
        self.assertEqual(len(state[3]), len(attributes))
        for at in attributes:
            self.assertEqual(attributes[at], state[3][at])
        self.assertTrue(not form.ui.nameLineEdit.text())
        self.assertTrue(not form.ui.contentTextEdit.toPlainText())
        self.assertTrue(not form.ui.docTextEdit.toPlainText())

        # ... but as a copy, not the same dict object.
        form.name = name
        form.doc = doc
        form.content = content
        form.attributes = attributes
        state = form.getState()
        self.assertEqual(state[0], name)
        self.assertEqual(state[1], content)
        self.assertEqual(state[2], doc)
        self.assertEqual(len(state), 4)
        self.assertTrue(state[3] is not attributes)
        self.assertEqual(len(state[3]), len(attributes))
        for at in attributes:
            self.assertEqual(attributes[at], state[3][at])
        self.assertTrue(not form.ui.nameLineEdit.text())
        self.assertTrue(not form.ui.contentTextEdit.toPlainText())
        self.assertTrue(not form.ui.docTextEdit.toPlainText())

        QTest.mouseClick(form.ui.applyPushButton, Qt.LeftButton)
        self.assertEqual(form.result(), 0)
# constructor test
# \brief It tests default settings
def test_setState(self):
fun = sys._getframe().f_code.co_name
print("Run: %s.%s() " % (self.__class__.__name__, fun))
form = DefinitionDlg()
form.show()
self.assertEqual(form.name, '')
self.assertEqual(form.content, '')
self.assertEqual(form.doc, '')
self.assertEqual(form.attributes, {})
self.assertEqual(
form.subItems,
["group", "field", "attribute",
"link", "component", "doc", "symbols"])
self.assertEqual(form.ui.__class__.__name__, "Ui_DefinitionDlg")
form.createGUI()
name = "myname"
content = "NXEntry"
doc = "My documentation: \n ble ble ble "
attributes = {"myattr": "myvalue", "myattr2": "myvalue2",
"myattr3": "myvalue3"}
self.assertEqual(form.setState(['', '', '', {}]), None)
self.assertTrue(not form.ui.nameLineEdit.text())
self.assertTrue(not form.ui.contentTextEdit.toPlainText())
self.assertTrue(not form.ui.docTextEdit.toPlainText())
self.assertEqual(form.setState([name, '', '', {}]), None)
self.assertTrue(not form.ui.nameLineEdit.text())
self.assertTrue(not form.ui.contentTextEdit.toPlainText())
self.assertTrue(not form.ui.docTextEdit.toPlainText())
self.assertEqual(form.name, name)
self.assertEqual(form.content, '')
self.assertEqual(form.doc, '')
self.assertEqual(form.attributes, {})
form.name = ""
self.assertEqual(form.setState(['', content, '', {}]), None)
self.assertTrue(not form.ui.nameLineEdit.text())
self.assertTrue(not form.ui.contentTextEdit.toPlainText())
self.assertTrue(not form.ui.docTextEdit.toPlainText())
self.assertEqual(form.ui.attributeTableWidget.columnCount(), 2)
self.assertEqual(form.ui.attributeTableWidget.rowCount(), 0)
self.assertEqual(form.name, '')
self.assertEqual(form.content, content)
self.assertEqual(form.doc, '')
self.assertEqual(form.attributes, {})
form.content = ""
self.assertEqual(form.setState(['', '', doc, {}]), None)
self.assertTrue(not form.ui.nameLineEdit.text())
self.assertTrue(not form.ui.contentTextEdit.toPlainText())
self.assertTrue(not form.ui.docTextEdit.toPlainText())
self.assertEqual(form.name, '')
self.assertEqual(form.content, '')
self.assertEqual(form.doc, doc)
self.assertEqual(form.attributes, {})
form.doc = ""
# form.attributes = attributes
self.assertEqual(form.setState(['', '', '', attributes]), None)
self.assertTrue(not form.ui.nameLineEdit.text())
self.assertTrue(not form.ui.contentTextEdit.toPlainText())
self.assertTrue(not form.ui.docTextEdit.toPlainText())
self.assertEqual(form.name, '')
self.assertEqual(form.content, '')
self.assertEqual(form.doc, '')
self.assertEqual(form.attributes, attributes)
self.assertTrue(form.attributes is not attributes)
self.assertEqual(form.setState([name, content, doc, attributes]), None)
self.assertTrue(not form.ui.nameLineEdit.text())
self.assertTrue(not form.ui.contentTextEdit.toPlainText())
self.assertTrue(not form.ui.docTextEdit.toPlainText())
self.assertEqual(form.name, name)
self.assertEqual(form.content, content)
self.assertEqual(form.doc, doc)
self.assertEqual(form.attributes, attributes)
self.assertTrue(form.attributes is not attributes)
self.assertTrue(not form.ui.nameLineEdit.text())
self.assertTrue(not form.ui.contentTextEdit.toPlainText())
self.assertTrue(not form.ui.docTextEdit.toPlainText())
QTest.mouseClick(form.ui.applyPushButton, Qt.LeftButton)
self.assertEqual(form.result(), 0)
    # createGUI test
    # \brief It tests populating the GUI widgets from the dialog state
def test_createGUI(self):
fun = sys._getframe().f_code.co_name
print("Run: %s.%s() " % (self.__class__.__name__, fun))
form = DefinitionDlg()
form.show()
form.createGUI()
self.assertTrue(not form.ui.nameLineEdit.text())
self.assertTrue(not form.ui.contentTextEdit.toPlainText())
self.assertTrue(not form.ui.docTextEdit.toPlainText())
name = "myname"
content = "NXEntry"
doc = "My documentation: \n ble ble ble "
attributes = {"myattr": "myvalue", "myattr2": "myvalue2",
"myattr3": "myvalue3"}
form = DefinitionDlg()
form.show()
form.createGUI()
self.assertEqual(form.ui.nameLineEdit.text(), '')
self.assertEqual(form.ui.contentTextEdit.toPlainText(), '')
self.assertEqual(form.ui.docTextEdit.toPlainText(), '')
form = DefinitionDlg()
form.show()
form.name = name
form.createGUI()
self.assertEqual(form.ui.nameLineEdit.text(), name)
self.assertEqual(form.ui.contentTextEdit.toPlainText(), '')
self.assertEqual(form.ui.docTextEdit.toPlainText(), '')
self.assertEqual(form.ui.attributeTableWidget.columnCount(), 2)
self.assertEqual(form.ui.attributeTableWidget.rowCount(), 0)
form = DefinitionDlg()
form.show()
form.content = content
form.createGUI()
self.assertTrue(not form.ui.nameLineEdit.text())
self.assertEqual(form.ui.contentTextEdit.toPlainText(), content)
self.assertTrue(not form.ui.docTextEdit.toPlainText())
form = DefinitionDlg()
form.show()
form.doc = doc
form.createGUI()
self.assertTrue(not form.ui.nameLineEdit.text())
self.assertTrue(not form.ui.contentTextEdit.toPlainText())
self.assertEqual(form.ui.docTextEdit.toPlainText(), doc)
form = DefinitionDlg()
form.show()
form.name = name
form.doc = doc
form.content = content
form.attributes = attributes
form.createGUI()
self.assertEqual(form.ui.contentTextEdit.toPlainText(), content)
self.assertEqual(form.ui.nameLineEdit.text(), name)
self.assertEqual(form.ui.docTextEdit.toPlainText(), doc)
self.assertEqual(form.ui.attributeTableWidget.columnCount(), 2)
self.assertEqual(
form.ui.attributeTableWidget.rowCount(), len(attributes))
for i in range(len(attributes)):
it = form.ui.attributeTableWidget.item(i, 0)
k = str(it.text())
self.assertTrue(k in attributes.keys())
it2 = form.ui.attributeTableWidget.item(i, 1)
self.assertEqual(it2.text(), attributes[k])
self.assertEqual(form.ui.docTextEdit.toPlainText(), doc)
QTest.mouseClick(form.ui.applyPushButton, Qt.LeftButton)
# form.apply()
# self.assertEqual(form.name, name)
# self.assertEqual(form.content, content)
self.assertEqual(form.result(), 0)
    # setFromNode test
    # \brief It tests reading the dialog state from the DOM node
def test_setFromNode(self):
fun = sys._getframe().f_code.co_name
print("Run: %s.%s() " % (self.__class__.__name__, fun))
dks = []
cks = []
doc = QDomDocument()
nname = "definition"
qdn = doc.createElement(nname)
nn = self.__rnd.randint(0, 9)
qdn.setAttribute("name", "myname%s" % nn)
qdn.setAttribute("type", "mytype%s" % nn)
qdn.setAttribute("unit", "myunits%s" % nn)
qdn.setAttribute("shortname", "mynshort%s" % nn)
cks.append(doc.createTextNode("$components.some%s\n" % nn))
qdn.appendChild(cks[-1])
doc.appendChild(qdn)
dname = "doc"
mdoc = doc.createElement(dname)
qdn.appendChild(mdoc)
ndcs = self.__rnd.randint(0, 10)
for n in range(ndcs):
dks.append(doc.createTextNode("\nText\n %s\n" % n))
mdoc.appendChild(dks[-1])
form = DefinitionDlg()
form.show()
form.node = qdn
self.assertEqual(form.name, '')
self.assertEqual(form.content, '')
self.assertEqual(form.doc, '')
self.assertEqual(form.attributes, {})
self.assertEqual(form.subItems,
["group", "field", "attribute", "link", "component",
"doc", "symbols"])
self.assertEqual(form.ui.__class__.__name__, "Ui_DefinitionDlg")
form.createGUI()
self.assertEqual(form.name, '')
self.assertEqual(form.content, '')
self.assertEqual(form.doc, '')
self.assertEqual(form.attributes, {})
self.assertEqual(form.subItems,
["group", "field", "attribute", "link", "component",
"doc", "symbols"])
form.setFromNode()
self.assertEqual(form.name, "myname%s" % nn)
self.assertEqual(form.content, "$components.some%s" % nn)
self.assertEqual(
form.doc,
"".join(["\nText\n %s\n" % n for n in range(ndcs)]).strip())
self.assertEqual(
form.attributes, {u'shortname': u'mynshort%s' % nn,
u'type': u'mytype%s' % nn,
u'unit': u'myunits%s' % nn})
self.assertEqual(
form.subItems,
["group", "field", "attribute", "link", "component", "doc",
"symbols"])
self.assertTrue(not form.ui.nameLineEdit.text())
self.assertTrue(not form.ui.contentTextEdit.toPlainText())
self.assertTrue(not form.ui.docTextEdit.toPlainText())
self.assertEqual(form.ui.attributeTableWidget.columnCount(), 2)
self.assertEqual(form.ui.attributeTableWidget.rowCount(), 0)
    # setFromNode test with an explicit node parameter
    # \brief It tests reading the dialog state from a passed DOM node
def test_setFromNode_parameter(self):
fun = sys._getframe().f_code.co_name
print("Run: %s.%s() " % (self.__class__.__name__, fun))
dks = []
cks = []
doc = QDomDocument()
nname = "definition"
qdn = doc.createElement(nname)
nn = self.__rnd.randint(0, 9)
qdn.setAttribute("name", "myname%s" % nn)
qdn.setAttribute("type", "mytype%s" % nn)
qdn.setAttribute("unit", "myunits%s" % nn)
qdn.setAttribute("shortname", "mynshort%s" % nn)
cks.append(doc.createTextNode("$components.some%s\n" % nn))
qdn.appendChild(cks[-1])
doc.appendChild(qdn)
dname = "doc"
mdoc = doc.createElement(dname)
qdn.appendChild(mdoc)
ndcs = self.__rnd.randint(0, 10)
for n in range(ndcs):
dks.append(doc.createTextNode("\nText\n %s\n" % n))
mdoc.appendChild(dks[-1])
form = DefinitionDlg()
form.show()
# form.node = qdn
self.assertEqual(form.name, '')
self.assertEqual(form.content, '')
self.assertEqual(form.doc, '')
self.assertEqual(form.attributes, {})
self.assertEqual(
form.subItems,
["group", "field", "attribute", "link", "component", "doc",
"symbols"])
self.assertEqual(form.ui.__class__.__name__, "Ui_DefinitionDlg")
form.createGUI()
self.assertEqual(form.name, '')
self.assertEqual(form.content, '')
self.assertEqual(form.doc, '')
self.assertEqual(form.attributes, {})
self.assertEqual(
form.subItems,
["group", "field", "attribute", "link", "component", "doc",
"symbols"])
form.setFromNode(qdn)
self.assertEqual(form.node, qdn)
self.assertEqual(form.name, "myname%s" % nn)
self.assertEqual(form.content, "$components.some%s" % nn)
self.assertEqual(
form.doc,
"".join(["\nText\n %s\n" % n for n in range(ndcs)]).strip())
self.assertEqual(
form.attributes,
{u'shortname': u'mynshort%s' % nn,
u'type': u'mytype%s' % nn, u'unit': u'myunits%s' % nn})
self.assertEqual(form.subItems,
["group", "field", "attribute", "link", "component",
"doc", "symbols"])
self.assertTrue(not form.ui.nameLineEdit.text())
self.assertTrue(not form.ui.contentTextEdit.toPlainText())
self.assertTrue(not form.ui.docTextEdit.toPlainText())
self.assertEqual(form.ui.attributeTableWidget.columnCount(), 2)
self.assertEqual(form.ui.attributeTableWidget.rowCount(), 0)
    # setFromNode test without any node
    # \brief It tests that the dialog state stays empty
def test_setFromNode_noNode(self):
fun = sys._getframe().f_code.co_name
print("Run: %s.%s() " % (self.__class__.__name__, fun))
dks = []
doc = QDomDocument()
nname = "definition"
qdn = doc.createElement(nname)
nn = self.__rnd.randint(0, 9)
qdn.setAttribute("name", "myname%s" % nn)
qdn.setAttribute("type", "mytype%s" % nn)
qdn.setAttribute("unit", "myunits%s" % nn)
qdn.setAttribute("shortname", "mynshort%s" % nn)
doc.appendChild(qdn)
dname = "doc"
mdoc = doc.createElement(dname)
qdn.appendChild(mdoc)
ndcs = self.__rnd.randint(0, 10)
for n in range(ndcs):
dks.append(doc.createTextNode("\nText\n %s\n" % n))
mdoc.appendChild(dks[-1])
form = DefinitionDlg()
form.show()
# form.node = qdn
self.assertEqual(form.name, '')
self.assertEqual(form.content, '')
self.assertEqual(form.doc, '')
self.assertEqual(form.attributes, {})
self.assertEqual(form.subItems,
["group", "field", "attribute", "link", "component",
"doc", "symbols"])
self.assertEqual(form.ui.__class__.__name__, "Ui_DefinitionDlg")
form.createGUI()
self.assertEqual(form.name, '')
self.assertEqual(form.content, '')
self.assertEqual(form.doc, '')
self.assertEqual(form.attributes, {})
self.assertEqual(form.subItems,
["group", "field", "attribute", "link", "component",
"doc", "symbols"])
form.setFromNode()
self.assertEqual(form.name, '')
self.assertEqual(form.content, '')
self.assertEqual(form.doc, '')
self.assertEqual(form.attributes, {})
self.assertEqual(form.subItems,
["group", "field", "attribute", "link", "component",
"doc", "symbols"])
self.assertTrue(not form.ui.nameLineEdit.text())
self.assertTrue(not form.ui.contentTextEdit.toPlainText())
self.assertTrue(not form.ui.docTextEdit.toPlainText())
self.assertEqual(form.ui.attributeTableWidget.columnCount(), 2)
self.assertEqual(form.ui.attributeTableWidget.rowCount(), 0)
    # setFromNode test with a bare definition node
    # \brief It tests that the dialog state stays empty
def test_setFromNode_clean(self):
fun = sys._getframe().f_code.co_name
print("Run: %s.%s() " % (self.__class__.__name__, fun))
# dks = []
doc = QDomDocument()
nname = "definition"
qdn = doc.createElement(nname)
doc.appendChild(qdn)
form = DefinitionDlg()
form.show()
form.node = qdn
self.assertEqual(form.name, '')
self.assertEqual(form.content, '')
self.assertEqual(form.doc, '')
self.assertEqual(form.attributes, {})
self.assertEqual(form.subItems,
["group", "field", "attribute", "link", "component",
"doc", "symbols"])
self.assertEqual(form.ui.__class__.__name__, "Ui_DefinitionDlg")
form.createGUI()
self.assertEqual(form.name, '')
self.assertEqual(form.content, '')
self.assertEqual(form.doc, '')
self.assertEqual(form.attributes, {})
self.assertEqual(form.subItems,
["group", "field", "attribute", "link", "component",
"doc", "symbols"])
form.setFromNode()
self.assertEqual(form.name, '')
self.assertEqual(form.content, '')
self.assertEqual(form.doc, '')
self.assertEqual(form.attributes, {})
self.assertEqual(form.subItems,
["group", "field", "attribute", "link", "component",
"doc", "symbols"])
self.assertTrue(not form.ui.nameLineEdit.text())
self.assertTrue(not form.ui.contentTextEdit.toPlainText())
self.assertTrue(not form.ui.docTextEdit.toPlainText())
self.assertEqual(form.ui.attributeTableWidget.columnCount(), 2)
self.assertEqual(form.ui.attributeTableWidget.rowCount(), 0)
    # populateAttributes test
    # \brief It tests filling the attribute table from the state
def test_populateAttribute_setFromNode(self):
fun = sys._getframe().f_code.co_name
print("Run: %s.%s() " % (self.__class__.__name__, fun))
dks = []
cks = []
doc = QDomDocument()
nname = "definition"
qdn = doc.createElement(nname)
nn = self.__rnd.randint(0, 9)
qdn.setAttribute("name", "myname%s" % nn)
qdn.setAttribute("type", "mytype%s" % nn)
qdn.setAttribute("unit", "myunits%s" % nn)
qdn.setAttribute("shortname", "mynshort%s" % nn)
cks.append(doc.createTextNode("$components.some%s\n" % nn))
qdn.appendChild(cks[-1])
doc.appendChild(qdn)
dname = "doc"
mdoc = doc.createElement(dname)
qdn.appendChild(mdoc)
ndcs = self.__rnd.randint(0, 10)
for n in range(ndcs):
dks.append(doc.createTextNode("\nText\n %s\n" % n))
mdoc.appendChild(dks[-1])
form = DefinitionDlg()
form.show()
form.node = qdn
self.assertEqual(form.name, '')
self.assertEqual(form.content, '')
self.assertEqual(form.doc, '')
self.assertEqual(form.attributes, {})
self.assertEqual(form.subItems,
["group", "field", "attribute", "link", "component",
"doc", "symbols"])
self.assertEqual(form.ui.__class__.__name__, "Ui_DefinitionDlg")
form.createGUI()
self.assertEqual(form.name, '')
self.assertEqual(form.content, '')
self.assertEqual(form.doc, '')
self.assertEqual(form.attributes, {})
self.assertEqual(form.subItems,
["group", "field", "attribute", "link", "component",
"doc", "symbols"])
form.setFromNode()
attributes = {u'shortname': u'mynshort%s' % nn,
u'type': u'mytype%s' % nn, u'unit': u'myunits%s' % nn}
self.assertEqual(form.name, "myname%s" % nn)
self.assertEqual(form.content, "$components.some%s" % nn)
self.assertEqual(
form.doc,
"".join(["\nText\n %s\n" % n for n in range(ndcs)]).strip())
self.assertEqual(form.attributes, attributes)
self.assertEqual(form.subItems,
["group", "field", "attribute", "link", "component",
"doc", "symbols"])
self.assertTrue(not form.ui.nameLineEdit.text())
self.assertTrue(not form.ui.contentTextEdit.toPlainText())
self.assertTrue(not form.ui.docTextEdit.toPlainText())
self.assertEqual(form.ui.attributeTableWidget.columnCount(), 2)
self.assertEqual(form.ui.attributeTableWidget.rowCount(), 0)
form.populateAttributes()
self.assertEqual(form.ui.attributeTableWidget.columnCount(), 2)
self.assertEqual(
form.ui.attributeTableWidget.rowCount(), len(attributes))
for i in range(len(attributes)):
it = form.ui.attributeTableWidget.item(i, 0)
k = str(it.text())
self.assertTrue(k in attributes.keys())
it2 = form.ui.attributeTableWidget.item(i, 1)
self.assertEqual(it2.text(), attributes[k])
item = form.ui.attributeTableWidget.item(
form.ui.attributeTableWidget.currentRow(), 0)
self.assertEqual(item, None)
    # populateAttributes test with an unknown selection
    # \brief It tests that no table row becomes current
def test_populateAttribute_setFromNode_selected_wrong(self):
fun = sys._getframe().f_code.co_name
print("Run: %s.%s() " % (self.__class__.__name__, fun))
dks = []
cks = []
doc = QDomDocument()
nname = "definition"
qdn = doc.createElement(nname)
nn = self.__rnd.randint(0, 9)
qdn.setAttribute("name", "myname%s" % nn)
qdn.setAttribute("type", "mytype%s" % nn)
qdn.setAttribute("unit", "myunits%s" % nn)
qdn.setAttribute("shortname", "mynshort%s" % nn)
cks.append(doc.createTextNode("$components.some%s\n" % nn))
qdn.appendChild(cks[-1])
doc.appendChild(qdn)
dname = "doc"
mdoc = doc.createElement(dname)
qdn.appendChild(mdoc)
ndcs = self.__rnd.randint(0, 10)
for n in range(ndcs):
dks.append(doc.createTextNode("\nText\n %s\n" % n))
mdoc.appendChild(dks[-1])
form = DefinitionDlg()
form.show()
form.node = qdn
self.assertEqual(form.name, '')
self.assertEqual(form.content, '')
self.assertEqual(form.doc, '')
self.assertEqual(form.attributes, {})
self.assertEqual(form.subItems,
["group", "field", "attribute", "link", "component",
"doc", "symbols"])
self.assertEqual(form.ui.__class__.__name__, "Ui_DefinitionDlg")
form.createGUI()
self.assertEqual(form.name, '')
self.assertEqual(form.content, '')
self.assertEqual(form.doc, '')
self.assertEqual(form.attributes, {})
self.assertEqual(form.subItems,
["group", "field", "attribute", "link", "component",
"doc", "symbols"])
form.setFromNode()
attributes = {
u'shortname': u'mynshort%s' % nn, u'type': u'mytype%s' % nn,
u'unit': u'myunits%s' % nn}
self.assertEqual(form.name, "myname%s" % nn)
self.assertEqual(form.content, "$components.some%s" % nn)
self.assertEqual(
form.doc,
"".join(["\nText\n %s\n" % n for n in range(ndcs)]).strip())
self.assertEqual(form.attributes, attributes)
self.assertEqual(form.subItems,
["group", "field", "attribute", "link", "component",
"doc", "symbols"])
self.assertTrue(not form.ui.nameLineEdit.text())
self.assertTrue(not form.ui.contentTextEdit.toPlainText())
self.assertTrue(not form.ui.docTextEdit.toPlainText())
self.assertEqual(form.ui.attributeTableWidget.columnCount(), 2)
self.assertEqual(form.ui.attributeTableWidget.rowCount(), 0)
form.populateAttributes("ble")
self.assertEqual(form.ui.attributeTableWidget.columnCount(), 2)
self.assertEqual(
form.ui.attributeTableWidget.rowCount(), len(attributes))
for i in range(len(attributes)):
it = form.ui.attributeTableWidget.item(i, 0)
k = str(it.text())
self.assertTrue(k in attributes.keys())
it2 = form.ui.attributeTableWidget.item(i, 1)
self.assertEqual(it2.text(), attributes[k])
item = form.ui.attributeTableWidget.item(
form.ui.attributeTableWidget.currentRow(), 0)
self.assertEqual(item, None)
    # populateAttributes test with a valid selection
    # \brief It tests that the selected attribute row becomes current
def test_populateAttribute_setFromNode_selected(self):
fun = sys._getframe().f_code.co_name
print("Run: %s.%s() " % (self.__class__.__name__, fun))
dks = []
cks = []
doc = QDomDocument()
nname = "definition"
qdn = doc.createElement(nname)
nn = self.__rnd.randint(0, 9)
qdn.setAttribute("name", "myname%s" % nn)
qdn.setAttribute("type", "mytype%s" % nn)
qdn.setAttribute("unit", "myunits%s" % nn)
qdn.setAttribute("shortname", "mynshort%s" % nn)
cks.append(doc.createTextNode("$components.some%s\n" % nn))
qdn.appendChild(cks[-1])
doc.appendChild(qdn)
dname = "doc"
mdoc = doc.createElement(dname)
qdn.appendChild(mdoc)
ndcs = self.__rnd.randint(0, 10)
for n in range(ndcs):
dks.append(doc.createTextNode("\nText\n %s\n" % n))
mdoc.appendChild(dks[-1])
form = DefinitionDlg()
form.show()
form.node = qdn
self.assertEqual(form.name, '')
self.assertEqual(form.content, '')
self.assertEqual(form.doc, '')
self.assertEqual(form.attributes, {})
self.assertEqual(form.subItems,
["group", "field", "attribute", "link", "component",
"doc", "symbols"])
self.assertEqual(form.ui.__class__.__name__, "Ui_DefinitionDlg")
form.createGUI()
self.assertEqual(form.name, '')
self.assertEqual(form.content, '')
self.assertEqual(form.doc, '')
self.assertEqual(form.attributes, {})
self.assertEqual(form.subItems,
["group", "field", "attribute", "link", "component",
"doc", "symbols"])
form.setFromNode()
attributes = {u'shortname': u'mynshort%s' % nn,
u'type': u'mytype%s' % nn, u'unit': u'myunits%s' % nn}
self.assertEqual(form.name, "myname%s" % nn)
self.assertEqual(form.content, "$components.some%s" % nn)
self.assertEqual(
form.doc,
"".join(["\nText\n %s\n" % n for n in range(ndcs)]).strip())
self.assertEqual(form.attributes, attributes)
self.assertEqual(form.subItems,
["group", "field", "attribute", "link", "component",
"doc", "symbols"])
self.assertTrue(not form.ui.nameLineEdit.text())
self.assertTrue(not form.ui.contentTextEdit.toPlainText())
self.assertTrue(not form.ui.docTextEdit.toPlainText())
self.assertEqual(form.ui.attributeTableWidget.columnCount(), 2)
self.assertEqual(form.ui.attributeTableWidget.rowCount(), 0)
na = self.__rnd.randint(0, len(attributes)-1)
sel = list(attributes.keys())[na]
form.populateAttributes(sel)
self.assertEqual(form.ui.attributeTableWidget.columnCount(), 2)
self.assertEqual(
form.ui.attributeTableWidget.rowCount(), len(attributes))
for i in range(len(attributes)):
it = form.ui.attributeTableWidget.item(i, 0)
k = str(it.text())
self.assertTrue(k in attributes.keys())
it2 = form.ui.attributeTableWidget.item(i, 1)
self.assertEqual(it2.text(), attributes[k])
item = form.ui.attributeTableWidget.item(
form.ui.attributeTableWidget.currentRow(), 0)
self.assertEqual(item.data(Qt.UserRole), sel)
    # add-attribute test
    # \brief It tests adding a new attribute via the add button
    def test_populateAttribute_setFromNode_selected_addAttribute(self):
        """Add-button test: a cancelled attribute dialog leaves the
        table unchanged, an accepted one appends the new attribute.
        """
        fun = sys._getframe().f_code.co_name
        print("Run: %s.%s() " % (self.__class__.__name__, fun))
        # build a <definition> node with random attributes, a text
        # child and a <doc> child with a random number of text chunks
        dks = []
        cks = []
        doc = QDomDocument()
        nname = "definition"
        qdn = doc.createElement(nname)
        nn = self.__rnd.randint(0, 9)
        qdn.setAttribute("name", "myname%s" % nn)
        qdn.setAttribute("type", "mytype%s" % nn)
        qdn.setAttribute("unit", "myunits%s" % nn)
        qdn.setAttribute("shortname", "mynshort%s" % nn)
        cks.append(doc.createTextNode("$components.some%s\n" % nn))
        qdn.appendChild(cks[-1])
        doc.appendChild(qdn)
        dname = "doc"
        mdoc = doc.createElement(dname)
        qdn.appendChild(mdoc)
        ndcs = self.__rnd.randint(0, 10)
        for n in range(ndcs):
            dks.append(doc.createTextNode("\nText\n %s\n" % n))
            mdoc.appendChild(dks[-1])
        form = DefinitionDlg()
        form.show()
        form.node = qdn
        # state is empty before createGUI() and setFromNode()
        self.assertEqual(form.name, '')
        self.assertEqual(form.content, '')
        self.assertEqual(form.doc, '')
        self.assertEqual(form.attributes, {})
        self.assertEqual(form.subItems,
                         ["group", "field", "attribute", "link", "component",
                          "doc", "symbols"])
        self.assertEqual(form.ui.__class__.__name__, "Ui_DefinitionDlg")
        form.createGUI()
        self.assertEqual(form.name, '')
        self.assertEqual(form.content, '')
        self.assertEqual(form.doc, '')
        self.assertEqual(form.attributes, {})
        self.assertEqual(form.subItems,
                         ["group", "field", "attribute",
                          "link", "component", "doc", "symbols"])
        form.setFromNode()
        attributes = {u'shortname': u'mynshort%s' % nn,
                      u'type': u'mytype%s' % nn,
                      u'unit': u'myunits%s' % nn}
        self.assertEqual(form.name, "myname%s" % nn)
        self.assertEqual(form.content, "$components.some%s" % nn)
        self.assertEqual(
            form.doc,
            "".join(["\nText\n %s\n" % n for n in range(ndcs)]).strip())
        self.assertEqual(form.attributes, attributes)
        self.assertEqual(form.subItems,
                         ["group", "field", "attribute", "link", "component",
                          "doc", "symbols"])
        self.assertTrue(not form.ui.nameLineEdit.text())
        self.assertTrue(not form.ui.contentTextEdit.toPlainText())
        self.assertTrue(not form.ui.docTextEdit.toPlainText())
        self.assertEqual(form.ui.attributeTableWidget.columnCount(), 2)
        self.assertEqual(form.ui.attributeTableWidget.rowCount(), 0)
        # preselect a random attribute key and fill the table
        na = self.__rnd.randint(0, len(attributes)-1)
        sel = list(attributes.keys())[na]
        form.populateAttributes(sel)
        self.assertEqual(form.ui.attributeTableWidget.columnCount(), 2)
        self.assertEqual(
            form.ui.attributeTableWidget.rowCount(), len(attributes))
        for i in range(len(attributes)):
            it = form.ui.attributeTableWidget.item(i, 0)
            k = str(it.text())
            self.assertTrue(k in attributes.keys())
            it2 = form.ui.attributeTableWidget.item(i, 1)
            self.assertEqual(it2.text(), attributes[k])
        item = form.ui.attributeTableWidget.item(
            form.ui.attributeTableWidget.currentRow(), 0)
        self.assertEqual(item.data(Qt.UserRole), sel)
        # first click: the timer fires attributeWidgetClose (a sibling
        # helper of this test class) which cancels the modal attribute
        # dialog, so the table must stay unchanged
        self.aname = "addedAttribute"
        self.avalue = "addedAttributeValue"
        QTimer.singleShot(10, self.attributeWidgetClose)
        QTest.mouseClick(form.ui.addPushButton, Qt.LeftButton)
        self.assertEqual(form.ui.attributeTableWidget.columnCount(), 2)
        self.assertEqual(
            form.ui.attributeTableWidget.rowCount(), len(attributes))
        for i in range(len(attributes)):
            it = form.ui.attributeTableWidget.item(i, 0)
            k = str(it.text())
            self.assertTrue(k in attributes.keys())
            it2 = form.ui.attributeTableWidget.item(i, 1)
            self.assertEqual(it2.text(), attributes[k])
        item = form.ui.attributeTableWidget.item(
            form.ui.attributeTableWidget.currentRow(), 0)
        self.assertEqual(item.data(Qt.UserRole), sel)
        # second click: attributeWidget accepts the dialog, so
        # (self.aname, self.avalue) is appended as a new table row
        self.aname = "addedAttribute"
        self.avalue = "addedAttributeValue"
        QTimer.singleShot(10, self.attributeWidget)
        QTest.mouseClick(form.ui.addPushButton, Qt.LeftButton)
        self.assertEqual(form.ui.attributeTableWidget.columnCount(), 2)
        self.assertEqual(
            form.ui.attributeTableWidget.rowCount(), len(attributes)+1)
        for i in range(len(attributes)+1):
            it = form.ui.attributeTableWidget.item(i, 0)
            k = str(it.text())
            it2 = form.ui.attributeTableWidget.item(i, 1)
            if k in attributes.keys():
                self.assertEqual(it2.text(), attributes[k])
            else:
                # the extra row carries the newly added value
                self.assertEqual(it2.text(), self.avalue)
        item = form.ui.attributeTableWidget.item(
            form.ui.attributeTableWidget.currentRow(), 0)
        # the new attribute becomes the current row
        self.assertEqual(item.data(Qt.UserRole), self.aname)
    # remove-attribute test
    # \brief It tests removing an attribute via the remove button
    def test_populateAttribute_setFromNode_selected_removeAttribute(self):
        """Remove-button test: clicking remove on a selected attribute
        drops exactly that row from the attribute table.
        """
        fun = sys._getframe().f_code.co_name
        print("Run: %s.%s() " % (self.__class__.__name__, fun))
        # build a <definition> node with five attributes (incl. logname),
        # a text child and a <doc> child with random text chunks
        dks = []
        cks = []
        doc = QDomDocument()
        nname = "definition"
        qdn = doc.createElement(nname)
        nn = self.__rnd.randint(0, 9)
        qdn.setAttribute("name", "myname%s" % nn)
        qdn.setAttribute("type", "mytype%s" % nn)
        qdn.setAttribute("unit", "myunits%s" % nn)
        qdn.setAttribute("shortname", "mynshort%s" % nn)
        qdn.setAttribute("logname", "mynlong%s" % nn)
        cks.append(doc.createTextNode("$components.some%s\n" % nn))
        qdn.appendChild(cks[-1])
        doc.appendChild(qdn)
        dname = "doc"
        mdoc = doc.createElement(dname)
        qdn.appendChild(mdoc)
        ndcs = self.__rnd.randint(0, 10)
        for n in range(ndcs):
            dks.append(doc.createTextNode("\nText\n %s\n" % n))
            mdoc.appendChild(dks[-1])
        form = DefinitionDlg()
        form.show()
        form.node = qdn
        # state is empty before createGUI() and setFromNode()
        self.assertEqual(form.name, '')
        self.assertEqual(form.content, '')
        self.assertEqual(form.doc, '')
        self.assertEqual(form.attributes, {})
        self.assertEqual(form.subItems,
                         ["group", "field", "attribute", "link", "component",
                          "doc", "symbols"])
        self.assertEqual(form.ui.__class__.__name__, "Ui_DefinitionDlg")
        form.createGUI()
        self.assertEqual(form.name, '')
        self.assertEqual(form.content, '')
        self.assertEqual(form.doc, '')
        self.assertEqual(form.attributes, {})
        self.assertEqual(form.subItems,
                         ["group", "field", "attribute", "link", "component",
                          "doc", "symbols"])
        form.setFromNode()
        attributes = {u'type': u'mytype%s' % nn,
                      u'shortname': u'mynshort%s' % nn,
                      u'logname': u'mynlong%s' % nn,
                      u'unit': u'myunits%s' % nn}
        self.assertEqual(form.name, "myname%s" % nn)
        self.assertEqual(form.content, "$components.some%s" % nn)
        self.assertEqual(
            form.doc,
            "".join(["\nText\n %s\n" % n for n in range(ndcs)]).strip())
        self.assertEqual(form.attributes, attributes)
        self.assertEqual(form.subItems,
                         ["group", "field", "attribute", "link",
                          "component", "doc", "symbols"])
        self.assertTrue(not form.ui.nameLineEdit.text())
        self.assertTrue(not form.ui.contentTextEdit.toPlainText())
        self.assertTrue(not form.ui.docTextEdit.toPlainText())
        self.assertEqual(form.ui.attributeTableWidget.columnCount(), 2)
        self.assertEqual(form.ui.attributeTableWidget.rowCount(), 0)
        # preselect a random attribute key and fill the table
        na = self.__rnd.randint(0, len(attributes)-1)
        sel = list(attributes.keys())[na]
        form.populateAttributes(sel)
        self.assertEqual(form.ui.attributeTableWidget.columnCount(), 2)
        self.assertEqual(
            form.ui.attributeTableWidget.rowCount(), len(attributes))
        for i in range(len(attributes)):
            it = form.ui.attributeTableWidget.item(i, 0)
            k = str(it.text())
            self.assertTrue(k in attributes.keys())
            it2 = form.ui.attributeTableWidget.item(i, 1)
            self.assertEqual(it2.text(), attributes[k])
        item = form.ui.attributeTableWidget.item(
            form.ui.attributeTableWidget.currentRow(), 0)
        self.assertEqual(item.data(Qt.UserRole), sel)
        # choose the attribute to remove and make it current
        aname = self.__rnd.choice(list(attributes.keys()))
        # NOTE(review): the next bare expression is a leftover of the
        # commented-out `avalue =` assignment below; it is a no-op
        # avalue =
        attributes[aname]
        form.populateAttributes(aname)
        # the confirmation-dialog hook is disabled here
        # QTimer.singleShot(10, self.rmAttributeWidget)
        QTest.mouseClick(form.ui.removePushButton, Qt.LeftButton)
        # self.assertEqual(self.text, "Remove attribute: %s = ' %s'" %
        # (aname, avalue))
        # exactly one row disappeared; the remaining rows still match
        self.assertEqual(form.ui.attributeTableWidget.columnCount(), 2)
        self.assertEqual(
            form.ui.attributeTableWidget.rowCount(), len(attributes)-1)
        for i in range(len(attributes)-1):
            it = form.ui.attributeTableWidget.item(i, 0)
            k = str(it.text())
            self.assertTrue(k in attributes.keys())
            it2 = form.ui.attributeTableWidget.item(i, 1)
            self.assertEqual(it2.text(), attributes[k])
        item = form.ui.attributeTableWidget.item(
            form.ui.attributeTableWidget.currentRow(), 0)
        # after removal there is no current item
        self.assertEqual(item, None)
    # table-item-changed test
    # \brief It tests editing attribute cells in the table
    def test_populateAttribute_setFromNode_selected_tableItemChanged(self):
        """Table-edit test: renaming an attribute key is rejected with
        a message box, while editing its value column is accepted.
        """
        fun = sys._getframe().f_code.co_name
        print("Run: %s.%s() " % (self.__class__.__name__, fun))
        # build a <definition> node with five attributes (incl. logname),
        # a text child and a <doc> child with random text chunks
        dks = []
        cks = []
        doc = QDomDocument()
        nname = "definition"
        qdn = doc.createElement(nname)
        nn = self.__rnd.randint(0, 9)
        qdn.setAttribute("name", "myname%s" % nn)
        qdn.setAttribute("type", "mytype%s" % nn)
        qdn.setAttribute("unit", "myunits%s" % nn)
        qdn.setAttribute("shortname", "mynshort%s" % nn)
        qdn.setAttribute("logname", "mynlong%s" % nn)
        cks.append(doc.createTextNode("$components.some%s\n" % nn))
        qdn.appendChild(cks[-1])
        doc.appendChild(qdn)
        dname = "doc"
        mdoc = doc.createElement(dname)
        qdn.appendChild(mdoc)
        ndcs = self.__rnd.randint(0, 10)
        for n in range(ndcs):
            dks.append(doc.createTextNode("\nText\n %s\n" % n))
            mdoc.appendChild(dks[-1])
        form = DefinitionDlg()
        form.show()
        form.node = qdn
        # state is empty before createGUI() and setFromNode()
        self.assertEqual(form.name, '')
        self.assertEqual(form.content, '')
        self.assertEqual(form.doc, '')
        self.assertEqual(form.attributes, {})
        self.assertEqual(form.subItems,
                         ["group", "field", "attribute", "link",
                          "component", "doc", "symbols"])
        self.assertEqual(form.ui.__class__.__name__, "Ui_DefinitionDlg")
        form.createGUI()
        # shortcut for the attribute table widget; the widget object
        # stays the same across populateAttributes() calls below
        atw = form.ui.attributeTableWidget
        self.assertEqual(form.name, '')
        self.assertEqual(form.content, '')
        self.assertEqual(form.doc, '')
        self.assertEqual(form.attributes, {})
        self.assertEqual(form.subItems,
                         ["group", "field", "attribute", "link",
                          "component", "doc", "symbols"])
        form.setFromNode()
        attributes = {u'type': u'mytype%s' % nn, u'shortname':
                      u'mynshort%s' % nn, u'logname': u'mynlong%s' % nn,
                      u'unit': u'myunits%s' % nn}
        self.assertEqual(form.name, "myname%s" % nn)
        self.assertEqual(form.content, "$components.some%s" % nn)
        self.assertEqual(
            form.doc,
            "".join(["\nText\n %s\n" % n for n in range(ndcs)]).strip())
        self.assertEqual(form.attributes, attributes)
        self.assertEqual(form.subItems,
                         ["group", "field", "attribute", "link", "component",
                          "doc", "symbols"])
        self.assertTrue(not form.ui.nameLineEdit.text())
        self.assertTrue(not form.ui.contentTextEdit.toPlainText())
        self.assertTrue(not form.ui.docTextEdit.toPlainText())
        self.assertEqual(atw.columnCount(), 2)
        self.assertEqual(atw.rowCount(), 0)
        # preselect a random attribute key and fill the table
        na = self.__rnd.randint(0, len(attributes)-1)
        sel = list(attributes.keys())[na]
        form.populateAttributes(sel)
        self.assertEqual(atw.columnCount(), 2)
        self.assertEqual(atw.rowCount(), len(attributes))
        for i in range(len(attributes)):
            it = atw.item(i, 0)
            k = str(it.text())
            self.assertTrue(k in attributes.keys())
            it2 = atw.item(i, 1)
            self.assertEqual(it2.text(), attributes[k])
        item = atw.item(atw.currentRow(), 0)
        self.assertEqual(item.data(Qt.UserRole), sel)
        # try to rename a random key (column 0): the itemChanged
        # handler must pop up a message box and revert the change
        ch = self.__rnd.randint(0, len(attributes)-1)
        atw.setCurrentCell(ch, 0)
        item = atw.item(atw.currentRow(), 0)
        aname = str(item.data(Qt.UserRole))
        # NOTE(review): `unicode` is Python-2 only — presumably aliased
        # elsewhere in this file; confirm
        it = QTableWidgetItem(unicode(aname))
        it.setData(Qt.DisplayRole, (aname+"_"+attributes[aname]))
        it.setData(Qt.UserRole, (aname))
        atw.setCurrentCell(ch, 0)
        # checkMessageBox (sibling helper) records and closes the
        # modal message box triggered by setItem()
        QTimer.singleShot(10, self.checkMessageBox)
        atw.setItem(ch, 0, it)
        self.assertEqual(
            self.text,
            "To change the attribute name, please remove the attribute "
            "and add the new one")
        # NOTE(review): leftover no-op of a commented-out assignment
        # avalue =
        attributes[str(aname)]
        # the table content is unchanged after the rejected rename
        self.assertEqual(atw.columnCount(), 2)
        self.assertEqual(atw.rowCount(), len(attributes))
        for i in range(len(attributes)):
            it = atw.item(i, 0)
            k = str(it.text())
            self.assertTrue(k in attributes.keys())
            it2 = atw.item(i, 1)
            self.assertEqual(it2.text(), attributes[k])
        # now edit the value column (column 1): this is accepted
        it = QTableWidgetItem(unicode(aname))
        it.setData(Qt.DisplayRole, (aname+"_"+attributes[aname]))
        it.setData(Qt.UserRole, (aname))
        atw.setCurrentCell(ch, 1)
        atw.setItem(ch, 1, it)
        # NOTE(review): leftover no-op of a commented-out assignment
        # avalue =
        attributes[str(aname)]
        self.assertEqual(atw.columnCount(), 2)
        self.assertEqual(atw.rowCount(), len(attributes))
        for i in range(len(attributes)):
            it = atw.item(i, 0)
            k = str(it.text())
            if k != aname:
                # untouched rows keep their original values
                self.assertTrue(k in attributes.keys())
                it2 = atw.item(i, 1)
                self.assertEqual(it2.text(), attributes[k])
            else:
                # the edited row carries the new value
                it2 = atw.item(i, 1)
                self.assertEqual(it2.text(), (aname+"_"+attributes[aname]))
    # updateNode test
    # \brief It tests writing the dialog state back to the DOM node
def test_updateNode(self):
    """updateNode() without an index: dialog state is written back to the DOM node.

    Builds a <definition> element with random attributes, a text child and a
    <doc> child holding a random number of text nodes, loads it into a
    DefinitionDlg, then changes the dialog's fields and verifies that
    updateNode() rewrites the node's attributes and <doc> text accordingly.
    """
    fun = sys._getframe().f_code.co_name
    print("Run: %s.%s() " % (self.__class__.__name__, fun))
    # build the <definition> DOM node with randomized attribute values
    dks = []
    cks = []
    doc = QDomDocument()
    nname = "definition"
    qdn = doc.createElement(nname)
    nn = self.__rnd.randint(0, 9)
    qdn.setAttribute("name", "myname%s" % nn)
    qdn.setAttribute("type", "mytype%s" % nn)
    qdn.setAttribute("unit", "myunits%s" % nn)
    qdn.setAttribute("shortname", "mynshort%s" % nn)
    cks.append(doc.createTextNode("$components.some%s\n" % nn))
    qdn.appendChild(cks[-1])
    doc.appendChild(qdn)
    # attach a <doc> child with 0..10 text nodes
    dname = "doc"
    mdoc = doc.createElement(dname)
    qdn.appendChild(mdoc)
    ndcs = self.__rnd.randint(0, 10)
    for n in range(ndcs):
        dks.append(doc.createTextNode("\nText\n %s\n" % n))
        mdoc.appendChild(dks[-1])
    # fresh dialog starts empty until setFromNode() is called
    form = DefinitionDlg()
    form.show()
    form.node = qdn
    self.assertEqual(form.name, '')
    self.assertEqual(form.content, '')
    self.assertEqual(form.doc, '')
    self.assertEqual(form.attributes, {})
    self.assertEqual(form.subItems,
                     ["group", "field", "attribute", "link", "component",
                      "doc", "symbols"])
    self.assertEqual(form.ui.__class__.__name__, "Ui_DefinitionDlg")
    form.setFromNode()
    form.createGUI()
    # give the dialog a model/view pair pointing at the node's index
    allAttr = True
    cm = ComponentModel(doc, allAttr)
    ri = cm.rootIndex
    di = cm.index(0, 0, ri)
    form.view = TestView(cm)
    form.view.testIndex = di
    # values the dialog will be switched to before updateNode()
    nname = "newname"
    ntype = "newtype"
    attrs = {"unit": "newunit", "longname": "newlogname"}
    mdoc = "New text \nNew text"
    # sanity check: form state matches the DOM before any change
    attributeMap = form.node.attributes()
    cnt = 0
    for i in range(attributeMap.count()):
        nm = attributeMap.item(i).nodeName()
        vl = attributeMap.item(i).nodeValue()
        if nm == "name":
            self.assertEqual(vl, form.name)
            cnt += 1
        else:
            self.assertEqual(vl, form.attributes[str(nm)])
    self.assertEqual(len(form.attributes), attributeMap.count() - cnt)
    mydoc = form.node.firstChildElement(str("doc"))
    text = DomTools.getText(mydoc)
    olddoc = unicode(text).strip() if text else ""
    self.assertEqual(olddoc, form.doc)
    text = DomTools.getText(form.node)
    oldcont = unicode(text).strip() if text else ""
    self.assertEqual(oldcont, form.content)
    # change the dialog state in-memory (not through the widgets)
    form.name = nname
    form.content = ntype
    form.attributes.clear()
    for at in attrs.keys():
        form.attributes[at] = attrs[at]
    form.doc = mdoc
    form.root = doc
    allAttr = True
    cm = ComponentModel(doc, allAttr)
    ri = cm.rootIndex
    di = cm.index(0, 0, ri)
    form.view = TestView(cm)
    form.view.testIndex = di
    # no index argument: updateNode() resolves the node via the view
    form.updateNode()
    # DOM node must now carry the new name/type/attributes
    cnt = 0
    for i in range(attributeMap.count()):
        nm = attributeMap.item(i).nodeName()
        vl = attributeMap.item(i).nodeValue()
        if nm == "name":
            self.assertEqual(vl, nname)
            cnt += 1
        elif nm == "type":
            self.assertEqual(vl, ntype)
            cnt += 1
        else:
            self.assertEqual(vl, attrs[str(nm)])
    self.assertEqual(len(attrs), attributeMap.count() - cnt)
    # the <doc> child text must have been replaced as well
    mydoc = form.node.firstChildElement(str("doc"))
    text = DomTools.getText(mydoc)
    olddoc = unicode(text).strip() if text else ""
    self.assertEqual(olddoc, mdoc)
# constructor test
# \brief It tests default settings
def test_updateNode_withindex(self):
    """updateNode(index): same round-trip as test_updateNode but passing
    the model index explicitly instead of letting the dialog resolve it."""
    fun = sys._getframe().f_code.co_name
    print("Run: %s.%s() " % (self.__class__.__name__, fun))
    # build the <definition> DOM node with randomized attribute values
    dks = []
    cks = []
    doc = QDomDocument()
    nname = "definition"
    qdn = doc.createElement(nname)
    nn = self.__rnd.randint(0, 9)
    qdn.setAttribute("name", "myname%s" % nn)
    qdn.setAttribute("type", "mytype%s" % nn)
    qdn.setAttribute("unit", "myunits%s" % nn)
    qdn.setAttribute("shortname", "mynshort%s" % nn)
    cks.append(doc.createTextNode("$components.some%s\n" % nn))
    qdn.appendChild(cks[-1])
    doc.appendChild(qdn)
    # attach a <doc> child with 0..10 text nodes
    dname = "doc"
    mdoc = doc.createElement(dname)
    qdn.appendChild(mdoc)
    ndcs = self.__rnd.randint(0, 10)
    for n in range(ndcs):
        dks.append(doc.createTextNode("\nText\n %s\n" % n))
        mdoc.appendChild(dks[-1])
    # fresh dialog starts empty until setFromNode() is called
    form = DefinitionDlg()
    form.show()
    form.node = qdn
    self.assertEqual(form.name, '')
    self.assertEqual(form.content, '')
    self.assertEqual(form.doc, '')
    self.assertEqual(form.attributes, {})
    self.assertEqual(form.subItems,
                     ["group", "field", "attribute", "link",
                      "component", "doc", "symbols"])
    self.assertEqual(form.ui.__class__.__name__, "Ui_DefinitionDlg")
    form.setFromNode()
    form.createGUI()
    # give the dialog a model/view pair pointing at the node's index
    allAttr = True
    cm = ComponentModel(doc, allAttr)
    ri = cm.rootIndex
    di = cm.index(0, 0, ri)
    form.view = TestView(cm)
    form.view.testIndex = di
    # values the dialog will be switched to before updateNode()
    nname = "newname"
    ntype = "newtype"
    attrs = {"unit": "newunit", "longname": "newlogname"}
    mdoc = "New text \nNew text"
    # sanity check: form state matches the DOM before any change
    attributeMap = form.node.attributes()
    cnt = 0
    for i in range(attributeMap.count()):
        nm = attributeMap.item(i).nodeName()
        vl = attributeMap.item(i).nodeValue()
        if nm == "name":
            self.assertEqual(vl, form.name)
            cnt += 1
        else:
            self.assertEqual(vl, form.attributes[str(nm)])
    self.assertEqual(len(form.attributes), attributeMap.count() - cnt)
    mydoc = form.node.firstChildElement(str("doc"))
    text = DomTools.getText(mydoc)
    olddoc = unicode(text).strip() if text else ""
    self.assertEqual(olddoc, form.doc)
    text = DomTools.getText(form.node)
    oldcont = unicode(text).strip() if text else ""
    self.assertEqual(oldcont, form.content)
    # change the dialog state in-memory (not through the widgets)
    form.name = nname
    form.content = ntype
    form.attributes.clear()
    for at in attrs.keys():
        form.attributes[at] = attrs[at]
    form.doc = mdoc
    form.root = doc
    allAttr = True
    cm = ComponentModel(doc, allAttr)
    ri = cm.rootIndex
    di = cm.index(0, 0, ri)
    form.view = TestView(cm)
    form.view.testIndex = di
    # pass the index explicitly this time
    form.updateNode(di)
    # DOM node must now carry the new name/type/attributes
    cnt = 0
    for i in range(attributeMap.count()):
        nm = attributeMap.item(i).nodeName()
        vl = attributeMap.item(i).nodeValue()
        if nm == "name":
            self.assertEqual(vl, nname)
            cnt += 1
        elif nm == "type":
            self.assertEqual(vl, ntype)
            cnt += 1
        else:
            self.assertEqual(vl, attrs[str(nm)])
    self.assertEqual(len(attrs), attributeMap.count() - cnt)
    # the <doc> child text must have been replaced as well
    mydoc = form.node.firstChildElement(str("doc"))
    text = DomTools.getText(mydoc)
    olddoc = unicode(text).strip() if text else ""
    self.assertEqual(olddoc, mdoc)
    # content text node must still agree with the form state
    text = DomTools.getText(form.node)
    oldcont = unicode(text).strip() if text else ""
    self.assertEqual(oldcont, form.content)
# constructor test
# \brief It tests default settings
def test_apply(self):
    """apply(): edits made through the GUI widgets (line edits, attribute
    add/remove buttons) are committed to both the form state and the DOM."""
    fun = sys._getframe().f_code.co_name
    print("Run: %s.%s() " % (self.__class__.__name__, fun))
    # build the <definition> DOM node with randomized attribute values
    dks = []
    cks = []
    doc = QDomDocument()
    nname = "definition"
    qdn = doc.createElement(nname)
    nn = self.__rnd.randint(0, 9)
    qdn.setAttribute("name", "myname%s" % nn)
    qdn.setAttribute("type", "mytype%s" % nn)
    qdn.setAttribute("unit", "myunits%s" % nn)
    qdn.setAttribute("shortname", "mynshort%s" % nn)
    cks.append(doc.createTextNode("$components.some%s\n" % nn))
    qdn.appendChild(cks[-1])
    doc.appendChild(qdn)
    # attach a <doc> child with 0..10 text nodes
    dname = "doc"
    mdoc = doc.createElement(dname)
    qdn.appendChild(mdoc)
    ndcs = self.__rnd.randint(0, 10)
    for n in range(ndcs):
        dks.append(doc.createTextNode("\nText\n %s\n" % n))
        mdoc.appendChild(dks[-1])
    # fresh dialog starts empty until setFromNode() is called
    form = DefinitionDlg()
    form.show()
    form.node = qdn
    self.assertEqual(form.name, '')
    self.assertEqual(form.content, '')
    self.assertEqual(form.doc, '')
    self.assertEqual(form.attributes, {})
    self.assertEqual(form.subItems,
                     ["group", "field", "attribute", "link", "component",
                      "doc", "symbols"])
    self.assertEqual(form.ui.__class__.__name__, "Ui_DefinitionDlg")
    form.setFromNode()
    form.createGUI()
    # give the dialog a model/view pair pointing at the node's index
    allAttr = True
    cm = ComponentModel(doc, allAttr)
    ri = cm.rootIndex
    di = cm.index(0, 0, ri)
    form.view = TestView(cm)
    form.view.testIndex = di
    # sanity check: form state matches the DOM before any change
    attributeMap = form.node.attributes()
    cnt = 0
    for i in range(attributeMap.count()):
        nm = attributeMap.item(i).nodeName()
        vl = attributeMap.item(i).nodeValue()
        if nm == "name":
            self.assertEqual(vl, form.name)
            cnt += 1
        else:
            self.assertEqual(vl, form.attributes[str(nm)])
    self.assertEqual(len(form.attributes), attributeMap.count() - cnt)
    mydoc = form.node.firstChildElement(str("doc"))
    text = DomTools.getText(mydoc)
    olddoc = unicode(text).strip() if text else ""
    self.assertEqual(olddoc, form.doc)
    text = DomTools.getText(form.node)
    oldcont = unicode(text).strip() if text else ""
    self.assertEqual(oldcont, form.content)
    # new values, this time entered through the GUI widgets
    nname = "newname"
    ntype = "newtype"
    attrs = {"unit": "newunit", "longname": "newlogname",
             "mynew": "newvalue"}
    mdoc = "New text New text"
    form.root = doc
    allAttr = True
    cm = ComponentModel(doc, allAttr)
    ri = cm.rootIndex
    di = cm.index(0, 0, ri)
    form.view = TestView(cm)
    form.view.testIndex = di
    form.ui.nameLineEdit.setText(nname)
    form.ui.contentTextEdit.setText(ntype)
    form.ui.docTextEdit.setText(str(mdoc))
    form.ui.docTextEdit.setText(str(mdoc))
    # remove every pre-existing attribute row via the Remove button
    for r in form.attributes:
        form.ui.attributeTableWidget.setCurrentCell(0, 1)
        item = form.ui.attributeTableWidget.item(
            form.ui.attributeTableWidget.currentRow(), 0)
        print(item.text())
        # QTimer.singleShot(10, self.rmAttributeWidget)
        QTest.mouseClick(form.ui.removePushButton, Qt.LeftButton)
    # add the new attributes; attributeWidget (scheduled via QTimer)
    # fills the modal attribute dialog with self.aname/self.avalue
    i = 0
    for r in attrs:
        form.ui.attributeTableWidget.setCurrentCell(i, 1)
        self.aname = r
        self.avalue = attrs[r]
        QTimer.singleShot(10, self.attributeWidget)
        QTest.mouseClick(form.ui.addPushButton, Qt.LeftButton)
        i += 1
    form.apply()
    # apply() must copy the widget contents into the form state ...
    self.assertEqual(form.name, nname)
    self.assertEqual(form.content, ntype)
    self.assertEqual(form.doc, mdoc)
    self.assertEqual(form.attributes, attrs)
    # ... and into the DOM node's attribute map
    cnt = 0
    for i in range(attributeMap.count()):
        nm = attributeMap.item(i).nodeName()
        vl = attributeMap.item(i).nodeValue()
        if nm == "name":
            self.assertEqual(vl, nname)
            cnt += 1
        elif nm == "type":
            self.assertEqual(vl, ntype)
            cnt += 1
        else:
            self.assertEqual(vl, attrs[str(nm)])
    self.assertEqual(len(attrs), attributeMap.count() - cnt)
    # the <doc> child and the content text must match the new values
    mydoc = form.node.firstChildElement(str("doc"))
    text = DomTools.getText(mydoc)
    olddoc = unicode(text).strip() if text else ""
    self.assertEqual(olddoc, mdoc)
    text = DomTools.getText(form.node)
    oldcont = unicode(text).strip() if text else ""
    self.assertEqual(oldcont, form.content)
# constructor test
# \brief It tests default settings
def test_reset(self):
    """reset(): after editing the widgets, reset() restores the dialog
    state (name, content, doc, attributes) from the original DOM node."""
    fun = sys._getframe().f_code.co_name
    print("Run: %s.%s() " % (self.__class__.__name__, fun))
    # build the <definition> DOM node with randomized attribute values
    dks = []
    cks = []
    doc = QDomDocument()
    nname = "definition"
    qdn = doc.createElement(nname)
    nn = self.__rnd.randint(0, 9)
    qdn.setAttribute("name", "myname%s" % nn)
    qdn.setAttribute("type", "mytype%s" % nn)
    qdn.setAttribute("unit", "myunits%s" % nn)
    qdn.setAttribute("shortname", "mynshort%s" % nn)
    cks.append(doc.createTextNode("$components.some%s\n" % nn))
    qdn.appendChild(cks[-1])
    doc.appendChild(qdn)
    # attach a <doc> child with 0..10 text nodes
    dname = "doc"
    mdoc = doc.createElement(dname)
    qdn.appendChild(mdoc)
    ndcs = self.__rnd.randint(0, 10)
    for n in range(ndcs):
        dks.append(doc.createTextNode("\nText\n %s\n" % n))
        mdoc.appendChild(dks[-1])
    # fresh dialog starts empty until setFromNode() is called
    form = DefinitionDlg()
    form.show()
    form.node = qdn
    self.assertEqual(form.name, '')
    self.assertEqual(form.content, '')
    self.assertEqual(form.doc, '')
    self.assertEqual(form.attributes, {})
    self.assertEqual(form.subItems,
                     ["group", "field", "attribute", "link",
                      "component", "doc", "symbols"])
    self.assertEqual(form.ui.__class__.__name__, "Ui_DefinitionDlg")
    form.setFromNode()
    form.createGUI()
    # give the dialog a model/view pair pointing at the node's index
    allAttr = True
    cm = ComponentModel(doc, allAttr)
    ri = cm.rootIndex
    di = cm.index(0, 0, ri)
    form.view = TestView(cm)
    form.view.testIndex = di
    # sanity check: form state matches the DOM before any change
    attributeMap = form.node.attributes()
    cnt = 0
    for i in range(attributeMap.count()):
        nm = attributeMap.item(i).nodeName()
        vl = attributeMap.item(i).nodeValue()
        if nm == "name":
            self.assertEqual(vl, form.name)
            cnt += 1
        else:
            self.assertEqual(vl, form.attributes[str(nm)])
    self.assertEqual(len(form.attributes), attributeMap.count() - cnt)
    mydoc = form.node.firstChildElement(str("doc"))
    text = DomTools.getText(mydoc)
    olddoc = unicode(text).strip() if text else ""
    self.assertEqual(olddoc, form.doc)
    text = DomTools.getText(form.node)
    oldcont = unicode(text).strip() if text else ""
    self.assertEqual(oldcont, form.content)
    # edit the widgets with values that should be discarded by reset()
    nname = "newname"
    ntype = "newtype"
    attrs = {"unit": "newunit", "longname": "newlogname",
             "mynew": "newvalue"}
    mdoc = "New text New text"
    form.root = doc
    allAttr = True
    cm = ComponentModel(doc, allAttr)
    ri = cm.rootIndex
    di = cm.index(0, 0, ri)
    form.view = TestView(cm)
    form.view.testIndex = di
    form.ui.nameLineEdit.setText(nname)
    form.ui.contentTextEdit.setText(ntype)
    form.ui.docTextEdit.setText(str(mdoc))
    form.ui.docTextEdit.setText(str(mdoc))
    # remove every pre-existing attribute row via the Remove button
    for r in form.attributes:
        form.ui.attributeTableWidget.setCurrentCell(0, 1)
        item = form.ui.attributeTableWidget.item(
            form.ui.attributeTableWidget.currentRow(), 0)
        print(item.text())
        # QTimer.singleShot(10, self.rmAttributeWidget)
        QTest.mouseClick(form.ui.removePushButton, Qt.LeftButton)
    # add replacement attributes through the modal attribute dialog
    i = 0
    for r in attrs:
        form.ui.attributeTableWidget.setCurrentCell(i, 1)
        self.aname = r
        self.avalue = attrs[r]
        QTimer.singleShot(10, self.attributeWidget)
        QTest.mouseClick(form.ui.addPushButton, Qt.LeftButton)
        i += 1
    form.reset()
    # reset() must restore the original node values, dropping all edits
    ats = {u'shortname': u'mynshort%s' % nn, u'type': u'mytype%s' % nn,
           u'unit': u'myunits%s' % nn}
    self.assertEqual(form.name, "myname%s" % nn)
    self.assertEqual(form.content, "$components.some%s" % nn)
    self.assertEqual(
        form.doc,
        ("".join(["\nText\n %s\n" % ii for ii in range(ndcs)])).strip())
    self.assertEqual(form.attributes, ats)
    # the DOM node itself must be unchanged
    cnt = 0
    for i in range(attributeMap.count()):
        nm = attributeMap.item(i).nodeName()
        vl = attributeMap.item(i).nodeValue()
        if nm == "name":
            self.assertEqual(vl, "myname%s" % nn)
            cnt += 1
        else:
            self.assertEqual(vl, ats[str(nm)])
    self.assertEqual(len(ats), attributeMap.count() - cnt)
    mydoc = form.node.firstChildElement(str("doc"))
    text = DomTools.getText(mydoc)
    olddoc = unicode(text).strip() if text else ""
    self.assertEqual(
        olddoc,
        ("".join(["\nText\n %s\n" % i for i in range(ndcs)])).strip())
    text = DomTools.getText(form.node)
    oldcont = unicode(text).strip() if text else ""
    self.assertEqual(oldcont, form.content)
# constructor test
# \brief It tests default settings
def test_reset_button(self):
    """Same scenario as test_reset, but reset is triggered by clicking
    the dialog's Reset push button instead of calling reset() directly."""
    fun = sys._getframe().f_code.co_name
    print("Run: %s.%s() " % (self.__class__.__name__, fun))
    # build the <definition> DOM node with randomized attribute values
    dks = []
    cks = []
    doc = QDomDocument()
    nname = "definition"
    qdn = doc.createElement(nname)
    nn = self.__rnd.randint(0, 9)
    qdn.setAttribute("name", "myname%s" % nn)
    qdn.setAttribute("type", "mytype%s" % nn)
    qdn.setAttribute("unit", "myunits%s" % nn)
    qdn.setAttribute("shortname", "mynshort%s" % nn)
    cks.append(doc.createTextNode("$components.some%s\n" % nn))
    qdn.appendChild(cks[-1])
    doc.appendChild(qdn)
    # attach a <doc> child with 0..10 text nodes
    dname = "doc"
    mdoc = doc.createElement(dname)
    qdn.appendChild(mdoc)
    ndcs = self.__rnd.randint(0, 10)
    for n in range(ndcs):
        dks.append(doc.createTextNode("\nText\n %s\n" % n))
        mdoc.appendChild(dks[-1])
    # fresh dialog starts empty until setFromNode() is called
    form = DefinitionDlg()
    form.show()
    form.node = qdn
    self.assertEqual(form.name, '')
    self.assertEqual(form.content, '')
    self.assertEqual(form.doc, '')
    self.assertEqual(form.attributes, {})
    self.assertEqual(form.subItems,
                     ["group", "field", "attribute", "link",
                      "component", "doc", "symbols"])
    self.assertEqual(form.ui.__class__.__name__, "Ui_DefinitionDlg")
    form.setFromNode()
    form.createGUI()
    # give the dialog a model/view pair pointing at the node's index
    allAttr = True
    cm = ComponentModel(doc, allAttr)
    ri = cm.rootIndex
    di = cm.index(0, 0, ri)
    form.view = TestView(cm)
    form.view.testIndex = di
    # sanity check: form state matches the DOM before any change
    attributeMap = form.node.attributes()
    cnt = 0
    for i in range(attributeMap.count()):
        nm = attributeMap.item(i).nodeName()
        vl = attributeMap.item(i).nodeValue()
        if nm == "name":
            self.assertEqual(vl, form.name)
            cnt += 1
        else:
            self.assertEqual(vl, form.attributes[str(nm)])
    self.assertEqual(len(form.attributes), attributeMap.count() - cnt)
    mydoc = form.node.firstChildElement(str("doc"))
    text = DomTools.getText(mydoc)
    olddoc = unicode(text).strip() if text else ""
    self.assertEqual(olddoc, form.doc)
    # edit the widgets with values that should be discarded by Reset
    nname = "newname"
    ntype = "newtype"
    attrs = {"unit": "newunit", "longname": "newlogname",
             "mynew": "newvalue"}
    mdoc = "New text New text"
    form.root = doc
    allAttr = True
    cm = ComponentModel(doc, allAttr)
    ri = cm.rootIndex
    di = cm.index(0, 0, ri)
    form.view = TestView(cm)
    form.view.testIndex = di
    form.ui.nameLineEdit.setText(nname)
    form.ui.contentTextEdit.setText(ntype)
    form.ui.docTextEdit.setText(str(mdoc))
    form.ui.docTextEdit.setText(str(mdoc))
    # remove every pre-existing attribute row via the Remove button
    for r in form.attributes:
        form.ui.attributeTableWidget.setCurrentCell(0, 1)
        item = form.ui.attributeTableWidget.item(
            form.ui.attributeTableWidget.currentRow(), 0)
        print(item.text())
        # QTimer.singleShot(10, self.rmAttributeWidget)
        QTest.mouseClick(form.ui.removePushButton, Qt.LeftButton)
    # add replacement attributes through the modal attribute dialog
    i = 0
    for r in attrs:
        form.ui.attributeTableWidget.setCurrentCell(i, 1)
        self.aname = r
        self.avalue = attrs[r]
        QTimer.singleShot(10, self.attributeWidget)
        QTest.mouseClick(form.ui.addPushButton, Qt.LeftButton)
        i += 1
    # trigger reset via the GUI button rather than the method
    QTest.mouseClick(form.ui.resetPushButton, Qt.LeftButton)
    # the button must restore the original node values, dropping all edits
    ats = {u'shortname': u'mynshort%s' % nn, u'type': u'mytype%s' % nn,
           u'unit': u'myunits%s' % nn}
    self.assertEqual(form.name, "myname%s" % nn)
    self.assertEqual(form.content, "$components.some%s" % nn)
    self.assertEqual(
        form.doc,
        ("".join(["\nText\n %s\n" % ii for ii in range(ndcs)])).strip())
    self.assertEqual(form.attributes, ats)
    # the DOM node itself must be unchanged
    cnt = 0
    for i in range(attributeMap.count()):
        nm = attributeMap.item(i).nodeName()
        vl = attributeMap.item(i).nodeValue()
        if nm == "name":
            self.assertEqual(vl, "myname%s" % nn)
            cnt += 1
        else:
            self.assertEqual(vl, ats[str(nm)])
    self.assertEqual(len(ats), attributeMap.count() - cnt)
    mydoc = form.node.firstChildElement(str("doc"))
    text = DomTools.getText(mydoc)
    olddoc = unicode(text).strip() if text else ""
    self.assertEqual(
        olddoc,
        ("".join(["\nText\n %s\n" % i for i in range(ndcs)])).strip())
    text = DomTools.getText(form.node)
    oldcont = unicode(text).strip() if text else ""
    self.assertEqual(oldcont, form.content)
def myAction(self):
    """Dummy external action handler; records that it was invoked."""
    setattr(self, "performed", True)
# constructor test
# constructor test
# \brief It tests default settings
def test_connect_actions(self):
    """connectExternalActions() with no arguments leaves both external
    hooks (apply and DS-link) unset and does not disturb default state."""
    fun = sys._getframe().f_code.co_name
    print("Run: %s.%s() " % (self.__class__.__name__, fun))
    form = DefinitionDlg()
    form.show()
    self.assertEqual(form.connectExternalActions(), None)
    # defaults are untouched by connecting nothing
    self.assertEqual(form.node, None)
    self.assertEqual(form.root, None)
    self.assertEqual(form.view, None)
    self.assertEqual(form.ui.__class__.__name__, "Ui_DefinitionDlg")
    self.assertEqual(form.externalApply, None)
    self.assertEqual(form.externalDSLink, None)
    # self.assertTrue(isinstance(DomTools, DomTools))
    self.assertEqual(form.result(), 0)
# constructor test
# \brief It tests default settings
def test_connect_actions_with_action(self):
    """Passing an action before createGUI(): externalApply stays None
    because the apply button does not exist yet to be connected."""
    fun = sys._getframe().f_code.co_name
    print("Run: %s.%s() " % (self.__class__.__name__, fun))
    form = DefinitionDlg()
    form.show()
    self.assertEqual(form.connectExternalActions(self.myAction), None)
    self.assertEqual(form.node, None)
    self.assertEqual(form.root, None)
    self.assertEqual(form.view, None)
    self.assertEqual(form.ui.__class__.__name__, "Ui_DefinitionDlg")
    self.assertEqual(form.externalApply, None)
    self.assertEqual(form.result(), 0)
# constructor test
# \brief It tests default settings
def test_connect_actions_with_button(self):
    """Buttons present but no actions passed: nothing gets connected."""
    fun = sys._getframe().f_code.co_name
    print("Run: %s.%s() " % (self.__class__.__name__, fun))
    form = DefinitionDlg()
    # form.ui = Ui_DefinitionDlg()
    # create the buttons manually instead of via createGUI()
    form.ui.applyPushButton = QPushButton(form)
    form.ui.linkDSPushButton = QPushButton(form)
    form.show()
    self.assertEqual(form.connectExternalActions(), None)
    self.assertEqual(form.node, None)
    self.assertEqual(form.root, None)
    self.assertEqual(form.view, None)
    self.assertEqual(form.ui.__class__.__name__, "Ui_DefinitionDlg")
    self.assertEqual(form.externalApply, None)
    self.assertEqual(form.externalDSLink, None)
    self.assertEqual(form.result(), 0)
# constructor test
# \brief It tests default settings
def test_connect_actions_with_action_button(self):
    """Action plus an existing apply button: the action is stored in
    externalApply and fires when the button is clicked."""
    fun = sys._getframe().f_code.co_name
    print("Run: %s.%s() " % (self.__class__.__name__, fun))
    form = DefinitionDlg()
    # form.ui = Ui_DefinitionDlg()
    # create the apply button manually instead of via createGUI()
    form.ui.applyPushButton = QPushButton(form)
    form.show()
    self.assertEqual(form.connectExternalActions(self.myAction), None)
    self.assertEqual(form.node, None)
    self.assertEqual(form.root, None)
    self.assertEqual(form.view, None)
    self.assertEqual(form.ui.__class__.__name__, "Ui_DefinitionDlg")
    self.assertEqual(form.externalApply, self.myAction)
    # clicking the button must invoke myAction (sets self.performed)
    self.performed = False
    QTest.mouseClick(form.ui.applyPushButton, Qt.LeftButton)
    self.assertEqual(self.performed, True)
    self.assertEqual(form.result(), 0)
# constructor test
# \brief It tests default settings
def test_connect_actions_with_action_link_button(self):
    """Passing only a DS-link action after createGUI(): externalDSLink
    remains None for this dialog."""
    fun = sys._getframe().f_code.co_name
    print("Run: %s.%s() " % (self.__class__.__name__, fun))
    form = DefinitionDlg()
    form.createGUI()
    form.show()
    self.assertEqual(
        form.connectExternalActions(None, self.myAction), None)
    self.assertEqual(form.node, None)
    self.assertEqual(form.root, None)
    self.assertEqual(form.view, None)
    self.assertEqual(form.ui.__class__.__name__, "Ui_DefinitionDlg")
    self.assertEqual(form.externalDSLink, None)
    self.performed = False
    self.assertEqual(form.result(), 0)
# constructor test
# \brief It tests default settings
def test_connect_actions_with_action_link_and_apply_button(self):
    """Apply action after createGUI(): externalApply is stored, the
    DS-link hook stays None, and clicking Apply fires the action."""
    fun = sys._getframe().f_code.co_name
    print("Run: %s.%s() " % (self.__class__.__name__, fun))
    form = DefinitionDlg()
    form.createGUI()
    form.show()
    self.assertEqual(form.connectExternalActions(self.myAction, None),
                     None)
    self.assertEqual(form.node, None)
    self.assertEqual(form.root, None)
    self.assertEqual(form.view, None)
    self.assertEqual(form.ui.__class__.__name__, "Ui_DefinitionDlg")
    self.assertEqual(form.externalApply, self.myAction)
    self.assertEqual(form.externalDSLink, None)
    # clicking the button must invoke myAction (sets self.performed)
    self.performed = False
    QTest.mouseClick(form.ui.applyPushButton, Qt.LeftButton)
    self.assertEqual(self.performed, True)
    self.assertEqual(form.result(), 0)
if __name__ == '__main__':
    # create a Qt application if the module-level one was not set up yet;
    # widgets cannot be instantiated without a running QApplication
    if not app:
        app = QApplication([])
    unittest.main()
| nexdatas/configtool | test/DefinitionDlg_test.py | Python | gpl-3.0 | 85,762 | 0 |
#!/usr/bin/env python3
"""Launcher for numatuned: pass ``-n`` as the first argument for a dry run."""
import sys

import numatuned

# Dry-run mode ("-n" as the first command-line argument) reports actions
# without applying them.
dryrun = len(sys.argv) > 1 and sys.argv[1] == '-n'

numatuned.fire(60, dryrun)
| dionbosschieter/numatuned | numatuned.py | Python | mit | 167 | 0 |
import tflearn
from tflearn.data_utils import to_categorical, pad_sequences
from tflearn.datasets import imdb
# IMDB Dataset loading
train, test, _ = imdb.load_data(path='imdb.pkl', n_words=10000,
valid_portion=0.1)
trainX, trainY = train
testX, testY = test
# Data preprocessing
# Sequence padding
trainX = pad_sequences(trainX, maxlen=100, value=0.)
testX = pad_sequences(testX, maxlen=100, value=0.)
# Converting labels to binary vectors
trainY = to_categorical(trainY, nb_classes=2)
testY = to_categorical(testY, nb_classes=2)
# Network building
net = tflearn.input_data([None, 100])
net = tflearn.embedding(net, input_dim=10000, output_dim=128)
net = tflearn.lstm(net, 128, dropout=0.8)
net = tflearn.fully_connected(net, 2, activation='softmax')
net = tflearn.regression(net, optimizer='adam', learning_rate=0.001,
loss='categorical_crossentropy')
# Training
model = tflearn.DNN(net, tensorboard_verbose=0)
model.fit(trainX, trainY, validation_set=(testX, testY), show_metric=True,
batch_size=32)
model.save('sentiment.tflearn') | sethuiyer/mlhub | Deep Sentiment Analysis/build_sentiment_model.py | Python | mit | 1,105 | 0.000905 |
# Copyright (c) 2020, Djaodjin Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
# TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
# OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
# OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#pylint:disable=unused-argument,unused-import
try:
    # Prefer the real drf_yasg objects when the package is installed.
    from drf_yasg.openapi import Response as OpenAPIResponse
    from drf_yasg.utils import no_body, swagger_auto_schema
except ImportError:
    # drf_yasg is optional: provide inert stand-ins with the same names
    # so modules can decorate views unconditionally.
    from functools import wraps
    from .compat import available_attrs

    class no_body(object): #pylint:disable=invalid-name
        # Sentinel mirroring drf_yasg.utils.no_body; used only for
        # identity checks in decorated views.
        pass

    def swagger_auto_schema(function=None, **kwargs):
        """
        Dummy decorator when drf_yasg is not present.
        """
        # Supports both @swagger_auto_schema and @swagger_auto_schema(...)
        # usage: with no function it returns the decorator itself.
        def decorator(view_func):
            @wraps(view_func, assigned=available_attrs(view_func))
            def _wrapped_view(request, *args, **kwargs):
                return view_func(request, *args, **kwargs)
            return _wrapped_view
        if function:
            return decorator(function)
        return decorator

    class OpenAPIResponse(object):
        """
        Dummy response object to document API.
        """
        # Accepts and ignores any arguments so call sites need no guards.
        def __init__(self, *args, **kwargs):
            pass
| djaodjin/djaodjin-signup | signup/docs.py | Python | bsd-2-clause | 2,317 | 0.001295 |
from fabric.api import env, local, run, sudo
# Fabric deployment settings for the readthedocs.org host.
env.user = 'root'
env.hosts = ['204.232.205.6']
# checkout of the application source on the remote host
env.code_dir = '/home/docs/sites/readthedocs.org/checkouts/readthedocs.org'
env.virtualenv = '/home/docs/sites/readthedocs.org'
# directory holding runtime files (gunicorn pid file)
env.rundir = '/home/docs/sites/readthedocs.org/run'
env.chef_executable = '/var/lib/gems/1.8/bin/chef-solo'
def install_chef():
    """Install chef-solo on the remote host via apt and rubygems."""
    commands = (
        'apt-get update',
        'apt-get install -y git-core rubygems ruby ruby-dev',
        'gem install chef --no-ri --no-rdoc',
    )
    for command in commands:
        sudo(command, pty=True)
def sync_config():
    """Push the local chef configuration tree to /etc/chef on the host."""
    destination = '%s@%s:/etc/chef' % (env.user, env.hosts[0])
    local('rsync -av . ' + destination)
def update():
    """Sync the chef configuration and converge the node with chef-solo."""
    sync_config()
    converge = 'cd /etc/chef && ' + env.chef_executable
    sudo(converge, pty=True)
def reload():
    "Reload the server."
    env.user = "docs"
    pidfile = "%s/gunicorn.pid" % env.rundir
    # SIGHUP makes gunicorn reload workers gracefully
    run("kill -HUP `cat %s`" % pidfile, pty=True)
def restart():
    "Restart (or just start) the server"
    # upstart job managing the gunicorn process
    job = 'restart readthedocs-gunicorn'
    sudo(job, pty=True)
| alex/readthedocs.org | deploy/fabfile.py | Python | mit | 940 | 0.004255 |
class Solution(object):
    """LeetCode 121 - Best Time to Buy and Sell Stock."""

    def maxProfit(self, prices):
        """Return the maximum profit from one buy followed by one sell.

        Single pass: track the lowest price seen so far and the best
        spread above it. Uses float('inf') as the initial minimum instead
        of the arbitrary 1 << 31 sentinel, which silently produced wrong
        answers for prices >= 2**31.

        :type prices: List[int]
        :rtype: int  (0 if no profitable trade exists, including for
                      empty or single-element inputs)
        """
        lowest = float('inf')   # cheapest price seen so far
        profit = 0              # best (sell - buy) spread so far
        for price in prices:
            if price < lowest:
                lowest = price
            elif price - lowest > profit:
                profit = price - lowest
        return profit
return profit | Tanych/CodeTracking | 121-Best-Time-to-Buy-and-Sell-Stock/solution.py | Python | mit | 316 | 0.025316 |
import sqlalchemy as sa
from oslo_db.sqlalchemy import types as db_types
from nca47.db.sqlalchemy.models import base as model_base
from nca47.objects import attributes as attr
# Re-export the shared declarative mixins locally for brevity.
HasTenant = model_base.HasTenant
HasId = model_base.HasId
HasStatus = model_base.HasStatus
HasOperationMode = model_base.HasOperationMode
class DnsServer(model_base.BASE, HasId, HasOperationMode):
    """Represents a dns server."""

    # human-readable server name
    name = sa.Column(sa.String(attr.NAME_MAX_LEN))
class Zone(model_base.BASE, HasId, HasOperationMode):
    """Represents a dns zone."""

    __tablename__ = 'dns_zone_info'

    zone_name = sa.Column(sa.String(attr.NAME_MAX_LEN))
    tenant_id = sa.Column(sa.String(attr.NAME_MAX_LEN))
    # identifier of the zone on the backend DNS device
    zone_id = sa.Column(sa.String(attr.NAME_MAX_LEN))
    # virtual resource this zone belongs to
    vres_id = sa.Column(sa.String(attr.NAME_MAX_LEN))
    # master/slave server lists, stored as JSON-encoded lists
    masters = sa.Column(db_types.JsonEncodedList)
    slaves = sa.Column(db_types.JsonEncodedList)
    renewal = sa.Column(sa.String(attr.NAME_MAX_LEN))
    default_ttl = sa.Column(sa.String(attr.NAME_MAX_LEN))
    owners = sa.Column(db_types.JsonEncodedList)
    ad_controller = sa.Column(sa.String(attr.NAME_MAX_LEN))
    comment = sa.Column(sa.String(attr.NAME_MAX_LEN))
class ZoneRecord(model_base.BASE, HasId, HasOperationMode):
    """Represents a dns resource record set (RRS) within a zone."""

    __tablename__ = 'dns_rrs_info'

    # owning zone (UUID of the Zone row)
    zone_id = sa.Column(sa.String(attr.UUID_LEN))
    # identifier of the record on the backend DNS device
    rrs_id = sa.Column(sa.String(attr.NAME_MAX_LEN))
    rrs_name = sa.Column(sa.String(attr.NAME_MAX_LEN))
    # record type (e.g. A, AAAA, MX) -- NOTE(review): not validated here
    type = sa.Column(sa.String(attr.NAME_MAX_LEN))
    # 'klass' avoids shadowing the Python keyword 'class'
    klass = sa.Column(sa.String(attr.NAME_MAX_LEN))
    ttl = sa.Column(sa.String(attr.NAME_MAX_LEN))
    rdata = sa.Column(sa.String(attr.NAME_MAX_LEN))
| willowd878/nca47 | nca47/db/sqlalchemy/models/dns.py | Python | apache-2.0 | 1,680 | 0 |
from utile import pretty_xml, xml_to_dict, element_to_dict
from testsuite.support import etree, TestCase
import unittest
XML_DATA = "<html><body><h1>test1</h1><h2>test2</h2></body></html>"
XML_PRETTY = """\
<html>
<body>
<h1>test1</h1>
<h2>test2</h2>
</body>
</html>
"""
XML_DICT = {'body': {'h2': 'test2', 'h1': 'test1'}}
@unittest.skipUnless(etree, 'lxml not installed')
class XMLTestCase(TestCase):
    """Tests for the lxml-backed XML helpers against fixed fixtures."""

    def test_pretty_xml(self):
        """pretty_xml() indents the compact input as expected."""
        self.assertEqual(pretty_xml(XML_DATA), XML_PRETTY)

    def test_element_to_dict(self):
        """element_to_dict() converts a parsed element tree to a dict."""
        self.assertEqual(element_to_dict(etree.XML(XML_DATA)), XML_DICT)

    def test_xml_to_dict(self):
        """xml_to_dict() parses a string and returns the same dict."""
        self.assertEqual(xml_to_dict(XML_DATA), XML_DICT)
| marwano/utile | testsuite/test_xml.py | Python | bsd-3-clause | 709 | 0 |
#!/usr/bin/env python
#
# Original filename: config.py
#
# Author: Tim Brandt
# Email: tbrandt@astro.princeton.edu
# Date: August 2011
#
# Summary: Set configuration parameters to sensible values.
#
import re
from subprocess import *
import multiprocessing
import numpy as np
def config(nframes, framesize):
    """Report system resources and decide whether the dataset fits in RAM.

    nframes   -- number of frames in the dataset
    framesize -- pixels per frame (4 bytes each are assumed below)
    Returns (mem, ncpus, storeall): physical memory in bytes, CPU thread
    count, and whether all data can be kept in memory (dataset < 20% RAM).
    """
    ###################################################################
    # Fetch the total amount of physical system memory in bytes.
    # This is the second entry on the second line of the standard
    # output of the 'free' command.
    ###################################################################
    print "\nGetting system parameters, setting pipeline execution parameters..."
    osver = Popen(["uname", "-a"], stdout=PIPE).stdout.read()
    if osver.startswith("Linux"):
        print "You are running Linux."
    elif osver.startswith("Darwin"):
        print "You are running Mac OS-X."
    else:
        print "Your operating system is not recognized."
    # NOTE(review): on an unrecognized OS 'mem' is never assigned and the
    # code below raises NameError -- consider failing fast with a message.
    if osver.startswith("Linux"):
        mem = Popen(["free", "-b"], stdout=PIPE).stdout.read()
        mem = int(mem.split('\n')[1].split()[1])
    elif osver.startswith("Darwin"):
        # OS-X: sum the resident page categories from vm_stat, scaled by
        # the page size reported in the header line
        mem = Popen(["vm_stat"], stdout=PIPE).stdout.read().split('\n')
        blocksize = re.search('.*size of ([0-9]+) bytes.*', mem[0]).group(1)
        totmem = 0.
        for line in mem:
            if np.any(["Pages free:" in line, "Pages active:" in line,
                       "Pages inactive:" in line, "Pages speculative:" in line,
                       "Pages wired down:" in line]):
                totmem += float(line.split(':')[1]) * float(blocksize)
        mem = int(totmem)
    ncpus = multiprocessing.cpu_count()
    hostname = Popen("hostname", stdout=PIPE).stdout.read().split()[0]
    print "\n  You are running on " + hostname + "."
    print "  You have " + str(mem / 2**20) + " megabytes of memory and " + \
        str(ncpus) + " threads available."
    # 4 bytes per pixel (32-bit data)
    datasize = framesize * nframes * 4
    print "  The dataset consists of " + str(nframes) + " frames, " + \
        str(datasize * 100 / mem) + "% of your physical RAM."
    # keep everything in memory only if the dataset is below 20% of RAM
    storeall = False
    if datasize * 100 / mem < 20:
        storeall = True
        print "  --> You have enough RAM to store all data."
        print "      The pipeline will not need to write all intermediate files."
    else:
        print "  --> You do not have enough RAM to store all data."
        print "      The pipeline will need to write all intermediate files"
        print "      and do the reduction in pieces."
    return mem, ncpus, storeall
| t-brandt/acorns-adi | utils/config.py | Python | bsd-2-clause | 2,628 | 0.002664 |
# (c) 2018 Red Hat Inc.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import json
from ansible.module_utils._text import to_text
from ansible.module_utils.connection import ConnectionError
from ansible.module_utils.network.common.utils import to_list
from ansible.plugins.httpapi import HttpApiBase
from ansible.utils.display import Display
display = Display()
class HttpApi(HttpApiBase):
    """HttpApi plugin for Cisco NX-OS devices using the NX-API /ins endpoint."""

    def _run_queue(self, queue, output):
        """POST the queued commands to /ins and return the parsed results.

        queue  -- list of command strings to send in one request
        output -- NX-API output format for the whole batch ('text',
                  'json' or 'config')
        Raises ConnectionError if the device reply is not valid JSON.
        """
        if self._become:
            # privilege escalation: prepend 'enable' and later drop its result
            display.vvvv('firing event: on_become')
            queue.insert(0, 'enable')

        request = request_builder(queue, output)
        headers = {'Content-Type': 'application/json'}
        response, response_data = self.connection.send('/ins', request, headers=headers, method='POST')

        try:
            response_data = json.loads(to_text(response_data.getvalue()))
        except ValueError:
            raise ConnectionError('Response was not valid JSON, got {0}'.format(
                to_text(response_data.getvalue())
            ))

        results = handle_response(response_data)

        if self._become:
            # discard the output of the injected 'enable' command
            results = results[1:]

        return results

    def send_request(self, data, **message_kwargs):
        """Send one or more commands, batching consecutive commands that
        share the same output format into a single /ins request.

        data may be a string, a dict with 'command'/'output' keys, or a
        list of either. Returns a single result for a single command,
        otherwise a list of results.
        """
        output = None
        queue = list()
        responses = list()

        for item in to_list(data):
            cmd_output = message_kwargs.get('output', 'text')
            if isinstance(item, dict):
                command = item['command']
                if 'output' in item:
                    cmd_output = item['output']
            else:
                command = item

            # Emulate '| json' from CLI
            if command.endswith('| json'):
                command = command.rsplit('|', 1)[0]
                cmd_output = 'json'

            # output format changed: flush the current batch first,
            # since one request can only carry one format
            if output and output != cmd_output:
                responses.extend(self._run_queue(queue, output))
                queue = list()

            output = cmd_output
            queue.append(command)

        if queue:
            responses.extend(self._run_queue(queue, output))

        if len(responses) == 1:
            return responses[0]
        return responses

    def edit_config(self, candidate=None, commit=True, replace=None, comment=None):
        """Apply a configuration to the device.

        With 'replace', performs a full config replace (Nexus 9K only;
        raises ConnectionError otherwise). Returns the non-empty device
        responses, or [''] if every response was empty.
        """
        resp = list()

        operations = self.connection.get_device_operations()
        self.connection.check_edit_config_capability(operations, candidate, commit, replace, comment)

        if replace:
            device_info = self.connection.get_device_info()
            # config replace is a 9K-only feature
            if '9K' not in device_info.get('network_os_platform', ''):
                raise ConnectionError(msg=u'replace is supported only on Nexus 9K devices')
            candidate = 'config replace {0}'.format(replace)

        responses = self.send_request(candidate, output='config')
        for response in to_list(responses):
            if response != '{}':
                resp.append(response)
        if not resp:
            resp = ['']

        return resp

    def run_commands(self, commands, check_rc=True):
        """Runs list of commands on remote device and returns results

        When check_rc is False, a ConnectionError is converted into its
        text message instead of being raised. Responses that look like
        JSON objects (start with '{') are decoded in place.
        """
        try:
            out = self.send_request(commands)
        except ConnectionError as exc:
            if check_rc is True:
                raise
            out = to_text(exc)

        out = to_list(out)
        if not out[0]:
            return out

        for index, response in enumerate(out):
            if response[0] == '{':
                out[index] = json.loads(response)

        return out
def handle_response(response):
results = []
if response['ins_api'].get('outputs'):
for output in to_list(response['ins_api']['outputs']['output']):
if output['code'] != '200':
raise ConnectionError('%s: %s' % (output['input'], output['msg']))
elif 'body' in output:
result = output['body']
if isinstance(result, dict):
result = json.dumps(result)
results.append(result.strip())
return results
def request_builder(commands, output, version='1.0', chunk='0', sid=None):
"""Encodes a NXAPI JSON request message
"""
output_to_command_type = {
'text': 'cli_show_ascii',
'json': 'cli_show',
'bash': 'bash',
'config': 'cli_conf'
}
maybe_output = commands[0].split('|')[-1].strip()
if maybe_output in output_to_command_type:
command_type = output_to_command_type[maybe_output]
commands = [command.split('|')[0].strip() for command in commands]
else:
try:
command_type = output_to_command_type[output]
except KeyError:
msg = 'invalid format, received %s, expected one of %s' % \
(output, ','.join(output_to_command_type.keys()))
raise ConnectionError(msg)
if isinstance(commands, (list, set, tuple)):
commands = ' ;'.join(commands)
msg = {
'version': version,
'type': command_type,
'chunk': chunk,
'sid': sid,
'input': commands,
'output_format': 'json'
}
return json.dumps(dict(ins_api=msg))
| romain-dartigues/ansible | lib/ansible/plugins/httpapi/nxos.py | Python | gpl-3.0 | 5,290 | 0.001323 |
import json
import re
import uuid
from django.utils.translation import ugettext as _
from ide.utils.project import APPINFO_MANIFEST, PACKAGE_MANIFEST, InvalidProjectArchiveException
__author__ = 'katharine'
def manifest_name_for_project(project):
if project.is_standard_project_type and project.sdk_version == '3':
return PACKAGE_MANIFEST
else:
return APPINFO_MANIFEST
def generate_manifest(project, resources):
if project.is_standard_project_type:
if project.sdk_version == '2':
return generate_v2_manifest(project, resources)
else:
return generate_v3_manifest(project, resources)
elif project.project_type == 'pebblejs':
return generate_pebblejs_manifest(project, resources)
elif project.project_type == 'simplyjs':
return generate_simplyjs_manifest(project)
else:
raise Exception(_("Unknown project type %s") % project.project_type)
def generate_v2_manifest(project, resources):
return dict_to_pretty_json(generate_v2_manifest_dict(project, resources))
def generate_v3_manifest(project, resources):
return dict_to_pretty_json(generate_v3_manifest_dict(project, resources))
def generate_v2_manifest_dict(project, resources):
manifest = {
'uuid': str(project.app_uuid),
'shortName': project.app_short_name,
'longName': project.app_long_name,
'companyName': project.app_company_name,
'versionLabel': project.app_version_label,
'versionCode': 1,
'watchapp': {
'watchface': project.app_is_watchface
},
'appKeys': json.loads(project.app_keys),
'resources': generate_resource_dict(project, resources),
'projectType': 'native',
'sdkVersion': "2",
}
if project.app_capabilities:
manifest['capabilities'] = project.app_capabilities.split(',')
if project.app_is_shown_on_communication:
manifest['watchapp']['onlyShownOnCommunication'] = project.app_is_shown_on_communication
return manifest
def generate_v3_manifest_dict(project, resources):
manifest = {
'name': project.npm_name,
'author': project.app_company_name,
'version': project.semver,
'keywords': project.keywords,
'dependencies': project.get_dependencies(),
'pebble': {
'sdkVersion': project.sdk_version,
'watchapp': {
'watchface': project.app_is_watchface
},
'messageKeys': json.loads(project.app_keys),
'resources': generate_resource_dict(project, resources),
'projectType': project.project_type
}
}
if project.app_capabilities:
manifest['pebble']['capabilities'] = project.app_capabilities.split(',')
if project.project_type == 'package':
manifest['files'] = ['dist.zip']
else:
manifest['pebble']['uuid'] = str(project.app_uuid)
manifest['pebble']['enableMultiJS'] = project.app_modern_multi_js
manifest['pebble']['displayName'] = project.app_long_name
if project.app_is_hidden:
manifest['pebble']['watchapp']['hiddenApp'] = project.app_is_hidden
if project.app_platforms:
manifest['pebble']['targetPlatforms'] = project.app_platform_list
return manifest
def generate_manifest_dict(project, resources):
if project.is_standard_project_type:
if project.sdk_version == '2':
return generate_v2_manifest_dict(project, resources)
else:
return generate_v3_manifest_dict(project, resources)
elif project.project_type == 'simplyjs':
return generate_simplyjs_manifest_dict(project)
elif project.project_type == 'pebblejs':
return generate_pebblejs_manifest_dict(project, resources)
else:
raise Exception(_("Unknown project type %s") % project.project_type)
def dict_to_pretty_json(d):
return json.dumps(d, indent=4, separators=(',', ': '), sort_keys=True) + "\n"
def generate_resource_dict(project, resources):
if project.is_standard_project_type:
return generate_native_resource_dict(project, resources)
elif project.project_type == 'simplyjs':
return generate_simplyjs_resource_dict()
elif project.project_type == 'pebblejs':
return generate_pebblejs_resource_dict(resources)
else:
raise Exception(_("Unknown project type %s") % project.project_type)
def generate_native_resource_dict(project, resources):
resource_map = {'media': []}
for resource in resources:
for resource_id in resource.get_identifiers():
d = {
'type': resource.kind,
'file': resource.root_path,
'name': resource_id.resource_id,
}
if resource_id.character_regex:
d['characterRegex'] = resource_id.character_regex
if resource_id.tracking:
d['trackingAdjust'] = resource_id.tracking
if resource_id.memory_format:
d['memoryFormat'] = resource_id.memory_format
if resource_id.storage_format:
d['storageFormat'] = resource_id.storage_format
if resource_id.space_optimisation:
d['spaceOptimization'] = resource_id.space_optimisation
if resource.is_menu_icon:
d['menuIcon'] = True
if resource_id.compatibility is not None:
d['compatibility'] = resource_id.compatibility
if project.sdk_version == '3' and resource_id.target_platforms:
d['targetPlatforms'] = json.loads(resource_id.target_platforms)
resource_map['media'].append(d)
return resource_map
def generate_simplyjs_resource_dict():
return {
"media": [
{
"menuIcon": True,
"type": "png",
"name": "IMAGE_MENU_ICON",
"file": "images/menu_icon.png"
}, {
"type": "png",
"name": "IMAGE_LOGO_SPLASH",
"file": "images/logo_splash.png"
}, {
"type": "font",
"name": "MONO_FONT_14",
"file": "fonts/UbuntuMono-Regular.ttf"
}
]
}
def generate_pebblejs_resource_dict(resources):
media = [
{
"menuIcon": True, # This must be the first entry; we adjust it later.
"type": "bitmap",
"name": "IMAGE_MENU_ICON",
"file": "images/menu_icon.png"
}, {
"type": "bitmap",
"name": "IMAGE_LOGO_SPLASH",
"file": "images/logo_splash.png"
}, {
"type": "bitmap",
"name": "IMAGE_TILE_SPLASH",
"file": "images/tile_splash.png"
}, {
"type": "font",
"name": "MONO_FONT_14",
"file": "fonts/UbuntuMono-Regular.ttf"
}
]
for resource in resources:
if resource.kind not in ('bitmap', 'png'):
continue
d = {
'type': resource.kind,
'file': resource.root_path,
'name': re.sub(r'[^A-Z0-9_]', '_', resource.root_path.upper()),
}
if resource.is_menu_icon:
d['menuIcon'] = True
del media[0]['menuIcon']
media.append(d)
return {
'media': media
}
def generate_simplyjs_manifest(project):
return dict_to_pretty_json(generate_simplyjs_manifest_dict(project))
def generate_simplyjs_manifest_dict(project):
manifest = {
"uuid": project.app_uuid,
"shortName": project.app_short_name,
"longName": project.app_long_name,
"companyName": project.app_company_name,
"versionLabel": project.app_version_label,
"versionCode": 1,
"capabilities": project.app_capabilities.split(','),
"watchapp": {
"watchface": project.app_is_watchface
},
"appKeys": {},
"resources": generate_simplyjs_resource_dict(),
"projectType": "simplyjs"
}
return manifest
def generate_pebblejs_manifest(project, resources):
return dict_to_pretty_json(generate_pebblejs_manifest_dict(project, resources))
def generate_pebblejs_manifest_dict(project, resources):
manifest = {
"uuid": project.app_uuid,
"shortName": project.app_short_name,
"longName": project.app_long_name,
"companyName": project.app_company_name,
"versionLabel": project.app_version_label,
"capabilities": project.app_capabilities.split(','),
"versionCode": 1,
"watchapp": {
"watchface": project.app_is_watchface,
'hiddenApp': project.app_is_hidden
},
"appKeys": {},
"resources": generate_pebblejs_resource_dict(resources),
"projectType": "pebblejs",
"sdkVersion": "3",
}
if project.app_platforms:
manifest["targetPlatforms"] = project.app_platform_list
return manifest
def load_manifest_dict(manifest, manifest_kind, default_project_type='native'):
""" Load data from a manifest dictionary
:param manifest: a dictionary of settings
:param manifest_kind: 'package.json' or 'appinfo.json'
:return: a tuple of (models.Project options dictionary, the media map, the dependencies dictionary)
"""
project = {}
dependencies = {}
if manifest_kind == APPINFO_MANIFEST:
project['app_short_name'] = manifest['shortName']
project['app_long_name'] = manifest['longName']
project['app_company_name'] = manifest['companyName']
project['app_version_label'] = manifest['versionLabel']
project['app_keys'] = dict_to_pretty_json(manifest.get('appKeys', {}))
project['sdk_version'] = manifest.get('sdkVersion', '2')
project['app_modern_multi_js'] = manifest.get('enableMultiJS', False)
elif manifest_kind == PACKAGE_MANIFEST:
project['app_short_name'] = manifest['name']
project['app_company_name'] = manifest['author']
project['semver'] = manifest['version']
project['app_long_name'] = manifest['pebble'].get('displayName', None)
project['app_keys'] = dict_to_pretty_json(manifest['pebble'].get('messageKeys', []))
project['keywords'] = manifest.get('keywords', [])
dependencies = manifest.get('dependencies', {})
manifest = manifest['pebble']
project['app_modern_multi_js'] = manifest.get('enableMultiJS', True)
project['sdk_version'] = manifest.get('sdkVersion', '3')
else:
raise InvalidProjectArchiveException(_('Invalid manifest kind: %s') % manifest_kind[-12:])
project['app_uuid'] = manifest.get('uuid', uuid.uuid4())
project['app_is_watchface'] = manifest.get('watchapp', {}).get('watchface', False)
project['app_is_hidden'] = manifest.get('watchapp', {}).get('hiddenApp', False)
project['app_is_shown_on_communication'] = manifest.get('watchapp', {}).get('onlyShownOnCommunication', False)
project['app_capabilities'] = ','.join(manifest.get('capabilities', []))
if 'targetPlatforms' in manifest:
project['app_platforms'] = ','.join(manifest['targetPlatforms'])
if 'resources' in manifest and 'media' in manifest['resources']:
media_map = manifest['resources']['media']
else:
media_map = {}
project['project_type'] = manifest.get('projectType', default_project_type)
return project, media_map, dependencies
| thunsaker/cloudpebble | ide/utils/sdk/manifest.py | Python | mit | 11,537 | 0.001127 |
# encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'Comment.score'
db.add_column(u'comment', 'score', self.gf('django.db.models.fields.IntegerField')(default=0), keep_default=False)
def backwards(self, orm):
# Deleting field 'Comment.score'
db.delete_column(u'comment', 'score')
models = {
'askbot.activity': {
'Meta': {'object_name': 'Activity', 'db_table': "u'activity'"},
'active_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'activity_type': ('django.db.models.fields.SmallIntegerField', [], {}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_auditted': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'object_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
'receiving_users': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'received_activity'", 'to': "orm['auth.User']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'askbot.anonymousproblem': {
'Meta': {'object_name': 'AnonymousProblem'},
'added_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'author': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ip_addr': ('django.db.models.fields.IPAddressField', [], {'max_length': '15'}),
'exercise': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'anonymous_problems'", 'to': "orm['askbot.Exercise']"}),
'session_key': ('django.db.models.fields.CharField', [], {'max_length': '40'}),
'summary': ('django.db.models.fields.CharField', [], {'max_length': '180'}),
'text': ('django.db.models.fields.TextField', [], {}),
'wiki': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'})
},
'askbot.anonymousexercise': {
'Meta': {'object_name': 'AnonymousExercise'},
'added_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'author': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ip_addr': ('django.db.models.fields.IPAddressField', [], {'max_length': '15'}),
'session_key': ('django.db.models.fields.CharField', [], {'max_length': '40'}),
'summary': ('django.db.models.fields.CharField', [], {'max_length': '180'}),
'tagnames': ('django.db.models.fields.CharField', [], {'max_length': '125'}),
'text': ('django.db.models.fields.TextField', [], {}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '300'}),
'wiki': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'})
},
'askbot.problem': {
'Meta': {'object_name': 'Problem', 'db_table': "u'problem'"},
'accepted': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'accepted_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'added_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'author': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'problems'", 'to': "orm['auth.User']"}),
'comment_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'deleted': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'deleted_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'deleted_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'deleted_problems'", 'null': 'True', 'to': "orm['auth.User']"}),
'html': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_edited_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'last_edited_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'last_edited_problems'", 'null': 'True', 'to': "orm['auth.User']"}),
'locked': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'locked_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'locked_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'locked_problems'", 'null': 'True', 'to': "orm['auth.User']"}),
'offensive_flag_count': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'exercise': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'problems'", 'to': "orm['askbot.Exercise']"}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'text': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'vote_down_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'vote_up_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'wiki': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'wikified_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'})
},
'askbot.problemrevision': {
'Meta': {'object_name': 'ProblemRevision', 'db_table': "u'problem_revision'"},
'problem': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'revisions'", 'to': "orm['askbot.Problem']"}),
'author': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'problemrevisions'", 'to': "orm['auth.User']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'revised_at': ('django.db.models.fields.DateTimeField', [], {}),
'revision': ('django.db.models.fields.PositiveIntegerField', [], {}),
'summary': ('django.db.models.fields.CharField', [], {'max_length': '300', 'blank': 'True'}),
'text': ('django.db.models.fields.TextField', [], {})
},
'askbot.award': {
'Meta': {'object_name': 'Award', 'db_table': "u'award'"},
'awarded_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'badge': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'award_badge'", 'to': "orm['askbot.Badge']"}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'notified': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'object_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'award_user'", 'to': "orm['auth.User']"})
},
'askbot.badge': {
'Meta': {'unique_together': "(('name', 'type'),)", 'object_name': 'Badge', 'db_table': "u'badge'"},
'awarded_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'awarded_to': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'badges'", 'through': "'Award'", 'to': "orm['auth.User']"}),
'description': ('django.db.models.fields.CharField', [], {'max_length': '300'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'multiple': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'slug': ('django.db.models.fields.SlugField', [], {'db_index': 'True', 'max_length': '50', 'blank': 'True'}),
'type': ('django.db.models.fields.SmallIntegerField', [], {})
},
'askbot.comment': {
'Meta': {'object_name': 'Comment', 'db_table': "u'comment'"},
'added_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'comment': ('django.db.models.fields.CharField', [], {'max_length': '2048'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'html': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '2048'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'object_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'comments'", 'to': "orm['auth.User']"})
},
'askbot.emailfeedsetting': {
'Meta': {'object_name': 'EmailFeedSetting'},
'added_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'feed_type': ('django.db.models.fields.CharField', [], {'max_length': '16'}),
'frequency': ('django.db.models.fields.CharField', [], {'default': "'n'", 'max_length': '8'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'reported_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'subscriber': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'notification_subscriptions'", 'to': "orm['auth.User']"})
},
'askbot.favoriteexercise': {
'Meta': {'object_name': 'FavoriteExercise', 'db_table': "u'favorite_exercise'"},
'added_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'exercise': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['askbot.Exercise']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'user_favorite_exercises'", 'to': "orm['auth.User']"})
},
'askbot.flaggeditem': {
'Meta': {'unique_together': "(('content_type', 'object_id', 'user'),)", 'object_name': 'FlaggedItem', 'db_table': "u'flagged_item'"},
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'flagged_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'object_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'flaggeditems'", 'to': "orm['auth.User']"})
},
'askbot.markedtag': {
'Meta': {'object_name': 'MarkedTag'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'reason': ('django.db.models.fields.CharField', [], {'max_length': '16'}),
'tag': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'user_selections'", 'to': "orm['askbot.Tag']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'tag_selections'", 'to': "orm['auth.User']"})
},
'askbot.exercise': {
'Meta': {'object_name': 'Exercise', 'db_table': "u'exercise'"},
'added_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'problem_accepted': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'problem_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'author': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'exercises'", 'to': "orm['auth.User']"}),
'close_reason': ('django.db.models.fields.SmallIntegerField', [], {'null': 'True', 'blank': 'True'}),
'closed': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'closed_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'closed_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'closed_exercises'", 'null': 'True', 'to': "orm['auth.User']"}),
'comment_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'deleted': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'deleted_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'deleted_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'deleted_exercises'", 'null': 'True', 'to': "orm['auth.User']"}),
'favorited_by': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'favorite_exercises'", 'through': "'FavoriteExercise'", 'to': "orm['auth.User']"}),
'favourite_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'followed_by': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'followed_exercises'", 'to': "orm['auth.User']"}),
'html': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_activity_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_activity_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'last_active_in_exercises'", 'to': "orm['auth.User']"}),
'last_edited_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'last_edited_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'last_edited_exercises'", 'null': 'True', 'to': "orm['auth.User']"}),
'locked': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'locked_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'locked_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'locked_exercises'", 'null': 'True', 'to': "orm['auth.User']"}),
'offensive_flag_count': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'summary': ('django.db.models.fields.CharField', [], {'max_length': '180'}),
'tagnames': ('django.db.models.fields.CharField', [], {'max_length': '125'}),
'tags': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'exercises'", 'to': "orm['askbot.Tag']"}),
'text': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '300'}),
'view_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'vote_down_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'vote_up_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'wiki': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'wikified_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'})
},
'askbot.exerciserevision': {
'Meta': {'object_name': 'ExerciseRevision', 'db_table': "u'exercise_revision'"},
'author': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'exerciserevisions'", 'to': "orm['auth.User']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'exercise': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'revisions'", 'to': "orm['askbot.Exercise']"}),
'revised_at': ('django.db.models.fields.DateTimeField', [], {}),
'revision': ('django.db.models.fields.PositiveIntegerField', [], {}),
'summary': ('django.db.models.fields.CharField', [], {'max_length': '300', 'blank': 'True'}),
'tagnames': ('django.db.models.fields.CharField', [], {'max_length': '125'}),
'text': ('django.db.models.fields.TextField', [], {}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '300'})
},
'askbot.exerciseview': {
'Meta': {'object_name': 'ExerciseView'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'exercise': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'viewed'", 'to': "orm['askbot.Exercise']"}),
'when': ('django.db.models.fields.DateTimeField', [], {}),
'who': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'exercise_views'", 'to': "orm['auth.User']"})
},
'askbot.repute': {
'Meta': {'object_name': 'Repute', 'db_table': "u'repute'"},
'comment': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'negative': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'positive': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'exercise': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['askbot.Exercise']", 'null': 'True', 'blank': 'True'}),
'reputation': ('django.db.models.fields.IntegerField', [], {'default': '1'}),
'reputation_type': ('django.db.models.fields.SmallIntegerField', [], {}),
'reputed_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'askbot.tag': {
'Meta': {'object_name': 'Tag', 'db_table': "u'tag'"},
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'created_tags'", 'to': "orm['auth.User']"}),
'deleted': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'deleted_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'deleted_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'deleted_tags'", 'null': 'True', 'to': "orm['auth.User']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
'used_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'})
},
'askbot.vote': {
'Meta': {'unique_together': "(('content_type', 'object_id', 'user'),)", 'object_name': 'Vote', 'db_table': "u'vote'"},
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'object_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'votes'", 'to': "orm['auth.User']"}),
'vote': ('django.db.models.fields.SmallIntegerField', [], {}),
'voted_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'})
},
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'blank': 'True'})
},
'auth.permission': {
'Meta': {'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'about': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'bronze': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'date_of_birth': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'email_isvalid': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'email_key': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'gold': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'gravatar': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'blank': 'True'}),
'hide_ignored_exercises': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'blank': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'location': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'exercises_per_page': ('django.db.models.fields.SmallIntegerField', [], {'default': '10'}),
'real_name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'reputation': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1'}),
'response_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'silver': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'status': ('django.db.models.fields.CharField', [], {'default': "'w'", 'max_length': '2'}),
'tag_filter_setting': ('django.db.models.fields.CharField', [], {'default': "'ignored'", 'max_length': '16'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'}),
'website': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'})
},
'contenttypes.contenttype': {
'Meta': {'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
}
}
complete_apps = ['askbot']
| maxwward/SCOPEBak | askbot/migrations/0021_auto__add_field_comment_score.py | Python | gpl-3.0 | 25,917 | 0.008604 |
# -*- coding=utf8 -*-
#******************************************************************************
# MediaTypes.py
#------------------------------------------------------------------------------
#
# Copyright (c) 2015 LivingOn <LivingOn@xmail.net>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
#******************************************************************************
import os
from resources.lib.SxxExxKennung import SxxExxKennung
from resources.lib.YoutubePlaylist import YoutubePlaylist
class MediaType(object):
    """Base class for media entries that are materialized as Kodi .strm files.

    Subclasses implement :meth:`create_strm_files`.  The class additionally
    tracks every folder a .strm file was written into (so callers can trigger
    a library scan for them) and knows how to activate streams that were
    written in a disabled, '#'-commented state.
    """

    # Playback URL template handed to the YouTube plugin.
    _PLUGIN = "plugin://plugin.video.youtube/play/?video_id=%s"
    _all_strm_files = []
    _all_strm_folder = []

    def create_strm_files(self):
        """Write the .strm file(s) for this media entry (subclass hook)."""
        # Bug fix: the original ``raise NotImplemented`` raised a TypeError,
        # because NotImplemented is a sentinel constant, not an exception.
        raise NotImplementedError

    @classmethod
    def activate_all_streams(cls, series_library, movies_library):
        """Uncomment the plugin lines of every stream recorded as inactive,
        then drop both libraries' 'inactive' bookkeeping files."""
        for strmfile in _get_inactive_strms_in(series_library):
            _activate_stream_in(strmfile)
        for strmfile in _get_inactive_strms_in(movies_library):
            _activate_stream_in(strmfile)
        _remove_inactive_file_from(series_library)
        _remove_inactive_file_from(movies_library)

    @classmethod
    def exists_inactive_streams(cls, series_library, movies_library):
        """Return True when either library still has an 'inactive' file."""
        inactive_series = os.path.exists("%sinactive" % series_library)
        inactive_movies = os.path.exists("%sinactive" % movies_library)
        return inactive_series or inactive_movies

    @classmethod
    def all_strm_folder(cls):
        """Return the recorded target folders, de-duplicated, order kept."""
        all_folder = []
        for folder in cls._all_strm_folder:
            if folder not in all_folder:
                all_folder.append(folder)
        return all_folder

    @classmethod
    def clear_all_strm_folder(cls):
        """Forget all recorded target folders."""
        cls._all_strm_folder = []
class NoMediaFile(MediaType):
    """Null object: a media entry that produces no .strm files at all."""
    def create_strm_files(self):
        # Intentionally a no-op; there is nothing to materialize.
        pass
class SingleMediaFile(MediaType):
    """A library entry that is backed by exactly one YouTube video."""

    def __init__(self, librarypath, title, videoid, season=None):
        """Remember where and under which name the stream should live."""
        self._librarypath = librarypath
        self._title = title
        self._videoid = videoid
        self._season = season

    def create_strm_files(self):
        """Write one .strm file for the video and record its folder."""
        target = _create_strm_folder(self._librarypath, self._title)
        self._all_strm_folder.append(target)
        # A season label, when present, names the file instead of the title.
        if self._season:
            basename = self._season
        else:
            basename = self._title
        strm_path = "%s/%s.strm" % (target, basename)
        stream_url = MediaType._PLUGIN % self._videoid
        if _write_strm_file(strm_path, stream_url):
            _append_to_inactive_file(strm_path, self._librarypath)
class PlaylistFile(MediaType):
    """A library entry that is backed by a whole YouTube playlist."""

    def __init__(self, libraypath, title, playlistid):
        """Remember the target library, the show title and the playlist id."""
        self._librarypath = libraypath
        self._title = title
        self._playlistid = playlistid

    def create_strm_files(self):
        """Write a .strm file per SxxExx-named playlist entry."""
        target = _create_strm_folder(self._librarypath, self._title)
        self._all_strm_folder.append(target)
        for (title, videoid) in YoutubePlaylist.parse(self._playlistid):
            serie = SxxExxKennung.parse(title)
            # Entries whose title carries no SxxExx tag are skipped.
            if not serie:
                continue
            strm_path = "%s/%s.strm" % (target, serie)
            if _write_strm_file(strm_path, MediaType._PLUGIN % videoid):
                _append_to_inactive_file(strm_path, self._librarypath)
def _create_strm_folder(librarypath, title):
    """Return the per-title folder path, creating the folder if needed."""
    folder = librarypath + title
    try:
        os.mkdir(folder)
    except OSError:
        # Folder already exists (or cannot be created) -- later file writes
        # will surface any real problem, so carry on either way.
        pass
    return folder
def _write_strm_file(strmfile, content):
    """Append *content* to *strmfile* as a commented-out ('#') line.

    The entry is only added when the uncommented form is not already
    present in the file.  Returns True when a new line was written.
    """
    entryline = "%s\n" % content
    if not _is_not_in_strm_file(strmfile, entryline):
        return False
    try:
        # Written disabled; _activate_stream_in strips the '#' later.
        open(strmfile, "a+").write("#" + entryline)
    except IOError:
        return False
    return True
def _append_to_inactive_file(strmfile, librarypath):
    """Record *strmfile* in the library's 'inactive' bookkeeping file."""
    try:
        open("%sinactive" % librarypath, "a+").write("%s\n" % strmfile)
    except IOError:
        pass  # bookkeeping is best-effort
def _get_inactive_strms_in(library):
    """Return the de-duplicated list of .strm paths recorded as inactive.

    Reads the library's 'inactive' bookkeeping file; a missing/unreadable
    file yields an empty list.  Order of first occurrence is preserved.
    """
    inactive_file = "%sinactive" % library
    try:
        content = open(inactive_file, "rU").readlines()
    except IOError:
        return []
    result = []
    for line in content:
        path = line.strip("\n")
        # Bug fix: the original compared the raw line (with its trailing
        # newline) against the already-stripped entries, so duplicates were
        # never detected.  Compare the stripped path instead.
        if path not in result:
            result.append(path)
    return result
def _is_not_in_strm_file(strmfile, entryline):
    """Return True when *entryline* does not occur verbatim in *strmfile*.

    A missing or unreadable file counts as "not present".
    """
    try:
        existing = open(strmfile, "rU").readlines()
    except IOError:
        existing = []
    return entryline not in existing
def _activate_stream_in(strmfile):
    """Enable playback by uncommenting every '#plugin:' line in *strmfile*."""
    try:
        lines = open(strmfile, "rU").readlines()
        activated = [line[1:] if line.startswith("#plugin:") else line
                     for line in lines]
        open(strmfile, "w").writelines(activated)
    except IOError:
        pass  # leave the file untouched if it cannot be processed
def _remove_inactive_file_from(library):
    """Delete the library's 'inactive' bookkeeping file, if it exists."""
    try:
        os.remove(library + "inactive")
    except OSError:
        pass  # nothing to remove
| LivingOn/xbmc-script.youtube2kodi | resources/lib/MediaTypes.py | Python | gpl-2.0 | 5,517 | 0.005982 |
"""
This module is meant for vendorizing Python libraries. Most libraries will need
to have some ``sys.path`` alterations done unless they are doing relative
imports.
Do **not** add anything to this module that does not represent a vendorized
library.
Vendored libraries should go into the ``vendor`` directory and imported from
there. This is so we allow libraries that are installed normally to be imported
if the vendored module is not available.
The import dance here is done so that all other imports throught ceph-deploy
are kept the same regardless of where the module comes from.
The expected way to import remoto would look like this::
from ceph_deploy.lib import remoto
"""
try:
    # Prefer the vendored copy shipped inside ceph_deploy (see module docstring).
    from .vendor import remoto
except ImportError:
    # Fall back to a system-wide installation of remoto.
    import remoto  # noqa
| SUSE/ceph-deploy | ceph_deploy/lib/__init__.py | Python | mit | 817 | 0 |
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class RPbdzmq(RPackage):
    """Programming with Big Data -- Interface to 'ZeroMQ'

    'ZeroMQ' is a well-known library for high-performance asynchronous
    messaging in scalable, distributed applications. This package provides
    high level R wrapper functions to easily utilize 'ZeroMQ'. We mainly focus
    on interactive client/server programming frameworks. For convenience, a
    minimal 'ZeroMQ' library (4.1.0 rc1) is shipped with 'pbdZMQ', which can
    be used if no system installation of 'ZeroMQ' is available. A few wrapper
    functions compatible with 'rzmq' are also provided."""

    homepage = "http://r-pbd.org/"
    url = "https://cloud.r-project.org/src/contrib/pbdZMQ_0.2-4.tar.gz"
    list_url = "https://cloud.r-project.org/src/contrib/Archive/pbdZMQ"

    # Known releases with their tarball checksums, newest first.
    version('0.3-4', sha256='07794bd6858e093f8b6b879ddd5ab0195449b47a41b70cab2f60603f0a53b129')
    version('0.3-3', sha256='ae26c13400e2acfb6463ff9b67156847a22ec79f3b53baf65119efaba1636eca')
    version('0.3-2', sha256='ece2a2881c662f77126e4801ba4e01c991331842b0d636ce5a2b591b9de3fc37')
    version('0.2-4', sha256='bfacac88b0d4156c70cf63fc4cb9969a950693996901a4fa3dcd59949ec065f6')

    # The minimum R version rises with newer pbdZMQ releases.
    depends_on('r@3.0.0:', type=('build', 'run'))
    depends_on('r@3.2.0:', when='@0.2-6:', type=('build', 'run'))
    depends_on('r@3.5.0:', when='@0.3-4:', type=('build', 'run'))
    # r-r6 was only needed by the older releases.
    depends_on('r-r6', when='@:0.2-6', type=('build', 'run'))
    # System ZeroMQ; a minimal bundled copy exists as fallback (see docstring).
    depends_on('libzmq@4.0.4:')
# Copyright (c) 2010 Mitch Garnaat http://garnaat.org/
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
from boto.connection import AWSQueryConnection
from boto.sdb.regioninfo import SDBRegionInfo
import boto
try:
import json
except ImportError:
import simplejson as json
#boto.set_stream_logger('sns')
class SNSConnection(AWSQueryConnection):
    """Connection to Amazon's Simple Notification Service (SNS).

    Every operation requests a JSON response body ('ContentType': 'JSON')
    and returns it decoded into native Python objects.  Non-200 responses
    are logged and raised as ``self.ResponseError``.
    """

    DefaultRegionName = 'us-east-1'
    DefaultRegionEndpoint = 'sns.us-east-1.amazonaws.com'
    APIVersion = '2010-03-31'
    SignatureVersion = '2'

    def __init__(self, aws_access_key_id=None, aws_secret_access_key=None,
                 is_secure=True, port=None, proxy=None, proxy_port=None,
                 proxy_user=None, proxy_pass=None, debug=0,
                 https_connection_factory=None, region=None, path='/',
                 converter=None):
        if not region:
            region = SDBRegionInfo(self, self.DefaultRegionName,
                                   self.DefaultRegionEndpoint)
        self.region = region
        AWSQueryConnection.__init__(self, aws_access_key_id,
                                    aws_secret_access_key,
                                    is_secure, port, proxy, proxy_port,
                                    proxy_user, proxy_pass,
                                    self.region.endpoint, debug,
                                    https_connection_factory, path)

    def _get_json_response(self, action, params):
        """Issue *action* as a GET request and return the decoded JSON body.

        Centralizes the response handling that was previously repeated in
        every public method.

        :raises: self.ResponseError on any non-200 HTTP status.
        """
        response = self.make_request(action, params, '/', 'GET')
        body = response.read()
        if response.status == 200:
            return json.loads(body)
        boto.log.error('%s %s' % (response.status, response.reason))
        boto.log.error('%s' % body)
        raise self.ResponseError(response.status, response.reason, body)

    def get_all_topics(self, next_token=None):
        """
        :type next_token: string
        :param next_token: Token returned by the previous call to
                           this method.
        """
        params = {'ContentType': 'JSON'}
        if next_token:
            params['NextToken'] = next_token
        return self._get_json_response('ListTopics', params)

    def get_topic_attributes(self, topic):
        """
        Get attributes of a Topic

        :type topic: string
        :param topic: The ARN of the topic.
        """
        params = {'ContentType': 'JSON',
                  'TopicArn': topic}
        return self._get_json_response('GetTopicAttributes', params)

    def add_permission(self, topic, label, account_ids, actions):
        """
        Adds a statement to a topic's access control policy, granting
        access for the specified AWS accounts to the specified actions.

        :type topic: string
        :param topic: The ARN of the topic.

        :type label: string
        :param label: A unique identifier for the new policy statement.

        :type account_ids: list of strings
        :param account_ids: The AWS account ids of the users who will be
                            give access to the specified actions.

        :type actions: list of strings
        :param actions: The actions you want to allow for each of the
                        specified principal(s).
        """
        params = {'ContentType': 'JSON',
                  'TopicArn': topic,
                  'Label': label}
        self.build_list_params(params, account_ids, 'AWSAccountId')
        self.build_list_params(params, actions, 'ActionName')
        return self._get_json_response('AddPermission', params)

    def remove_permission(self, topic, label):
        """
        Removes a statement from a topic's access control policy.

        :type topic: string
        :param topic: The ARN of the topic.

        :type label: string
        :param label: A unique identifier for the policy statement
                      to be removed.
        """
        params = {'ContentType': 'JSON',
                  'TopicArn': topic,
                  'Label': label}
        return self._get_json_response('RemovePermission', params)

    def create_topic(self, topic):
        """
        Create a new Topic.

        :type topic: string
        :param topic: The name of the new topic.
        """
        params = {'ContentType': 'JSON',
                  'Name': topic}
        return self._get_json_response('CreateTopic', params)

    def delete_topic(self, topic):
        """
        Delete an existing topic

        :type topic: string
        :param topic: The ARN of the topic
        """
        params = {'ContentType': 'JSON',
                  'TopicArn': topic}
        return self._get_json_response('DeleteTopic', params)

    def publish(self, topic, message, subject=None):
        """
        Publish a message to a Topic.

        :type topic: string
        :param topic: The ARN of the topic.

        :type message: string
        :param message: The message you want to send to the topic.
                        Messages must be UTF-8 encoded strings and
                        be at most 4KB in size.

        :type subject: string
        :param subject: Optional parameter to be used as the "Subject"
                        line of the email notifications.
        """
        params = {'ContentType': 'JSON',
                  'TopicArn': topic,
                  'Message': message}
        if subject:
            params['Subject'] = subject
        return self._get_json_response('Publish', params)

    def subscribe(self, topic, protocol, endpoint):
        """
        Subscribe to a Topic.

        :type topic: string
        :param topic: The name of the new topic.

        :type protocol: string
        :param protocol: The protocol used to communicate with
                         the subscriber.  Current choices are:
                         email|email-json|http|https|sqs

        :type endpoint: string
        :param endpoint: The location of the endpoint for
                         the subscriber.
                         * For email, this would be a valid email address
                         * For email-json, this would be a valid email address
                         * For http, this would be a URL beginning with http
                         * For https, this would be a URL beginning with https
                         * For sqs, this would be the ARN of an SQS Queue
        """
        params = {'ContentType': 'JSON',
                  'TopicArn': topic,
                  'Protocol': protocol,
                  'Endpoint': endpoint}
        return self._get_json_response('Subscribe', params)

    def confirm_subscription(self, topic, token,
                             authenticate_on_unsubscribe=False):
        """
        Confirm a pending subscription.

        :type topic: string
        :param topic: The ARN of the topic.

        :type token: string
        :param token: Short-lived token sent to and endpoint during
                      the Subscribe operation.

        :type authenticate_on_unsubscribe: bool
        :param authenticate_on_unsubscribe: Optional parameter indicating
                                            that you wish to disable
                                            unauthenticated unsubscription
                                            of the subscription.
        """
        params = {'ContentType': 'JSON',
                  'TopicArn': topic,
                  'Token': token}
        if authenticate_on_unsubscribe:
            params['AuthenticateOnUnsubscribe'] = 'true'
        return self._get_json_response('ConfirmSubscription', params)

    def unsubscribe(self, subscription):
        """
        Allows endpoint owner to delete subscription.
        Confirmation message will be delivered.

        :type subscription: string
        :param subscription: The ARN of the subscription to be deleted.
        """
        params = {'ContentType': 'JSON',
                  'SubscriptionArn': subscription}
        return self._get_json_response('Unsubscribe', params)

    def get_all_subscriptions(self, next_token=None):
        """
        Get list of all subscriptions.

        :type next_token: string
        :param next_token: Token returned by the previous call to
                           this method.
        """
        params = {'ContentType': 'JSON'}
        if next_token:
            params['NextToken'] = next_token
        return self._get_json_response('ListSubscriptions', params)

    def get_all_subscriptions_by_topic(self, topic, next_token=None):
        """
        Get list of all subscriptions to a specific topic.

        :type topic: string
        :param topic: The ARN of the topic for which you wish to
                      find subscriptions.

        :type next_token: string
        :param next_token: Token returned by the previous call to
                           this method.
        """
        params = {'ContentType': 'JSON',
                  'TopicArn': topic}
        if next_token:
            params['NextToken'] = next_token
        # Bug fix: this previously issued 'ListSubscriptions', which ignores
        # the TopicArn filter; the correct API action for a per-topic listing
        # is 'ListSubscriptionsByTopic'.
        return self._get_json_response('ListSubscriptionsByTopic', params)
| sorenh/cc | vendor/boto/boto/sns/__init__.py | Python | apache-2.0 | 13,553 | 0.003394 |
'''
Created on Jul 28, 2013
@author: Rob
'''
import os, yaml
# Built-in default configuration; overwritten by config.yml via ReadFromDisk().
config = {
    # Bot nicknames that are recognized/used.
    'names': [
        'NT',
        'VGTestServer'
    ],
    # IRC servers to connect to, keyed by hostname; each server lists the
    # channels to join and per-channel feature toggles.
    'servers':{
        'irc.server.tld': {
            'port':6667,
            'password':None,
            'channels':{
                '#vgstation': {
                    'nudges':True,
                    'status':True
                }
            }
        }
    },
    # Per-plugin settings (see the corresponding plugin modules).
    'plugins':
    {
        'redmine': {
            'url': '',
            'apikey':''
        },
        'nudge': {
            'hostname': '',
            'port': 45678,
            'key': 'passwordgoeshere'
        }
    }
}
def ReadFromDisk():
    """Load ``config.yml`` into the module-level ``config`` dict.

    On first run the built-in defaults are written out so the user has a
    template to edit; afterwards the file contents replace the defaults.
    """
    global config
    config_file = 'config.yml'
    if not os.path.isfile(config_file):
        with open(config_file, 'w') as cw:
            # safe_dump suffices: the defaults are plain dicts/lists/scalars.
            yaml.safe_dump(config, cw, default_flow_style=False)
    with open(config_file, 'r') as cr:
        # safe_load instead of the bare yaml.load: loading a config file must
        # not construct arbitrary Python objects, and PyYAML >= 6 requires an
        # explicit Loader for yaml.load anyway.
        config = yaml.safe_load(cr)
def get(key, default=None):
    """Look up a dotted *key* (e.g. ``'plugins.nudge.port'``) in ``config``.

    :param key: Dot-separated path into the nested config dict.
    :param default: Value returned when the path cannot be resolved.
    """
    global config
    try:
        value = config
        for part in key.split('.'):
            value = value[part]
        return value
    except (KeyError, TypeError):
        # KeyError: a path component is missing.
        # TypeError: the path descends into a non-indexable value (robustness
        # fix -- the original only caught KeyError and crashed here).
        return default
# -*- coding: utf-8 -*-
#from __future__ import print_function, division, absolute_import, unicode_literals
#from gmusicapi.clients.webclient import Webclient
#from gmusicapi.clients.musicmanager import Musicmanager
from gmusicapi.clients.mobileclient import Mobileclient
#(Webclient, Musicmanager, Mobileclient) # noqa
| vially/googlemusic-xbmc | resources/Lib/gmusicapi/clients/__init__.py | Python | gpl-3.0 | 323 | 0.01548 |
# Read N, then N integers from stdin, and print them space-separated
# in descending order.
count = int(input())
values = [int(input()) for _ in range(count)]
values.sort(reverse=True)
print(' '.join(map(str, values)))
# -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    def forwards(self, orm):
        """Create the ``maintenance_removeinstancedatabase`` table."""
        # Adding model 'RemoveInstanceDatabase'
        db.create_table(u'maintenance_removeinstancedatabase', (
            (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('created_at', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, blank=True)),
            ('updated_at', self.gf('django.db.models.fields.DateTimeField')(auto_now=True, blank=True)),
            ('current_step', self.gf('django.db.models.fields.PositiveSmallIntegerField')(default=0)),
            ('status', self.gf('django.db.models.fields.IntegerField')(default=0)),
            ('started_at', self.gf('django.db.models.fields.DateTimeField')(null=True, blank=True)),
            ('finished_at', self.gf('django.db.models.fields.DateTimeField')(null=True, blank=True)),
            ('can_do_retry', self.gf('django.db.models.fields.BooleanField')(default=True)),
            ('task_schedule', self.gf('django.db.models.fields.related.ForeignKey')(blank=True, related_name=u'maintenance_removeinstancedatabase_related', null=True, to=orm['maintenance.TaskSchedule'])),
            ('task', self.gf('django.db.models.fields.related.ForeignKey')(related_name=u'remove_instances_database_manager', to=orm['notification.TaskHistory'])),
            ('database', self.gf('django.db.models.fields.related.ForeignKey')(related_name=u'remove_instances_database_manager', to=orm['logical.Database'])),
            ('instance', self.gf('django.db.models.fields.related.ForeignKey')(related_name=u'remove_instances_database_manager', to=orm['physical.Instance'])),
        ))
        db.send_create_signal(u'maintenance', ['RemoveInstanceDatabase'])
def backwards(self, orm):
# Deleting model 'RemoveInstanceDatabase'
db.delete_table(u'maintenance_removeinstancedatabase')
models = {
u'account.organization': {
'Meta': {'object_name': 'Organization'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'external': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'grafana_datasource': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
'grafana_endpoint': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'grafana_hostgroup': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
'grafana_orgid': ('django.db.models.fields.CharField', [], {'max_length': '10', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'account.team': {
'Meta': {'ordering': "[u'name']", 'object_name': 'Team'},
'contacts': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'database_alocation_limit': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '2'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'organization': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'team_organization'", 'on_delete': 'models.PROTECT', 'to': u"orm['account.Organization']"}),
'role': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.Group']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'users': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.User']", 'symmetrical': 'False'})
},
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Group']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Permission']"}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
u'backup.backupgroup': {
'Meta': {'object_name': 'BackupGroup'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'backup.snapshot': {
'Meta': {'object_name': 'Snapshot'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'database_name': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '100', 'null': 'True', 'blank': 'True'}),
'end_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'environment': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'backup_environment'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['physical.Environment']"}),
'error': ('django.db.models.fields.CharField', [], {'max_length': '400', 'null': 'True', 'blank': 'True'}),
'group': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'backups'", 'null': 'True', 'to': u"orm['backup.BackupGroup']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'instance': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'backup_instance'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['physical.Instance']"}),
'is_automatic': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'purge_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'size': ('django.db.models.fields.BigIntegerField', [], {'null': 'True', 'blank': 'True'}),
'snapshopt_id': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'snapshot_name': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'start_at': ('django.db.models.fields.DateTimeField', [], {}),
'status': ('django.db.models.fields.IntegerField', [], {}),
'type': ('django.db.models.fields.IntegerField', [], {}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'volume': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'backups'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['physical.Volume']"}),
'volume_path': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
u'logical.database': {
'Meta': {'ordering': "(u'name',)", 'unique_together': "((u'name', u'environment'),)", 'object_name': 'Database'},
'backup_path': ('django.db.models.fields.CharField', [], {'max_length': '300', 'null': 'True', 'blank': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'databaseinfra': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'databases'", 'on_delete': 'models.PROTECT', 'to': u"orm['physical.DatabaseInfra']"}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'disk_auto_resize': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'environment': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'databases'", 'on_delete': 'models.PROTECT', 'to': u"orm['physical.Environment']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_in_quarantine': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_protected': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'db_index': 'True'}),
'project': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'databases'", 'null': 'True', 'on_delete': 'models.PROTECT', 'to': u"orm['logical.Project']"}),
'quarantine_dt': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'quarantine_user': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'databases_quarantine'", 'null': 'True', 'to': u"orm['auth.User']"}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '2'}),
'subscribe_to_email_events': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'team': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'databases'", 'null': 'True', 'to': u"orm['account.Team']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'used_size_in_bytes': ('django.db.models.fields.FloatField', [], {'default': '0.0'})
},
u'logical.project': {
'Meta': {'ordering': "[u'name']", 'object_name': 'Project'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '50'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'maintenance.addinstancestodatabase': {
'Meta': {'object_name': 'AddInstancesToDatabase'},
'can_do_retry': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'current_step': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'}),
'database': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'add_instances_to_database_manager'", 'to': u"orm['logical.Database']"}),
'finished_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'number_of_instances': ('django.db.models.fields.PositiveIntegerField', [], {}),
'number_of_instances_before': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True'}),
'started_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'task': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'add_instances_to_database_manager'", 'to': u"orm['notification.TaskHistory']"}),
'task_schedule': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'maintenance_addinstancestodatabase_related'", 'null': 'True', 'to': u"orm['maintenance.TaskSchedule']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'maintenance.databasechangeparameter': {
'Meta': {'object_name': 'DatabaseChangeParameter'},
'can_do_retry': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'current_step': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'}),
'database': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'change_parameters'", 'to': u"orm['logical.Database']"}),
'finished_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'started_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'task': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'database_change_parameters'", 'to': u"orm['notification.TaskHistory']"}),
'task_schedule': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'maintenance_databasechangeparameter_related'", 'null': 'True', 'to': u"orm['maintenance.TaskSchedule']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'maintenance.databaseclone': {
'Meta': {'object_name': 'DatabaseClone'},
'can_do_retry': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'current_step': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'}),
'database': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'databases_clone'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['logical.Database']"}),
'environment': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'databases_clone'", 'to': u"orm['physical.Environment']"}),
'finished_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'infra': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'databases_clone'", 'to': u"orm['physical.DatabaseInfra']"}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'origin_database': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'origin_databases_clone'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['logical.Database']"}),
'plan': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'databases_clone'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['physical.Plan']"}),
'started_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'task': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'create_clone'", 'to': u"orm['notification.TaskHistory']"}),
'task_schedule': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'maintenance_databaseclone_related'", 'null': 'True', 'to': u"orm['maintenance.TaskSchedule']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.CharField', [], {'max_length': '200'})
},
u'maintenance.databaseconfiguressl': {
'Meta': {'object_name': 'DatabaseConfigureSSL'},
'can_do_retry': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'current_step': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'}),
'database': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'configure_ssl'", 'to': u"orm['logical.Database']"}),
'finished_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'started_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'task': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'database_configure_ssl'", 'to': u"orm['notification.TaskHistory']"}),
'task_schedule': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'maintenance_databaseconfiguressl_related'", 'null': 'True', 'to': u"orm['maintenance.TaskSchedule']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'maintenance.databasecreate': {
'Meta': {'object_name': 'DatabaseCreate'},
'can_do_retry': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'current_step': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'}),
'database': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'databases_create'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['logical.Database']"}),
'description': ('django.db.models.fields.TextField', [], {}),
'environment': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'databases_create'", 'to': u"orm['physical.Environment']"}),
'finished_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'infra': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'databases_create'", 'to': u"orm['physical.DatabaseInfra']"}),
'is_protected': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'plan': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'databases_create'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['physical.Plan']"}),
'plan_name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'project': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'databases_create'", 'null': 'True', 'to': u"orm['logical.Project']"}),
'started_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'subscribe_to_email_events': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'task': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'create_database'", 'to': u"orm['notification.TaskHistory']"}),
'task_schedule': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'maintenance_databasecreate_related'", 'null': 'True', 'to': u"orm['maintenance.TaskSchedule']"}),
'team': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'databases_create'", 'to': u"orm['account.Team']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.CharField', [], {'max_length': '200'})
},
u'maintenance.databasedestroy': {
'Meta': {'object_name': 'DatabaseDestroy'},
'can_do_retry': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'current_step': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'}),
'database': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'databases_destroy'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['logical.Database']"}),
'description': ('django.db.models.fields.TextField', [], {}),
'environment': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'databases_destroy'", 'to': u"orm['physical.Environment']"}),
'finished_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'infra': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'databases_destroy'", 'to': u"orm['physical.DatabaseInfra']"}),
'is_protected': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'plan': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'databases_destroy'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['physical.Plan']"}),
'plan_name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'project': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'databases_destroy'", 'null': 'True', 'to': u"orm['logical.Project']"}),
'started_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'subscribe_to_email_events': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'task': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'databases_destroy'", 'to': u"orm['notification.TaskHistory']"}),
'task_schedule': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'maintenance_databasedestroy_related'", 'null': 'True', 'to': u"orm['maintenance.TaskSchedule']"}),
'team': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'databases_destroy'", 'to': u"orm['account.Team']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.CharField', [], {'max_length': '200'})
},
u'maintenance.databasemigrate': {
'Meta': {'object_name': 'DatabaseMigrate'},
'can_do_retry': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'current_step': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'}),
'database': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'database_migrate'", 'to': u"orm['logical.Database']"}),
'environment': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'database_migrate'", 'to': u"orm['physical.Environment']"}),
'finished_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'offering': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'database_migrate'", 'null': 'True', 'to': u"orm['physical.Offering']"}),
'origin_environment': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['physical.Environment']"}),
'origin_offering': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['physical.Offering']", 'null': 'True', 'blank': 'True'}),
'started_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'task': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'database_migrate'", 'to': u"orm['notification.TaskHistory']"}),
'task_schedule': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'maintenance_databasemigrate_related'", 'null': 'True', 'to': u"orm['maintenance.TaskSchedule']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'maintenance.databasemigrateengine': {
'Meta': {'object_name': 'DatabaseMigrateEngine', '_ormbases': [u'maintenance.DatabaseUpgrade']},
'current_database': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'engine_migrations'", 'to': u"orm['logical.Database']"}),
u'databaseupgrade_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['maintenance.DatabaseUpgrade']", 'unique': 'True', 'primary_key': 'True'})
},
u'maintenance.databasereinstallvm': {
'Meta': {'object_name': 'DatabaseReinstallVM'},
'can_do_retry': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'current_step': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'}),
'database': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'reinstall_vm'", 'to': u"orm['logical.Database']"}),
'finished_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'instance': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'database_reinstall_vm'", 'to': u"orm['physical.Instance']"}),
'started_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'task': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'database_reinsgtall_vm'", 'to': u"orm['notification.TaskHistory']"}),
'task_schedule': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'maintenance_databasereinstallvm_related'", 'null': 'True', 'to': u"orm['maintenance.TaskSchedule']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'maintenance.databaseresize': {
'Meta': {'object_name': 'DatabaseResize'},
'can_do_retry': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'current_step': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'}),
'database': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'resizes'", 'to': u"orm['logical.Database']"}),
'finished_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'source_offer': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'database_resizes_source'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['physical.Offering']"}),
'source_offer_name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'started_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'target_offer': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'database_resizes_target'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['physical.Offering']"}),
'target_offer_name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'task': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'database_resizes'", 'to': u"orm['notification.TaskHistory']"}),
'task_schedule': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'maintenance_databaseresize_related'", 'null': 'True', 'to': u"orm['maintenance.TaskSchedule']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'maintenance.databaserestore': {
'Meta': {'object_name': 'DatabaseRestore'},
'can_do_retry': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'current_step': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'}),
'database': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'database_restore'", 'to': u"orm['logical.Database']"}),
'finished_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'group': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'database_restore'", 'to': u"orm['backup.BackupGroup']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'new_group': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'database_restore_new'", 'null': 'True', 'to': u"orm['backup.BackupGroup']"}),
'started_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'task': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'database_restore'", 'to': u"orm['notification.TaskHistory']"}),
'task_schedule': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'maintenance_databaserestore_related'", 'null': 'True', 'to': u"orm['maintenance.TaskSchedule']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'maintenance.databaserestoreinstancepair': {
'Meta': {'unique_together': "((u'master', u'slave', u'restore'),)", 'object_name': 'DatabaseRestoreInstancePair'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'master': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'restore_master'", 'to': u"orm['physical.Instance']"}),
'restore': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'restore_instances'", 'to': u"orm['maintenance.DatabaseRestore']"}),
'slave': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'restore_slave'", 'to': u"orm['physical.Instance']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'maintenance.databaseupgrade': {
'Meta': {'object_name': 'DatabaseUpgrade'},
'can_do_retry': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'current_step': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'}),
'database': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'upgrades'", 'to': u"orm['logical.Database']"}),
'finished_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'source_plan': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'database_upgrades_source'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['physical.Plan']"}),
'source_plan_name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'started_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'target_plan': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'database_upgrades_target'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['physical.Plan']"}),
'target_plan_name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'task': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'database_upgrades'", 'to': u"orm['notification.TaskHistory']"}),
'task_schedule': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'maintenance_databaseupgrade_related'", 'null': 'True', 'to': u"orm['maintenance.TaskSchedule']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'maintenance.databaseupgradepatch': {
'Meta': {'object_name': 'DatabaseUpgradePatch'},
'can_do_retry': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'current_step': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'}),
'database': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'upgrades_patch'", 'to': u"orm['logical.Database']"}),
'finished_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'source_patch': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'database_minor_upgrades_source'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['physical.EnginePatch']"}),
'source_patch_full_version': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
'started_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'target_patch': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'database_minor_upgrades_target'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['physical.EnginePatch']"}),
'target_patch_full_version': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
'task': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'database_upgrades_patch'", 'to': u"orm['notification.TaskHistory']"}),
'task_schedule': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'maintenance_databaseupgradepatch_related'", 'null': 'True', 'to': u"orm['maintenance.TaskSchedule']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'maintenance.filermigrate': {
'Meta': {'object_name': 'FilerMigrate'},
'can_do_retry': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'current_step': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'}),
'database': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'filer_migrate'", 'to': u"orm['logical.Database']"}),
'finished_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'original_export_id': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'started_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'task': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'filer_migrate'", 'to': u"orm['notification.TaskHistory']"}),
'task_schedule': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'maintenance_filermigrate_related'", 'null': 'True', 'to': u"orm['maintenance.TaskSchedule']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'maintenance.hostmaintenance': {
'Meta': {'unique_together': "((u'host', u'maintenance'),)", 'object_name': 'HostMaintenance', 'index_together': "[[u'host', u'maintenance']]"},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'finished_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'host': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'host_maintenance'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['physical.Host']"}),
'hostname': ('django.db.models.fields.CharField', [], {'default': "u''", 'max_length': '255'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'main_log': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'maintenance': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'maintenance'", 'to': u"orm['maintenance.Maintenance']"}),
'rollback_log': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'started_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '4'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'maintenance.hostmigrate': {
'Meta': {'object_name': 'HostMigrate'},
'can_do_retry': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'current_step': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'}),
'database_migrate': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'hosts'", 'null': 'True', 'to': u"orm['maintenance.DatabaseMigrate']"}),
'environment': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'host_migrate'", 'to': u"orm['physical.Environment']"}),
'finished_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'host': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'migrate'", 'to': u"orm['physical.Host']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'snapshot': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'snapshot_migrate'", 'null': 'True', 'to': u"orm['backup.Snapshot']"}),
'started_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'task': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'host_migrate'", 'to': u"orm['notification.TaskHistory']"}),
'task_schedule': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'maintenance_hostmigrate_related'", 'null': 'True', 'to': u"orm['maintenance.TaskSchedule']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'zone': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'maintenance.maintenance': {
'Meta': {'object_name': 'Maintenance'},
'affected_hosts': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'celery_task_id': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'created_by': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.CharField', [], {'max_length': '500'}),
'disable_alarms': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'finished_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'hostsid': ('django.db.models.fields.CommaSeparatedIntegerField', [], {'max_length': '10000'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'main_script': ('django.db.models.fields.TextField', [], {}),
'maximum_workers': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '1'}),
'revoked_by': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'rollback_script': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'scheduled_for': ('django.db.models.fields.DateTimeField', [], {'unique': 'True'}),
'started_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'maintenance.maintenanceparameters': {
'Meta': {'object_name': 'MaintenanceParameters'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'function_name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'maintenance': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'maintenance_params'", 'to': u"orm['maintenance.Maintenance']"}),
'parameter_name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'maintenance.recreateslave': {
'Meta': {'object_name': 'RecreateSlave'},
'can_do_retry': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'current_step': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'}),
'finished_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'host': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'recreate_slave'", 'to': u"orm['physical.Host']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'snapshot': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'snapshot_recreate_slave'", 'null': 'True', 'to': u"orm['backup.Snapshot']"}),
'started_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'task': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'recreate_slave'", 'to': u"orm['notification.TaskHistory']"}),
'task_schedule': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'maintenance_recreateslave_related'", 'null': 'True', 'to': u"orm['maintenance.TaskSchedule']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'maintenance.removeinstancedatabase': {
'Meta': {'object_name': 'RemoveInstanceDatabase'},
'can_do_retry': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'current_step': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'}),
'database': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'remove_instances_database_manager'", 'to': u"orm['logical.Database']"}),
'finished_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'instance': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'remove_instances_database_manager'", 'to': u"orm['physical.Instance']"}),
'started_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'task': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'remove_instances_database_manager'", 'to': u"orm['notification.TaskHistory']"}),
'task_schedule': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'maintenance_removeinstancedatabase_related'", 'null': 'True', 'to': u"orm['maintenance.TaskSchedule']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'maintenance.restartdatabase': {
'Meta': {'object_name': 'RestartDatabase'},
'can_do_retry': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'current_step': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'}),
'database': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'restart_database_manager'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['logical.Database']"}),
'finished_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'started_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'task': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'restart_database_manager'", 'to': u"orm['notification.TaskHistory']"}),
'task_schedule': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'maintenance_restartdatabase_related'", 'null': 'True', 'to': u"orm['maintenance.TaskSchedule']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'maintenance.taskschedule': {
'Meta': {'object_name': 'TaskSchedule'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'database': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'task_schedules'", 'to': u"orm['logical.Database']"}),
'finished_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'method_path': ('django.db.models.fields.CharField', [], {'max_length': '500'}),
'scheduled_for': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'started_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'maintenance.updatessl': {
'Meta': {'object_name': 'UpdateSsl'},
'can_do_retry': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'current_step': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'}),
'database': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'update_ssl_manager'", 'to': u"orm['logical.Database']"}),
'finished_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'started_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'task': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'update_ssl_manager'", 'to': u"orm['notification.TaskHistory']"}),
'task_schedule': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'maintenance_updatessl_related'", 'null': 'True', 'to': u"orm['maintenance.TaskSchedule']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'notification.taskhistory': {
'Meta': {'object_name': 'TaskHistory'},
'arguments': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'context': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'database_name': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '255', 'null': 'True', 'blank': 'True'}),
'db_id': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'details': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'ended_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'object_class': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'object_id': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'relevance': ('django.db.models.fields.IntegerField', [], {'default': '0', 'max_length': '1'}),
'task_id': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'task_name': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'task_status': ('django.db.models.fields.CharField', [], {'default': "u'WAITING'", 'max_length': '100', 'db_index': 'True'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '255', 'null': 'True', 'blank': 'True'})
},
u'physical.cloud': {
'Meta': {'object_name': 'Cloud'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'physical.databaseinfra': {
'Meta': {'object_name': 'DatabaseInfra'},
'backup_hour': ('django.db.models.fields.IntegerField', [], {}),
'capacity': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'database_key': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'disk_offering': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'databaseinfras'", 'null': 'True', 'on_delete': 'models.PROTECT', 'to': u"orm['physical.DiskOffering']"}),
'endpoint': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'endpoint_dns': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'engine': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'databaseinfras'", 'on_delete': 'models.PROTECT', 'to': u"orm['physical.Engine']"}),
'engine_patch': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'databaseinfras'", 'null': 'True', 'on_delete': 'models.PROTECT', 'to': u"orm['physical.EnginePatch']"}),
'environment': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'databaseinfras'", 'on_delete': 'models.PROTECT', 'to': u"orm['physical.Environment']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_vm_created': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'maintenance_day': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'maintenance_window': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),
'name_prefix': ('django.db.models.fields.CharField', [], {'max_length': '10', 'null': 'True', 'blank': 'True'}),
'name_stamp': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '406', 'blank': 'True'}),
'per_database_size_mbytes': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'plan': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'databaseinfras'", 'on_delete': 'models.PROTECT', 'to': u"orm['physical.Plan']"}),
'ssl_configured': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'})
},
u'physical.diskoffering': {
'Meta': {'object_name': 'DiskOffering'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
'size_kb': ('django.db.models.fields.PositiveIntegerField', [], {}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'physical.engine': {
'Meta': {'ordering': "(u'engine_type__name', u'version')", 'unique_together': "((u'version', u'engine_type'),)", 'object_name': 'Engine'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'engine_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'engines'", 'on_delete': 'models.PROTECT', 'to': u"orm['physical.EngineType']"}),
'engine_upgrade_option': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'backwards_engine'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['physical.Engine']"}),
'has_users': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'major_version': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'minor_version': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'path': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'read_node_description': ('django.db.models.fields.CharField', [], {'default': "u''", 'max_length': '100', 'null': 'True', 'blank': 'True'}),
'template_name': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'user_data_script': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'version': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'write_node_description': ('django.db.models.fields.CharField', [], {'default': "u''", 'max_length': '100', 'null': 'True', 'blank': 'True'})
},
u'physical.enginepatch': {
'Meta': {'object_name': 'EnginePatch'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'engine': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'patchs'", 'to': u"orm['physical.Engine']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_initial_patch': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'patch_path': ('django.db.models.fields.CharField', [], {'default': "u''", 'max_length': '200', 'null': 'True', 'blank': 'True'}),
'patch_version': ('django.db.models.fields.PositiveIntegerField', [], {}),
'required_disk_size_gb': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'physical.enginetype': {
'Meta': {'ordering': "(u'name',)", 'object_name': 'EngineType'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_in_memory': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'physical.environment': {
'Meta': {'object_name': 'Environment'},
'cloud': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'environment_cloud'", 'on_delete': 'models.PROTECT', 'to': u"orm['physical.Cloud']"}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'migrate_environment': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'migrate_to'", 'null': 'True', 'to': u"orm['physical.Environment']"}),
'min_of_zones': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'physical.host': {
'Meta': {'object_name': 'Host'},
'address': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'future_host': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['physical.Host']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
'hostname': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'identifier': ('django.db.models.fields.CharField', [], {'default': "u''", 'max_length': '255'}),
'monitor_url': ('django.db.models.fields.URLField', [], {'max_length': '500', 'null': 'True', 'blank': 'True'}),
'offering': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['physical.Offering']", 'null': 'True'}),
'os_description': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '406', 'null': 'True', 'blank': 'True'}),
'root_size_gb': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'ssl_expire_at': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'})
},
u'physical.instance': {
'Meta': {'unique_together': "((u'address', u'port'),)", 'object_name': 'Instance'},
'address': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'databaseinfra': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'instances'", 'to': u"orm['physical.DatabaseInfra']"}),
'dns': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'future_instance': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['physical.Instance']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
'hostname': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'instances'", 'to': u"orm['physical.Host']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'instance_type': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'port': ('django.db.models.fields.IntegerField', [], {}),
'read_only': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'shard': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '2'}),
'total_size_in_bytes': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'used_size_in_bytes': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'})
},
u'physical.offering': {
'Meta': {'object_name': 'Offering'},
'cpus': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'environments': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "u'offerings'", 'symmetrical': 'False', 'to': u"orm['physical.Environment']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'memory_size_mb': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'physical.parameter': {
'Meta': {'ordering': "(u'engine_type__name', u'name')", 'unique_together': "((u'name', u'engine_type'),)", 'object_name': 'Parameter'},
'allowed_values': ('django.db.models.fields.CharField', [], {'default': "u''", 'max_length': '200', 'null': 'True', 'blank': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'custom_method': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'dynamic': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'engine_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'enginetype'", 'on_delete': 'models.PROTECT', 'to': u"orm['physical.EngineType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'parameter_type': ('django.db.models.fields.CharField', [], {'default': "u''", 'max_length': '100'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'physical.plan': {
'Meta': {'object_name': 'Plan'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'disk_offering': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'plans'", 'null': 'True', 'on_delete': 'models.PROTECT', 'to': u"orm['physical.DiskOffering']"}),
'engine': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'plans'", 'to': u"orm['physical.Engine']"}),
'engine_equivalent_plan': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'backwards_plan'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['physical.Plan']"}),
'environments': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "u'plans'", 'symmetrical': 'False', 'to': u"orm['physical.Environment']"}),
'has_persistence': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_ha': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'max_db_size': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'migrate_engine_equivalent_plan': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'backwards_engine_plan'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['physical.Plan']"}),
'migrate_plan': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'migrate_to'", 'null': 'True', 'to': u"orm['physical.Plan']"}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),
'provider': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'replication_topology': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'replication_topology'", 'null': 'True', 'to': u"orm['physical.ReplicationTopology']"}),
'stronger_offering': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'main_offerings'", 'null': 'True', 'to': u"orm['physical.Offering']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'weaker_offering': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'weaker_offerings'", 'null': 'True', 'to': u"orm['physical.Offering']"})
},
u'physical.replicationtopology': {
'Meta': {'object_name': 'ReplicationTopology'},
'can_change_parameters': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_clone_db': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_recreate_slave': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'can_reinstall_vm': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_resize_vm': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_setup_ssl': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'can_switch_master': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_upgrade_db': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'class_path': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'details': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'engine': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "u'replication_topologies'", 'symmetrical': 'False', 'to': u"orm['physical.Engine']"}),
'has_horizontal_scalability': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'parameter': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'replication_topologies'", 'blank': 'True', 'to': u"orm['physical.Parameter']"}),
'script': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'replication_topologies'", 'null': 'True', 'to': u"orm['physical.Script']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'physical.script': {
'Meta': {'object_name': 'Script'},
'configuration': ('django.db.models.fields.CharField', [], {'max_length': '300'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'initialization': ('django.db.models.fields.CharField', [], {'max_length': '300'}),
'metric_collector': ('django.db.models.fields.CharField', [], {'max_length': '300', 'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'start_database': ('django.db.models.fields.CharField', [], {'max_length': '300'}),
'start_replication': ('django.db.models.fields.CharField', [], {'max_length': '300', 'null': 'True', 'blank': 'True'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'physical.volume': {
'Meta': {'object_name': 'Volume'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'host': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'volumes'", 'to': u"orm['physical.Host']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'identifier': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'total_size_kb': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'used_size_kb': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'})
}
}
complete_apps = ['maintenance'] | globocom/database-as-a-service | dbaas/maintenance/migrations/0051_auto__add_removeinstancedatabase.py | Python | bsd-3-clause | 78,775 | 0.007553 |
from mrq.task import Task
from mrq.context import connections
class MongoTimeout(Task):
    """Task that runs a deliberately long busy-loop as server-side
    JavaScript on the jobs MongoDB, used to exercise eval timeouts."""

    # JavaScript busy-loop executed on the MongoDB server; the string is
    # handed to db.eval() verbatim.
    _JS_BUSY_LOOP = """
          function() {
            var a;
            for (i=0;i<10000000;i++) {
              for (y=0;y<10000000;y++) {
                a = Math.max(y);
              }
            }
            return a;
          }
        """

    def run(self, params):
        # Delegate to the server and return whatever the JS function returns.
        return connections.mongodb_jobs.eval(self._JS_BUSY_LOOP)
| IAlwaysBeCoding/mrq | tests/tasks/mongodb.py | Python | mit | 381 | 0 |
# -*- coding: utf-8 -*-
# Copyright 2017-2019 Barroux Abbey (www.barroux.org)
# Copyright 2017-2019 Akretion France (www.akretion.com)
# @author: Alexis de Lattre <alexis.delattre@akretion.com>
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
from odoo import models, fields
class BaseConfigSettings(models.TransientModel):
    """Expose the mass-validation accounting settings stored on the
    current company so they can be edited from the settings screen."""
    _inherit = 'base.config.settings'

    # All fields below are related fields: they read and write the
    # matching columns on the user's company (res.company).
    mass_validation_account_id = fields.Many2one(
        related='company_id.mass_validation_account_id')
    mass_validation_analytic_account_id = fields.Many2one(
        related='company_id.mass_validation_analytic_account_id')
    mass_validation_journal_id = fields.Many2one(
        related='company_id.mass_validation_journal_id')
    mass_post_move = fields.Boolean(related='company_id.mass_post_move')
| OCA/vertical-abbey | mass/base_config_settings.py | Python | agpl-3.0 | 793 | 0 |
# coding=utf-8
import socket
import thread
import time
import Queue
import re
import random
class IrcClient:
    """Minimal threaded IRC client (Python 2).

    Three worker threads are started on construction: one reads raw lines
    from the socket into RecvQueue, one writes lines from SendQueue to the
    socket, and one parses received lines and dispatches them to the
    registered event handlers.
    """

    def __init__(self, host, port, nick, realname, printAll=True, isMibbitBot=False):
        """Connect to host:port and register with nick/realname.

        printAll    - echo every sent/received line to stdout.
        isMibbitBot - accepted for backward compatibility; currently unused.
        """
        self.nick = nick
        self.realname = realname
        self.host = host
        self.port = port
        self.sock = socket.socket()
        self.RecvQueue = Queue.Queue()
        self.SendQueue = Queue.Queue()
        self.printAll = printAll
        self.EventHandlers = []
        self.ignoredNicks = []
        self.channels = []
        self.sock.connect((host, port))
        thread.start_new_thread(self.RecieveWorker, ())
        thread.start_new_thread(self.SendWorker, ())
        thread.start_new_thread(self.EventWorker, ())
        self.RegisterEventHandler(self.PingEventHandler)
        self.WaitForSilence()
        self.Send("USER "+self.nick+" 0 * :"+self.realname)
        self.Send("NICK "+self.nick)
        self.WaitForSilence()

    def RecieveWorker(self):
        """Thread: read bytes from the socket, split on '\\n' and push
        complete lines onto RecvQueue.

        NOTE(review): the trailing '\\r' of the IRC CRLF line ending is
        kept in the queued line, exactly as in the original implementation.
        """
        recvbuffer = ""
        while True:
            c = self.sock.recv(1)
            if c == '\n':
                if self.printAll:
                    print("RECV: "+recvbuffer)
                self.RecvQueue.put(recvbuffer)
                recvbuffer = ""
            else:
                recvbuffer += c

    def SendWorker(self):
        """Thread: pop outgoing lines from SendQueue and write them out."""
        while True:
            toSend = self.SendQueue.get()
            if self.printAll:
                print("SEND: "+toSend)
            # sendall() instead of send(): send() may transmit only part of
            # the buffer and would silently drop the rest of the command.
            self.sock.sendall(toSend)

    def EventWorker(self):
        """Thread: parse each received line into (prefix, command, params,
        trailing) and invoke every registered handler with them."""
        # Compiled once outside the loop. Plain raw string instead of the
        # original ur'...' literal: 'ur' is a syntax error in Python 3 and
        # the pattern is pure ASCII, so matching is unchanged.
        expression = re.compile(r':([\w!.@-]*) {0,1}([A-Za-z0-9]*) {0,1}([\w# ]*) {0,1}:{0,1}(.*)')
        while True:
            recvItem = self.RecvQueue.get()
            prefix = ""
            command = ""
            params = ""
            trailing = ""
            match = re.search(expression, recvItem)
            if match is not None:
                prefix = match.group(1)
                command = match.group(2)
                params = match.group(3)
                trailing = match.group(4)
            for func in self.EventHandlers:
                try:
                    func(self, recvItem, prefix, command, params, trailing)
                except Exception:
                    # A broken handler must not kill the dispatch thread.
                    print("WARNING: Error in handler function!")

    def WaitForSilence(self, maxIterations=10, delay=0.2):
        """Sleep until RecvQueue stays empty, or give up after
        *maxIterations* polls of *delay* seconds each.

        Bug fix: the original tested 'self.RecvQueue.empty != True', i.e.
        compared the bound method object itself (always != True), so the
        queue state was never actually inspected.
        """
        time.sleep(delay)
        while not self.RecvQueue.empty():
            time.sleep(delay)
            maxIterations -= 1
            if maxIterations <= 0:
                break

    def RegisterEventHandler(self, func):
        """Add *func* to the list of handlers called for every parsed line."""
        self.EventHandlers.append(func)

    def RemoveEventHandler(self, func):
        """Remove a previously registered handler; warn if it is unknown."""
        try:
            self.EventHandlers.remove(func)
        except ValueError:
            print("WARNING: tried to remove unknown handler!")

    def Send(self, cmd):
        """Queue one raw IRC command line for sending."""
        self.SendQueue.put(cmd+'\n')

    def PingEventHandler(self, client, event, prefix, command, params, trailing):
        """Answer server PINGs to keep the connection alive."""
        if event[:4] == "PING":
            self.Send("PONG"+event[4:])

    def SendMessage(self, destination, message):
        """Send a PRIVMSG to a nick or channel."""
        self.Send("PRIVMSG "+destination+" :"+message)

    def BroadcastMessage(self, message):
        """Send *message* to every channel this client has joined."""
        for channel in self.channels:
            self.SendMessage(channel, message)

    def SetNick(self, nickname):
        """Change the client's nickname."""
        self.Send("NICK "+nickname)

    def JoinChannel(self, channelname, channelpassword=""):
        """Join a channel (optionally with a key) and remember it."""
        self.Send("JOIN "+channelname+" "+channelpassword)
        self.channels.append(channelname)

    def LeaveChannel(self, channelname):
        """Part a channel and forget it; warn if we were not in it."""
        self.Send("PART "+channelname)
        try:
            self.channels.remove(channelname)
        except ValueError:
            print("WARNING: Tried to leave channel "+channelname+", but you arent in that channel!")

    def AddIgnore(self, name):
        """Start ignoring *name*."""
        self.ignoredNicks.append(name)

    def RemoveIgnore(self, name):
        """Stop ignoring *name*; warn if it was not ignored."""
        try:
            self.ignoredNicks.remove(name)
        except ValueError:
            print("WARNING: You didnt ignore "+name+" in the first place!")

    def IsIgnored(self, name):
        """Return True if *name* is on the ignore list."""
        return name in self.ignoredNicks

    def Identify(self, password):
        """Identify with NickServ."""
        self.SendMessage("nickserv", "identify "+password)
| mkalte666/Dragonflame | IrcClient.py | Python | mit | 3,540 | 0.05113 |
#!/usr/bin/env python
import setuptools
if __name__ == "__main__":
    # Package metadata collected in a plain dict so it is easy to inspect
    # before being handed to setuptools.
    package_info = {
        "name": "aecg100",
        "version": "1.1.0.18",
        "author": "WHALETEQ Co., LTD",
        "description": "WHALETEQ Co., LTD AECG100 Linux SDK",
        "url": "https://www.whaleteq.com/en/Support/Download/7/Linux%20SDK",
        "include_package_data": True,
        # ship the native SDK library/headers and the python sample files
        "package_data": {
            "": ["sdk/*.so", "sdk/*.h", "sample/python/*.txt"],
        },
    }
    setuptools.setup(**package_info)
| benian/aecg100 | setup.py | Python | mit | 418 | 0.023923 |
# Copyright (c) 2012 - 2015 Lars Hupfeldt Nielsen, Hupfeldt IT
# All rights reserved. This work is under a BSD license, see LICENSE.TXT.
from jenkinsflow.flow import serial
from .framework import api_select
# Expected console dump of the flow definition produced by test_prefix
# below (just_dump=True): job names appear with their accumulated
# job_name_prefix values; 'quick4b' shows a reset prefix (prefix=None).
prefixed_jobs = """
serial flow: [
job: 'top_quick1'
serial flow: [
job: 'top_x_quick2-1'
]
serial flow: [
job: 'top_x_quick2-2'
]
serial flow: [
job: 'top_x_quick2-3'
]
job: 'top_quick3'
parallel flow: (
serial flow: [
job: 'top_y_z_quick4a'
]
serial flow: [
job: 'quick4b'
]
job: 'top_y_quick5'
)
]
"""
def test_prefix(api_type, capsys):
    """Exercise nested job_name_prefix handling: prefixes accumulate over
    nested flows and a prefix of None resets them; the dumped flow graph
    is compared against the expected 'prefixed_jobs' text."""
    with api_select.api(__file__, api_type) as api:
        def declare(job_name):
            # Jobs are never invoked (just_dump=True), hence 0 expected calls.
            api.job(job_name, max_fails=0, expect_invocations=0,
                    expect_order=None, params=None)

        api.flow_job()
        declare('quick1')
        for suffix in (1, 2, 3):
            declare('x_quick2-%d' % suffix)
        declare('quick3')
        declare('y_z_quick4')
        declare('y_quick5')

        with serial(api, timeout=70, report_interval=3, job_name_prefix='top_', just_dump=True) as ctrl1:
            ctrl1.invoke('quick1')
            for suffix in (1, 2, 3):
                with ctrl1.serial(timeout=20, report_interval=3, job_name_prefix='x_') as ctrl2:
                    ctrl2.invoke('quick2-%d' % suffix)
            ctrl1.invoke('quick3')
            with ctrl1.parallel(timeout=40, report_interval=3, job_name_prefix='y_') as ctrl2:
                with ctrl2.serial(timeout=40, report_interval=3, job_name_prefix='z_') as ctrl3a:
                    ctrl3a.invoke('quick4a')
                # job_name_prefix=None resets the accumulated prefix entirely
                with ctrl2.serial(timeout=40, report_interval=3, job_name_prefix=None) as ctrl3b:
                    ctrl3b.invoke('quick4b')
                ctrl2.invoke('quick5')

        sout, _ = capsys.readouterr()
        assert prefixed_jobs.strip() in sout
| lhupfeldt/jenkinsflow | test/prefix_test.py | Python | bsd-3-clause | 1,934 | 0.003619 |
#!/usr/bin/python
import apt_pkg
import logging
import os
import mock
import sys
import tempfile
import unittest
sys.path.insert(0, "..")
from unattended_upgrade import _setup_logging
class MockOptions:
    """Stand-in for the parsed command line options _setup_logging expects."""
    # neither dry-run nor debug mode for these tests
    dry_run = False
    debug = False
class TestLogdir(unittest.TestCase):
    """Tests for the log-directory handling of _setup_logging()."""

    def setUp(self):
        self.tempdir = tempfile.mkdtemp()
        apt_pkg.init()
        self.mock_options = MockOptions()

    def test_logdir(self):
        # the configured Unattended-Upgrade::LogDir must be created
        wanted = os.path.join(self.tempdir, "mylog")
        apt_pkg.config.set("Unattended-Upgrade::LogDir", wanted)
        logging.root.handlers = []
        _setup_logging(self.mock_options)
        self.assertTrue(os.path.exists(wanted))

    def test_logdir_depreated(self):
        # the deprecated APT::UnattendedUpgrades::LogDir must be ignored
        # when the new Unattended-Upgrade::LogDir is given as well
        wanted = os.path.join(self.tempdir, "mylog-use")
        deprecated = os.path.join(self.tempdir, "mylog-dontuse")
        apt_pkg.config.set("Unattended-Upgrade::LogDir", wanted)
        apt_pkg.config.set("APT::UnattendedUpgrades::LogDir", deprecated)
        logging.root.handlers = []
        _setup_logging(self.mock_options)
        self.assertTrue(os.path.exists(wanted))
        self.assertFalse(os.path.exists(deprecated))
if __name__ == "__main__":
    # show debug output when the tests are run directly
    logging.basicConfig(level=logging.DEBUG)
    unittest.main()
| Jimdo/unattended-upgrades | test/test_logdir.py | Python | gpl-2.0 | 1,396 | 0.002149 |
def countingsort(sortablelist):
    """Sort a list of non-negative integers in place using counting sort.

    Runs in O(n + k) time where k is the largest value in the list.
    Values must be integers >= 0. An empty list is left untouched
    (the original raised ValueError via max() on empty input).
    """
    if not sortablelist:
        return  # nothing to do; max() would raise on an empty sequence
    m = max(sortablelist) + 1
    count = [0] * m                     # count[v] = occurrences of value v
    for value in sortablelist:
        count[value] += 1
    i = 0
    for value in range(m):              # emit each value count[value] times
        for _ in range(count[value]):
            sortablelist[i] = value
            i += 1
def main():
    """Demo: sort 100 random integers from [0, 1000] and print the result."""
    import random
    values = [random.randint(0, 1000) for _ in range(100)]
    countingsort(values)
    print(values)

main()
| NendoTaka/CodeForReference | Python/Sort/CountingSort.py | Python | mit | 511 | 0.007828 |
# $Id$
#
from rdkit import Chem
from rdkit.Chem import rdReducedGraphs as rdRG
from rdkit import RDConfig
import numpy
import unittest
class TestCase(unittest.TestCase):
    """Tests for the extended reduced graph (ErG) wrappers.

    The deprecated unittest aliases (failUnlessEqual, failUnless,
    failUnlessAlmostEqual) were replaced with their modern assert*
    equivalents; the aliases were removed in Python 3.12.
    """

    def setUp(self):
        pass

    def test1(self):
        # reduced graph SMILES for an aromatic and an aliphatic ring system
        m = Chem.MolFromSmiles('OCCc1ccccc1')
        mrg = rdRG.GenerateMolExtendedReducedGraph(m)
        mrg.UpdatePropertyCache(False)
        self.assertEqual('[*]cCCO', Chem.MolToSmiles(mrg))

        m = Chem.MolFromSmiles('OCCC1CCCCC1')
        mrg = rdRG.GenerateMolExtendedReducedGraph(m)
        mrg.UpdatePropertyCache(False)
        self.assertEqual('[*]CCCO', Chem.MolToSmiles(mrg))

    def test2(self):
        # fingerprint computed from a pre-built reduced graph must match
        # the one computed directly from the molecule
        m = Chem.MolFromSmiles('OCCc1ccccc1')
        mrg = rdRG.GenerateMolExtendedReducedGraph(m)
        mrg.UpdatePropertyCache(False)
        self.assertEqual('[*]cCCO', Chem.MolToSmiles(mrg))
        fp1 = rdRG.GenerateErGFingerprintForReducedGraph(mrg)
        fp2 = rdRG.GetErGFingerprint(m)
        md = max(abs(fp1 - fp2))
        self.assertTrue(md < 1e-4)

    def test3(self):
        # the two ring descriptions give (nearly) identical fingerprints
        m = Chem.MolFromSmiles('OCCc1ccccc1')
        fp1 = rdRG.GetErGFingerprint(m)
        m = Chem.MolFromSmiles('OCCC1CC=CC=C1')
        fp2 = rdRG.GetErGFingerprint(m)
        md = max(abs(fp1 - fp2))
        self.assertAlmostEqual(0.0, md, 4)

    def test4(self):
        # fuzzIncrement=0.1 shifts the fingerprint by 2 * 0.1 at most
        m = Chem.MolFromSmiles('OCCc1ccccc1')
        fp1 = rdRG.GetErGFingerprint(m)
        fp2 = rdRG.GetErGFingerprint(m, fuzzIncrement=0.1)
        md = max(abs(fp1 - fp2))
        self.assertAlmostEqual(0.2, md, 4)
self.failUnlessAlmostEqual(0.2,md,4)
if __name__ == '__main__':
    # run all tests when executed directly
    unittest.main()
| soerendip42/rdkit | Code/GraphMol/ReducedGraphs/Wrap/testReducedGraphs.py | Python | bsd-3-clause | 1,470 | 0.014286 |
# -*- encoding: utf-8 -*-
{
'name': 'Export Inventory Costs',
'version': '3.0.0.0',
'category': "Warehouse Management",
'description': """
Export Inventory Costs
""",
'author': 'Didotech SRL',
'website': 'http://www.didotech.com',
'license': 'AGPL-3',
"depends": [
'base',
'stock',
],
"data": [
'wizard/wizard_inventory_costs_view.xml',
'views/stock_view.xml'
],
"demo": [],
"active": False,
"installable": True,
"application": True,
}
| iw3hxn/LibrERP | stock_inventory_export/__openerp__.py | Python | agpl-3.0 | 541 | 0 |
## numpy-oldnumeric calls replaced by custom script; 09/06/2016
## Automatically adapted for numpy-oldnumeric Mar 26, 2007 by alter_code1.py
##
## Biskit, a toolkit for the manipulation of macromolecular structures
## Copyright (C) 2004-2018 Raik Gruenberg & Johan Leckner
##
## This program is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 3 of the
## License, or any later version.
##
## This program is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You find a copy of the GNU General Public License in the file
## license.txt along with this program; if not, write to the Free
## Software Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
##
##
"""
Parallellized AmberEntropist calculation.
"""
import os.path, copy
import Biskit.oldnumeric as N0
import Biskit.tools as T
import Biskit.settings as settings
import Biskit.mathUtils as MU
from Biskit.PVM.TrackingJobMaster import TrackingJobMaster
from Biskit.PVM.hosts import cpus_all, nice_dic
from Biskit import PDBModel, PDBProfiles, EHandler, StdLog
from Biskit.Dock import Complex
slave_path = T.projectRoot()+"/Biskit/AmberEntropySlave.py"
class AmberEntropyMaster(TrackingJobMaster):
"""
Run many AmberEntropist calculations on many nodes. The Master has
a standard set of 13 protocols to run on rec, lig, and com
trajectories, as well as on every single member trajectory - in
total 113. It accepts one variable parameter, e.g. s(tart). Each
protocol is then run for all values of the variable parameter. A
protocol is simply a set of options that are passed on to the
AmberEntropist (which is run from within AmberEntropySlave).
Comparing the different protocols allows to more or less separate
random from real correlations, rigid body from intermolecular
vibrations, etc.
Results are put into a tree-shaped dictionary of dictionaries. The
first dimension/key is the member index -- None for the complete
ensemble trajectory, 0 for the first member, etc. The second
dimension/key is the name of the protocol, e.g. 'com_split' for
the complex trajectory with seperately fitted receptor and
ligand. The last dimension contains the different values obtained
from the ptraj run, e.g. 'S_total' points to the total entropy in
cal/mol/K, 'contributions' to the entropy contribution of each
mode, 'T' to the assumed temperature, 'vibes' gives the number of
vibrations with too low frequencies (according to ptraj). All these
are lists of values - one for each value of the variable option.
Example::
* r[None]['fcom']['S_vibes'][0] -> float
first vibr. Entropy of free fake complex for complete ensemble
* r[0]['com']['S_total'] -> [ float, float, .. ]
the total entropies of the complex calculated for the first
ensemble member and the different values of the variable option
"""
def __init__(self, rec=None, lig=None, com=None, out=None,
cr=None, var='s', vrange=[0], jack=0,
zfilter=None, clean=0, all=1,
exrec=[], exlig=[], excom=[],
hosts=cpus_all,
niceness=nice_dic,
w=0, a=1, debug=0,
restart=0,
**kw ):
"""
@param rec: free rec trajectory [required]
@type rec: str
@param lig: free lig trajectory [required]
@type lig: str
@param com: complex trajectory [required]
@type com: str
@param out: file name for pickled result [required]
@type out: str
@param cr: chains of receptor in complex trajectory [n_chains rec]
@type cr: [int]
@param var: name of variable option [ s ]
@type var: str
@param vrange: set of values used for variable option
OR 'start:stop:step', string convertable to
range() input
@type vrange: [any]
@param jack: set up leave-one-trajectory-out jackknife test
(default: 0) (replaces var with 'ex1' and vrange with
range(1,n_members+1))
@type jack: [0|1]
@param zfilter: kick out outlyer trajectories using z-score threshold
on RMSD trace (default: None->don't)
@type zfilter: float
@param clean: remove pickled ref models and member trajectories
(default: 0)
@type clean: 0|1
@param all: skip single member trajs (default: 1)
@type all: 0|1
@param exrec: exclude certain members of receptor ensemble [[]]
@type exrec: [int]
@param exlig: exclude certain members of ligand ensemble [[]]
@type exlig: [int]
@param excom: exclude certain members of complex ensemble [[]]
@type excom: [int]
@param hosts: nodes to be used (default: all known)
@type hosts: [str]
@param debug: don't delete output files (default: 0)
@type debug: 1|0
@param kw: additional key=value parameters for AmberEntropist,
AmberCrdEntropist, Executor and Master.
@type kw: key=value pairs
::
... parameters for AmberEntropist
cast - 1|0, equalize free and bound atom content [1]
s,e - int, start and stop frame [0, to end]
atoms - [ str ], names of atoms to consider [all]
protein - 1|0, remove non-protein atoms [0..don't]
step - int, frame offset [no offset]
thin - float, use randomly distributed fraction of frames [all]
(similar to step but perhaps better for entropy
calculations)
ex - [int] OR ([int],[int]), exclude member trajectories [[]]
ex_n - int, exclude last n members OR... [None]
ex3 - int, exclude |ex3|rd tripple of trajectories [0]
(index starts with 1! 0 to exclude nothing)
... parameters for AmberCrdEntropist
f_template - str, alternative ptraj input template [default]
... parameters for Executor:
log - Biskit.LogFile, program log (None->STOUT) [None]
verbose - 0|1, print progress messages to log [log != STDOUT]
... parameters for Master
w - 0|1, show X window for each slave [0]
a - 0|1, add hosts to PVM [1]
"""
## normal and error output
self.fout = T.absfile( out )
self.ferror = os.path.dirname(self.fout) +'/AmberEntropy_errors.log'
self.debug = debug
self.log = StdLog()
## input files and variable option
self.rec = T.absfile( rec, 0 )
self.lig = T.absfile( lig, 0 )
self.com = T.absfile( com, 0 )
self.cr = cr
self.cl = None
self.var = var
self.vrange = self.__vrange( vrange )
self.jack = jack
self.zfilter = zfilter
self.n_members = None
self.clean = clean
self.all = all
## members to exclude, outliers will be added if zfilter is not None
self.ex_frec = exrec
self.ex_flig = exlig
self.ex_com = excom
## reserve for loaded reference models
self.ref_frec = self.ref_flig = None
self.ref_brec = self.ref_blig = self.ref_com = None
## reserve for extracted member trajectories
self.members_frec = self.members_flig = []
self.members_brec = self.members_blig = []
## options to be passed on to AmberEntropist
self.options = kw
if not restart:
## Load trajectories, dump references, identify outliers
self.processTrajs()
## prepare dict of protocols for AmberEntropist
self.protocols = self.protocols_var_range( **kw )
self.saveProtocols()
TrackingJobMaster.__init__(self, self.protocols,
chunk_size=1,
hosts=hosts,
niceness=niceness,
slave_script=slave_path,
show_output=w,
add_hosts=a)
print "JobMaster initialized."
def __vrange( self, v ):
"""
Interprete the vrange option -> [ int ] or [ float ]
@param v: vrange option
@type v: lst OR str
@return: range option
@rtype: [int] OR [float]
"""
if type( v ) is list:
return [ self.__float_int(x) for x in v ]
if type( v ) is str and ':' in v:
v = tuple( [ self.__float_int(x) for x in v.split(':') ] )
return N0.arange( *v )
return self.__float_int( v )
def __float_int( self, v ):
"""
Convert v to int or, if necessary, float
@param v: value
@type v: any
@return: converted value
@rtype: int OR float
"""
if float(v) % 1. != 0:
return float( v )
return int( float(v) )
def loadTraj( self, fname, outliers=[], refname=None ):
"""
Load trajectory from file.
@param fname: path to trajectory
@type fname: str
@param outliers: Identify outlier trajectories (default: [], identify)
@type outliers: [int] OR []
@param refname: name of reference (efault: None)
@type refname: str
@return: t, outliers, members
@rtype: trajectoty, [int], [int]
"""
self.log.add('Loading ' + fname )
t = T.load( fname )
t.ref.addChainId()
t = t.compressAtoms( t.ref.maskProtein() )
outliers = self.getOutliers( t, outliers )
if refname:
self.dumpMissing( t.ref, refname )
members = None
if not self.all:
members = self.dumpMembers( t, self.rec )
return t, outliers, members
def processTrajs( self ):
"""
Extract reference model and member trajectories from rec, lig, and
com trajectories. Identify outlier member trajectories, if requested.
"""
## free rec
self.ref_frec = self.nameRef( self.rec )
t, self.ex_frec, self.members_frec = self.loadTraj(
self.rec, self.ex_frec, self.ref_frec )
n_rec_members = t.n_members
self.cr = self.cr or range( t.ref.lenChains( breaks=0 ) )
del t
## free lig
self.ref_flig = self.nameRef( self.lig )
t, self.ex_flig, self.members_flig = self.loadTraj(
self.lig, self.ex_flig, self.ref_flig )
n_lig_members = t.n_members
del t
## complex
fname = T.stripSuffix( T.absfile( self.com, resolveLinks=0 ) )
self.ref_com = fname + '_ref.complex'
self.ref_blig= fname + '_blig.model'
self.ref_brec= fname + '_brec.model'
t, self.ex_com, self.members_com = self.loadTraj(
self.com, self.ex_com )
n_com_members = t.n_members
self.cl = self.cl or MU.difference( range(t.ref.lenChains()), self.cr)
rec = t.ref.takeChains( self.cr, breaks=0 )
lig = t.ref.takeChains( self.cl, breaks=0 )
del t
self.dumpMissing( Complex( rec, lig ), self.ref_com )
self.dumpMissing( rec, self.ref_brec )
self.dumpMissing( lig, self.ref_blig )
self.equalizeMemberCount( n_rec_members, n_lig_members, n_com_members )
if self.jack: self.prepareJackknife()
def equalizeMemberCount( self, n_rec, n_lig, n_com ):
"""
Ensure we keep equal number of members trajectories from frec,
flig, and com.
@param n_rec: number of receptor trajectories
@type n_rec: int
@param n_lig: number of ligand trajectories
@type n_lig: int
@param n_com: number of complex trajectories
@type n_com: int
"""
ex = [ self.ex_frec, self.ex_flig, self.ex_com ]
n_members = [ n_rec, n_lig, n_com ]
## pair list of excluded members with number of remaining members
ex = [ ( ex[i], n_members[i] - len(ex[i]) ) for i in range(3) ]
## lowest number of members after exclusion
n_min = min( [ x[1] for x in ex ] )
self.log.add('excluding non-outliers to match member count: ')
label = ['com','lig','rec']
for x, n in ex:
i = 0
s = label.pop()
while n > n_min:
self.log.write( '%s: ' % s )
if not i in x:
x.append( i )
n -= 1
self.log.write('%i, ' % i )
i += 1
self.log.add('')
self.n_members = n_min
def prepareJackknife( self ):
"""
Prepare leave-one-trajectory-out jackknife test.
"""
self.vrange = range( self.n_members + 1 ) ## 0: exclude nothing
self.var = 'ex1'
def nameRef( self, fname ):
fname = T.stripSuffix( T.absfile( fname, resolveLinks=0 ) )
return fname + '_ref.model'
def nameRefCom( self, fname ):
fname = T.stripSuffix( T.absfile( fname, resolveLinks=0 ) )
return fname + '_ref.complex'
def dumpMissing( self, o, fname ):
"""
Pickle *o* to path *fname*, if it is not already there.
@param o: object to dump
@type o: any
@param fname: file name
@type fname: str
@return: file name
@rtype: str
"""
if os.path.exists( fname ):
self.log.add('using existing ' + fname )
else:
self.log.add('Saving ' + fname )
T.dump( o, fname )
return fname
def getOutliers( self, traj, outlaws=[] ):
"""
Identify member trajectories that haved moved much further than normal.
@param traj: Trajectory to analyze
@type traj: Trajectory
@param outlaws: members already marked for exclusion
@type outlaws: [int]
@return: member indices of outlyer trajectories (plus outlaws)
@rtype: [int]
"""
if not self.zfilter:
return outlaws
outliers = N0.nonzero( traj.outliers( z=self.zfilter,
mask=traj.ref.maskCA(), step=10) )
self.log.add('identified %i outliers with z-threshold %3.1f' %\
( len(outliers), self.zfilter ) )
return MU.union( outliers, outlaws )
def dumpMembers( self, traj, fname ):
"""
Dump ensemble member trajectories
@param traj: Trajectory to dump
@type traj: Trajectory
@param fname: trajectory file name - used to derrive name for members
@type fname: str'
@return: list of trajectory files
@rtype: [str]
"""
fname = T.stripSuffix( T.absfile( fname, resolveLinks=0 ) )
members = range( traj.n_members )
r = []
for n in members:
f = fname + '_member_%02i.traj' % n
if os.path.exists( f ):
self.log.add('using existing ' + f )
else:
self.log.write('saving ' + f + '...')
m = traj.takeMember( n )
T.dump( m, f )
self.log.add('done')
r += [ f ]
return r
def getInitParameters(self, slave_tid):
"""
hand over parameters to slave once.
@param slave_tid: slave task id
@type slave_tid: int
@return: dictionary with init parameters
@rtype: {param:value}
"""
host = self.hostnameFromTID( slave_tid )
nice = self.niceness.get( host, self.niceness.get('default',0) )
return {'ferror':self.ferror,
'debug':self.debug, 'nice':nice, 'host':host}
def cleanup( self ):
"""
Tidy up
"""
if self.clean:
self.cleanCache()
def cleanCache( self ):
"""
Remove left-over cache files
"""
fs = [ self.ref_frec, self.ref_flig, self.ref_com, self.ref_brec,
self.ref_blig ]
fs.extend( self.members_frec + self.members_flig )
fs.extend( self.members_brec + self.members_blig )
fs.extend( self.members_com )
for f in fs:
self.log.add('removing %s: %i' % (f, T.tryRemove(f)) )
def saveProtocols( self ):
"""
Save protocol to file.
"""
f_prot = T.stripSuffix( T.absfile(self.fout) ) + '_protocols.dat'
self.log.write( 'Saving parameters to %s...' % f_prot )
T.dump( self.protocols, f_prot )
def done(self):
"""
Write result to file.
"""
tree = self.getResult()
self.log.add("Saving result to %s..." % self.fout)
T.dump( tree, self.fout )
self.log.add( "Done" )
##
## Assemble the protocols for many AmberEntropist runs
##
def __cpupdate( self, d1, d2 ):
"""
Merge 2 dictionaries *d1* and *d2* and return a copy
"""
r = copy.copy( d1 )
r.update( d2 )
return r
def protocols_standard( self, trec, tlig, tcom,
ex_frec=None, ex_flig=None, ex_com=None,
doshift=1,
**options ):
"""
Create 13 parameter sets for AmberEntropist that cover the calculation
of rec, lig, com and fcom entropies with and without splitting of the
complex, with and without shifting and shuffling of frames.
@param options: additional options (like cast, s, e, atoms, thin, step)
that are the same in all parameter sets
@type options: key=value
@return: each value of the returned dict contains a set of
arguments for one AmberEntropist run
@rtype: dict of dict
"""
fcp = self.__cpupdate
r = {}
S = self ## make rest more readable
d = { 'ref':None, 'cast':1, 'chains':None,
'split':0, 'shift':0, 'shuffle':0, 'ex_n':0, 'ex3':None,
'thin':None, 'step':1, 'ss':0, 'se':None, 'atoms':None }
d.update( options )
r['frec'] = fcp( d, {'traj':trec, 'ref':S.ref_brec, 'ex':ex_frec } )
r['flig'] = fcp( d, {'traj':tlig, 'ref':S.ref_blig, 'ex':ex_flig } )
r['brec'] = fcp( d, {'traj':tcom, 'ref':S.ref_frec, 'ex':ex_com,
'chains':S.cr } )
r['blig'] = fcp( d, {'traj':tcom, 'ref':S.ref_flig, 'ex':ex_com,
'chains':S.cl } )
r['fcom'] = fcp( d, {'traj':'%s+%s'%(trec, tlig),
'ex':(ex_frec, ex_flig),
'ref':S.ref_com, 'split':1 } )
## if doshift:
## r['fcom_shift'] = fcp( r['fcom'], {'shift':1 } )
r['fcom_shuff'] = fcp( r['fcom'], {'shuffle':1 } )
r['com'] = fcp( d, {'traj':tcom, 'ex':ex_com,
'ref':'%s+%s' % (S.ref_frec, S.ref_flig) } )
r['com_split'] = fcp( r['com'], { 'split':1, 'border':S.cl[0] } )
## r['com_shuff'] = fcp( r['com'], { 'shuffle':1, 'border':S.cl[0] } )
r['com_split_shuff'] = fcp( r['com'],
{'split':1,'shuffle':1,'border':S.cl[0] } )
if doshift:
## r['com_shift'] = fcp( r['com'], { 'shift':1,'border':S.cl[0] } )
r['com_split_shift'] = fcp( r['com'],
{'split':1,'shift':1, 'border':S.cl[0] } )
return r
def protocols_single_all( self, **options ):
"""
Set of protocols for all-member trajectories AND single-member traj.
with the different shuffle, shift, split settings.
Usually 11 x 13 protocols for AmberEntropist (10 members and 1 for all)
@param options: additional options (like cast, s, e, atoms, thin, step)
that are the same in all parameter sets
@type options: key=value
@return: each value of the returned dict contains a set of arguments
for one AmberEntropist run, each key is a tuple of the
member index and the protocol name, i.e. (0, 'fcom_shuffle')
The set of protocols for all-member trajectories has member
index None.
@rtype: dict of dict
"""
r = {}
## put all-member protocolls under member index 'None'
prots = self.protocols_standard( self.rec, self.lig, self.com,
self.ex_frec, self.ex_flig, self.ex_com,
**options )
for k,p in prots.items():
r[ (None, k) ] = p
if not self.all:
## put single-member protocols under their respective member index
for i in range( len( self.members_frec ) ):
prots = self.protocols_standard(self.members_frec[i],
self.members_flig[i],
self.members_com[i], doshift=0,
**options )
for k, p in prots.items():
r[ (i, k) ] = p
return r
def protocols_var_range( self, **options ):
"""
Complete set of protocols also considering different values of the
variable option.
"""
self.log.add( 'variable option %s with %i values' \
% (self.var, len(self.vrange)))
r = {}
for v in self.vrange:
d = copy.copy( options )
d[ self.var ] = v
prots = self.protocols_single_all( **d )
for k, p in prots.items():
r[ (v,) + k ] = p
return r
##
## Re-organize results
##
    def dictionate( self, d ):
        """
        Take dict with tuple keys (value, int_member, str_protocol) and build
        a tree-like dict of dicts in which the values of d can be accessed
        like::

          d[value][int_member][str_protocol]

        The method recurses: each call peels off the first element of the
        tuple keys and delegates the remaining key tail to itself.

        @param d: the raw results accumulated from the slave nodes
        @type d: dict

        @return: tree-like dict ordered by variable value, member, protocol
        @rtype: dict of dict of dict of dict
        """
        r = {}
        # NOTE(review): assumes d is non-empty and all keys are equal-length
        # tuples; an empty dict would raise IndexError below -- confirm
        # callers guarantee this.
        keys = d.keys()

        ## recursion end: convert single-value tuple keys into plain keys
        if len( keys[0] ) == 1:
            for k in keys:
                r[ k[0] ] = d[ k ]
            return r

        ## unique first elements of all keys (MU.nonredundant drops dupes)
        x_values = MU.nonredundant( [ k[0] for k in keys ] )

        for x in x_values:
            ## keys belonging to this first-element value
            sub_keys = [ k for k in keys if k[0] == x ]
            y_values = MU.nonredundant( [ k[1:] for k in sub_keys] )

            r[ x ] = {}
            for y in y_values:
                r[x][y] = d[ (x,) + y ]

            ## recurse on the remaining key tail
            r[ x ] = self.dictionate( r[x] )

        return r
def getResult( self, **arg ):
"""
Collapse the results for different values of the variable parameter
into lists and put the results into a tree ala::
r[ member_index ][ protocol_name ][ result_field ] -> [ values ]
@return: tree-like dict ordered by variable value, member, protocol
@rtype: dict of dict of dict of lists
"""
tree = self.dictionate( self.result )
vvalues = tree.keys()
vvalues.sort()
keys = self.result.keys()
sub_keys = [ k for k in keys if k[0] == vvalues[0] ]
r = {}
for v, member, protcl in sub_keys:
try:
if not member in r:
r[member] = {}
r[member][protcl] = {}
run_dic = tree[v][member][protcl]
for k in run_dic.keys():
r[member][protcl][k] = [ tree[v][member][protcl][k] \
for v in vvalues ]
except:
EHandler.warning('missing result: ' + str(T.lastError()))
r['var'] = self.var
r['vrange']= self.vrange
r['protocols'] = self.protocols
self.result_tree = r
return r
#### TEST #######
## Ad-hoc integration test: distributes entropy calculations over PVM hosts.
## Requires a configured parallel environment and the Biskit test data tree.
if __name__ == '__main__':

    niceness = {'default': 0}
    ## use at most the first 80 known CPUs
    ## NOTE(review): cpus_all is presumably imported from the Biskit.PVM
    ## hosts module at the top of this file -- confirm.
    hosts = cpus_all[:80]

    ## input trajectories for receptor, ligand and complex
    f = T.testRoot() + '/Amber/AmberEntropyMaster/'

    rec = f + 'rec/traj.dat'
    lig = f + 'lig/traj.dat'
    com = f + 'com/traj.dat'
    out = f + 'entropy.out'

    ## vary option 'ex1' from 0 to 10; exclude member 1 (rec) / 0 (lig)
    master = AmberEntropyMaster( rec, lig, com, out, step=1,
                                 atoms=['CA','CB'],
                                 var='ex1', vrange='0:10',
                                 exrec=[1],exlig=[0],
                                 all=1,
                                 hosts=hosts, niceness=niceness,
                                 w=1 )

    master.start()
| graik/biskit | archive_biskit2/Biskit/AmberEntropyMaster.py | Python | gpl-3.0 | 25,638 | 0.017396 |
#! /usr/bin/env python
#coding=utf-8
## @Configuration of Preprocessing for SEIMS
#
# TODO, give more detailed description here.
import os,platform
## Directories of input data and helper programs, selected by platform.
## NOTE(review): on any platform other than Windows/Linux (e.g. macOS) none
## of these names is defined, so the derived paths below raise NameError --
## confirm whether that is acceptable.
if platform.system() == "Windows":
    DATA_BASE_DIR = r'E:\github-zlj\model_data\model_dianbu_30m_longterm\data_prepare'
    PREPROC_SCRIPT_DIR = r'E:\github-zlj\SEIMS\preprocess'
    CPP_PROGRAM_DIR = r'E:\github-zlj\SEIMS_Preprocess\Debug'
    METIS_DIR = r'E:\github-zlj\SEIMS_Preprocess\metis\programs\Debug'
    MPIEXEC_DIR = None
elif platform.system() == "Linux":
    DATA_BASE_DIR = r'/data/liujz/data'
    PREPROC_SCRIPT_DIR = r'/data/hydro_preprocessing'
    CPP_PROGRAM_DIR = r'/data/hydro_preprocessing/cpp_programs'
    METIS_DIR = r'/soft/programming/metis-5.1.0/build/programs'
    MPIEXEC_DIR = None
## derived sub-directories for climate input, spatial input and outputs
CLIMATE_DATA_DIR = DATA_BASE_DIR + os.sep + 'climate'
SPATIAL_DATA_DIR = DATA_BASE_DIR + os.sep + 'spatial'
WORKING_DIR = DATA_BASE_DIR + os.sep + 'output'
## MongoDB related settings: connection endpoint and database names
#HOSTNAME = '192.168.6.55'
HOSTNAME = '127.0.0.1'
PORT = 27017
ClimateDBName = 'climate_dianbu'
SpatialDBName = 'model_dianbu_30m_longterm'
## run mode switches: cluster deployment and storm (event-based) mode
forCluster = False
stormMode = False
## cluster databases are distinguished by a 'cluster_' name prefix
if forCluster and 'cluster_' not in SpatialDBName.lower():
    SpatialDBName = 'cluster_' + SpatialDBName
## Climate Input: Voronoi (Thiessen) polygon shapefiles of the gauge sites
PrecSitesVorShp = CLIMATE_DATA_DIR + os.sep + 'shp' + os.sep + 'Preci_dianbu_Vor.shp'
## NOTE(review): PrecStormSitesVorShp is only defined when stormMode is
## True; code referencing it in long-term mode would raise NameError.
if stormMode:
    PrecStormSitesVorShp = CLIMATE_DATA_DIR + os.sep + 'shp' + os.sep + 'Preci_dianbu_Vor_storm.shp'
MeteorSitesVorShp = CLIMATE_DATA_DIR + os.sep + 'shp' + os.sep + 'Metero_hefei_Vor.shp'
## precipitation spreadsheets, one per year: <prefix><year>
PrecExcelPrefix = CLIMATE_DATA_DIR + os.sep + 'precipitation_by_day_'
PrecDataYear = [2014]
## meteorology variable definitions, daily records and site list
MeteoVarFile = CLIMATE_DATA_DIR + os.sep + 'Variables.txt'
MeteoDailyFile = CLIMATE_DATA_DIR + os.sep+ 'meteorology_dianbu_daily.txt'
MetroSiteFile = CLIMATE_DATA_DIR + os.sep + 'sites_hefei.txt'
## discharge spreadsheets, one per year: <prefix><year>
DischargeExcelPrefix = CLIMATE_DATA_DIR + os.sep + 'discharge_by_day_'
DischargeYear = [2014]
sqliteFile = DATA_BASE_DIR + os.sep + "Parameter.db3"
## Spatial Input
dem = SPATIAL_DATA_DIR + os.sep + 'dem_30m.tif'
outlet_file = SPATIAL_DATA_DIR + os.sep + 'outlet_30m.shp'
threshold = 0 # threshold for stream extraction from D8-flow accumulation weighted Peuker-Douglas stream sources
# if threshold is 0, then Drop Analysis is used to select the optimal value.
np = 4 # number of parallel processors
landuseFile = SPATIAL_DATA_DIR + os.sep + 'landuse_30m.tif'
## layered soil property rasters (sand/clay/organic matter), layers 1 and 2
sandList = [SPATIAL_DATA_DIR + os.sep + "sand" + str(layer) + ".tif"
            for layer in [1, 2]]
clayList = [SPATIAL_DATA_DIR + os.sep + "clay" + str(layer) + ".tif"
            for layer in [1, 2]]
orgList = [SPATIAL_DATA_DIR + os.sep + "org" + str(layer) + ".tif"
           for layer in [1, 2]]
## fallback values used where the soil rasters provide no data
defaultSand = 40
defaultClay = 30
defaultOrg = 2.5
## Predefined variables: crop lookup table and attribute name lists
CROP_FILE = PREPROC_SCRIPT_DIR + os.sep + 'crop.txt'
## NOTE(review): "DLAI" appears twice in this list -- confirm whether the
## duplicate is intentional.
CROP_ATTR_LIST = ["IDC", "EXT_COEF", "BMX_TREES", "BLAI", "HVSTI",\
                  "MAT_YRS", "T_BASE", "FRGRW1", "FRGRW2", "LAIMX1",\
                  "LAIMX2", "DLAI", "BN1", "BN2", "BN3", "BP1", "BP2",\
                  "BP3", "BIO_E", "BIOEHI", "CO2HI", "WAVP", "BIO_LEAF",\
                  "RDMX","CNYLD", "CPYLD", "WSYF", "DLAI", "T_OPT"]
# LANDUSE_ATTR_LIST and SOIL_ATTR_LIST are selected from the sqliteFile database.
# NOTE(review): LANDUSE_ATTR_LIST has 7 entries but LANDUSE_ATTR_DB only 6
# ("USLE_P" has no DB counterpart) -- verify the lists are meant to align.
LANDUSE_ATTR_LIST = ["Manning", "Interc_max", "Interc_min", "RootDepth", \
                     "USLE_C", "SOIL_T10","USLE_P"]
LANDUSE_ATTR_DB = ["manning","i_max","i_min", "root_depth", "usle_c", "SOIL_T10"]
## Be cautious: the sequence from "Sand" to "Poreindex" is fixed because of soil_param.py.
SOIL_ATTR_LIST = ["Sand", "Clay", "WiltingPoint", "FieldCap", "Porosity","Density",\
                  "Conductivity", "Poreindex", "USLE_K", "Residual", ]
SOIL_ATTR_DB = ["sand", "clay","wp", "fc", "porosity","B_DENSITY","ks", "P_INDEX",\
                "usle_k", "rm"]
### There are 15 attributes in SoilLookup table now.
### They are [SOILCODE], [SNAM], [KS](Conductivity), [POROSITY], [FC](field capacity), [P_INDEX](Poreindex), [RM],
### [WP](wiltingpoint), [B_DENSITY], [SAND], [CLAY], [SILT], [USLE_K], [TEXTURE], [HG]
## Hydrological parameters: return-period coefficients [a, b]
coeTable = {"T2":[0.05, 0.48],"T10":[0.12, 0.52], "T100":[0.18,0.55]} ## used in radius.py
## Conventional Spatial Raster Data File Names (TauDEM outputs)
filledDem = "demFilledTau.tif"
flowDir = "flowDirTauD8.tif"
slope = "slopeTau.tif"
acc = "accTauD8.tif"
streamRaster = "streamRasterTau.tif"
## D-infinity flow model outputs
flowDirDinf = "flowDirDinfTau.tif"
dirCodeDinf = "dirCodeDinfTau.tif"
slopeDinf = "slopeDinfTau.tif"
weightDinf = "weightDinfTau.tif"
modifiedOutlet = "outletM.shp"
streamSkeleton = "streamSkeleton.tif"
streamOrder = "streamOrderTau.tif"
chNetwork = "chNetwork.txt"
chCoord = "chCoord.txt"
streamNet = "streamNet.shp"
subbasin = "subbasinTau.tif"
mask_to_ext = "mask.tif"
## masked file names (clipped to the watershed mask)
subbasinM = "subbasinTauM.tif"
flowDirM = "flowDirTauM.tif"
streamRasterM = "streamRasterTauM.tif"
## file names of layers imported into MongoDB
reachesOut = "reach.shp"
subbasinOut = "subbasin.tif"
flowDirOut = "flow_dir.tif"
streamLinkOut = "stream_link.tif"
## masked layers that are also imported into MongoDB
slopeM = "slope.tif"
filldemM = "dem.tif"
accM = "acc.tif"
streamOrderM = "stream_order.tif"
flowDirDinfM = "flow_dir_angle_dinf.tif"
dirCodeDinfM = "flow_dir_dinf.tif"
slopeDinfM = "slope_dinf.tif"
weightDinfM = "weight_dinf.tif"
subbasinVec = "subbasin.shp"
basinVec = "basin.shp"
chwidthName = "chwidth.tif"
landuseMFile = "landuse.tif"
soilTexture = "soil_texture.tif"
hydroGroup = "hydro_group.tif"
usleK = "usle_k.tif"
initSoilMoist = "moist_in.tif"
depressionFile = "depression.tif"
CN2File = "CN2.tif"
radiusFile = "radius.tif"
ManningFile = "Manning.tif"
velocityFile = "velocity.tif"
## flow time to the main river from each grid cell
t0_sFile = "t0_s.tif"
## standard deviation of t0_s
delta_sFile = "delta_s.tif"
## potential runoff coefficient
runoff_coefFile = "runoff_co.tif"
| SmileEric/SEIMS | preprocess/config.py | Python | gpl-2.0 | 6,003 | 0.01266 |
# -*- coding: utf-8 -*-
# outgoing/service.py
# Copyright (C) 2013-2017 LEAP
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
OutgoingMail module.
The OutgoingMail class sends mail, encrypting and signing it if needed.
"""
import re
from StringIO import StringIO
from copy import deepcopy
from email.parser import Parser
from email.encoders import encode_7or8bit
from email.mime.application import MIMEApplication
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
from twisted.mail import smtp
from twisted.internet import defer
from twisted.python.failure import Failure
from twisted.logger import Logger
from leap.common.check import leap_assert_type, leap_assert
from leap.common.events import emit_async, catalog
from leap.bitmask.keymanager.errors import KeyNotFound, KeyAddressMismatch
from leap.bitmask.mail.utils import validate_address
from leap.bitmask.mail.rfc3156 import MultipartEncrypted
from leap.bitmask.mail.rfc3156 import MultipartSigned
from leap.bitmask.mail.rfc3156 import encode_base64_rec
from leap.bitmask.mail.rfc3156 import RFC3156CompliantGenerator
from leap.bitmask.mail.rfc3156 import PGPSignature
from leap.bitmask.mail.rfc3156 import PGPEncrypted
# TODO
# [ ] rename this module to something else, service should be the implementor
# of IService
class OutgoingMail(object):
    """
    Sends Outgoing Mail, encrypting and signing if needed.
    """

    log = Logger()

    def __init__(self, from_address, keymanager, bouncer=None):
        """
        Initialize the outgoing mail service.

        :param from_address: The sender address.
        :type from_address: str
        :param keymanager: A KeyManager for retrieving recipient's keys.
        :type keymanager: leap.common.keymanager.KeyManager
        :param bouncer: Optional bouncer used to return failed messages to
                        the sender instead of raising.
        """
        # assert params
        leap_assert_type(from_address, (str, unicode))
        leap_assert('@' in from_address)

        # XXX it can be a zope.proxy too
        # leap_assert_type(keymanager, KeyManager)

        self._from_address = from_address
        self._keymanager = keymanager
        self._bouncer = bouncer
        self._senders = []

    def add_sender(self, sender):
        """
        Add an ISender to the outgoing service
        """
        self._senders.append(sender)

    def send_message(self, raw, recipient):
        """
        Sends a message to a recipient. Maybe encrypts and signs.

        :param raw: The raw message
        :type raw: str
        :param recipient: The recipient for the message
        :type recipient: smtp.User
        :return: a deferred which delivers the message when fired
        """
        d = self._maybe_encrypt_and_sign(raw, recipient)
        d.addCallback(self._route_msg, recipient, raw)
        d.addErrback(self.sendError, raw)
        return d

    def can_encrypt_for(self, recipient):
        """
        Tell whether a public key is available for ``recipient``.

        :param recipient: the recipient address to look up in the
                          keymanager.
        :type recipient: str
        :return: a Deferred firing True if the key is found, or False when
                 the keymanager raises KeyNotFound.
        :rtype: Deferred
        """
        def cb(_):
            return True

        def eb(failure):
            failure.trap(KeyNotFound)
            return False

        d = self._keymanager.get_key(recipient)
        d.addCallbacks(cb, eb)
        return d

    def sendSuccess(self, dest_addrstr):
        """
        Callback for a successful send.

        :param dest_addrstr: the destination address string.
        :type dest_addrstr: str
        """
        fromaddr = self._from_address
        self.log.info('Message sent from %s to %s' % (fromaddr, dest_addrstr))
        emit_async(catalog.SMTP_SEND_MESSAGE_SUCCESS,
                   fromaddr, dest_addrstr)

    def sendError(self, failure, origmsg):
        """
        Callback for an unsuccessful send.

        Bounces the original message back to the sender if a bouncer was
        configured; otherwise re-raises the wrapped exception.

        :param failure: The result from the last errback.
        :type failure: anything
        :param origmsg: the original, unencrypted, raw message, to be passed to
                        the bouncer.
        :type origmsg: str
        """
        # XXX: need to get the address from the original message to send signal
        # emit_async(catalog.SMTP_SEND_MESSAGE_ERROR, self._from_address,
        #            self._user.dest.addrstr)

        # TODO when we implement outgoing queues/long-term-retries, we could
        # examine the error *here* and delay the notification if it's just a
        # temporal error. We might want to notify the permanent errors
        # differently.

        self.log.error('Error while sending: {0!r}'.format(failure))

        if self._bouncer:
            self._bouncer.bounce_message(
                failure.getErrorMessage(), to=self._from_address,
                orig=origmsg)
        else:
            failure.raiseException()

    def _route_msg(self, encrypt_and_sign_result, recipient, raw):
        """
        Sends the msg using the ESMTPSenderFactory.

        :param encrypt_and_sign_result: A tuple containing the 'maybe'
                                        encrypted message and the recipient
        :type encrypt_and_sign_result: tuple
        :param recipient: recipient passed by the caller (rebound below from
                          the tuple)
        :type recipient: smtp.User
        :param raw: the original raw message, handed to the error path so it
                    can be bounced
        :type raw: str
        """
        message, recipient = encrypt_and_sign_result
        msg = message.as_string(False)

        # pick the first registered sender that accepts this destination
        d = None
        for sender in self._senders:
            if sender.can_send(recipient.dest.addrstr):
                self.log.debug('Sending message to %s with: %s'
                               % (recipient, str(sender)))
                d = sender.send(recipient, msg)
                break

        if d is None:
            # NOTE(review): Failure() with no arguments captures the
            # exception currently being handled; if no exception is active
            # at this point it raises NoCurrentExceptionError instead of
            # reporting "no sender available" -- confirm intended behavior.
            return self.sendError(Failure(), raw)

        emit_async(catalog.SMTP_SEND_MESSAGE_START,
                   self._from_address, recipient.dest.addrstr)
        d.addCallback(self.sendSuccess)
        d.addErrback(self.sendError, raw)
        return d

    def _maybe_encrypt_and_sign(self, raw, recipient, fetch_remote=True):
        """
        Attempt to encrypt and sign the outgoing message.

        The behaviour of this method depends on:

            1. the original message's content-type, and
            2. the availability of the recipient's public key.

        If the original message's content-type is "multipart/encrypted", then
        the original message is not altered. For any other content-type, the
        method attempts to fetch the recipient's public key. If the
        recipient's public key is available, the message is encrypted and
        signed; otherwise it is only signed.

        Note that, if the C{encrypted_only} configuration is set to True and
        the recipient's public key is not available, then the recipient
        address would have been rejected in SMTPDelivery.validateTo().

        The following table summarizes the overall behaviour of the gateway:

        +---------------------------------------------------+----------------+
        | content-type        | rcpt pubkey | enforce encr. | action         |
        +---------------------+-------------+---------------+----------------+
        | multipart/encrypted | any         | any           | pass           |
        | other               | available   | any           | encrypt + sign |
        | other               | unavailable | yes           | reject         |
        | other               | unavailable | no            | sign           |
        +---------------------+-------------+---------------+----------------+

        :param raw: The raw message
        :type raw: str
        :param recipient: The recipient for the message
        :type recipient: smtp.User
        :param fetch_remote: whether the keymanager may fetch the
                             recipient's key from a remote source
        :type fetch_remote: bool

        :return: A Deferred that will be fired with a MIMEMultipart message
                 and the original recipient Message
        :rtype: Deferred
        """
        # pass if the original message's content-type is "multipart/encrypted"
        origmsg = Parser().parsestr(raw)

        if origmsg.get_content_type() == 'multipart/encrypted':
            return defer.succeed((origmsg, recipient))

        from_address = validate_address(self._from_address)
        username, domain = from_address.split('@')
        to_address = validate_address(recipient.dest.addrstr)

        def maybe_encrypt_and_sign(message):
            d = self._encrypt_and_sign(
                message, to_address, from_address,
                fetch_remote=fetch_remote)
            d.addCallbacks(signal_encrypt_sign,
                           if_key_not_found_send_unencrypted,
                           errbackArgs=(message,))
            return d

        def signal_encrypt_sign(newmsg):
            emit_async(catalog.SMTP_END_ENCRYPT_AND_SIGN,
                       self._from_address,
                       "%s,%s" % (self._from_address, to_address))
            return newmsg, recipient

        def if_key_not_found_send_unencrypted(failure, message):
            # fall back to sign-only when the recipient key is unavailable
            failure.trap(KeyNotFound, KeyAddressMismatch)

            self.log.info('Will send unencrypted message to %s.' % to_address)
            emit_async(catalog.SMTP_START_SIGN, self._from_address, to_address)
            d = self._sign(message, from_address)
            d.addCallback(signal_sign)
            return d

        def signal_sign(newmsg):
            emit_async(catalog.SMTP_END_SIGN, self._from_address)
            return newmsg, recipient

        self.log.info("Will encrypt the message with %s and sign with %s."
                      % (to_address, from_address))
        emit_async(catalog.SMTP_START_ENCRYPT_AND_SIGN,
                   self._from_address,
                   "%s,%s" % (self._from_address, to_address))
        d = self._attach_key(origmsg, from_address)
        d.addCallback(maybe_encrypt_and_sign)
        return d

    def _attach_key(self, origmsg, from_address):
        """
        Attach the sender's public OpenPGP key to the message so the
        recipient can reply encrypted.

        If the key cannot be retrieved locally, the original message is
        returned unmodified.

        :param origmsg: The original message.
        :type origmsg: email.message.Message
        :param from_address: The sender address whose key is attached.
        :type from_address: str
        :return: A Deferred firing with the (possibly multipart-wrapped)
                 message.
        :rtype: Deferred
        """
        filename = "%s-email-key.asc" % (from_address,)

        def get_key_and_attach():
            d = self._keymanager.get_key(from_address, fetch_remote=False)
            d.addCallback(attach_key)
            return d

        def attach_key(from_key):
            msg = origmsg
            if not origmsg.is_multipart():
                # promote the single-part message to multipart so the key
                # can be carried as an attachment
                msg = MIMEMultipart()
                for h, v in origmsg.items():
                    msg.add_header(h, v)
                msg.attach(MIMEText(origmsg.get_payload(decode=True),
                                    origmsg.get_content_subtype()))
            keymsg = MIMEApplication(from_key.key_data, _subtype='pgp-keys',
                                     _encoder=lambda x: x)
            keymsg.add_header('content-disposition', 'attachment',
                              filename=filename)
            msg.attach(keymsg)
            return msg

        self.log.info("Will send %s public key as an attachment."
                      % (from_address))
        d = get_key_and_attach()
        d.addErrback(lambda _: origmsg)
        return d

    def _encrypt_and_sign(self, origmsg, encrypt_address, sign_address,
                          fetch_remote=True):
        """
        Create an RFC 3156 compliant PGP encrypted and signed message using
        C{encrypt_address} to encrypt and C{sign_address} to sign.

        :param origmsg: The original message
        :type origmsg: email.message.Message
        :param encrypt_address: The address used to encrypt the message.
        :type encrypt_address: str
        :param sign_address: The address used to sign the message.
        :type sign_address: str
        :param fetch_remote: whether the keymanager may fetch the encryption
                             key from a remote source
        :type fetch_remote: bool

        :return: A Deferred with the MultipartEncrypted message
        :rtype: Deferred
        """
        # create new multipart/encrypted message with 'pgp-encrypted' protocol

        def encrypt(res):
            newmsg, origmsg = res
            d = self._keymanager.encrypt(
                origmsg.as_string(unixfrom=False),
                encrypt_address, sign=sign_address,
                fetch_remote=fetch_remote)
            d.addCallback(lambda encstr: (newmsg, encstr))
            return d

        def create_encrypted_message(res):
            newmsg, encstr = res
            encmsg = MIMEApplication(
                encstr, _subtype='octet-stream', _encoder=encode_7or8bit)
            encmsg.add_header('content-disposition', 'attachment',
                              filename='msg.asc')
            # create meta message
            metamsg = PGPEncrypted()
            metamsg.add_header('Content-Disposition', 'attachment')
            # attach pgp message parts to new message
            newmsg.attach(metamsg)
            newmsg.attach(encmsg)
            return newmsg

        d = self._fix_headers(
            origmsg,
            MultipartEncrypted('application/pgp-encrypted'),
            sign_address)
        d.addCallback(encrypt)
        d.addCallback(create_encrypted_message)
        return d

    def _sign(self, origmsg, sign_address):
        """
        Create an RFC 3156 compliant PGP signed MIME message using
        C{sign_address}.

        :param origmsg: The original message
        :type origmsg: email.message.Message
        :param sign_address: The address used to sign the message.
        :type sign_address: str
        :return: A Deferred with the MultipartSigned message.
        :rtype: Deferred
        """
        # apply base64 content-transfer-encoding
        encode_base64_rec(origmsg)

        # get message text with headers and replace \n for \r\n
        fp = StringIO()
        g = RFC3156CompliantGenerator(
            fp, mangle_from_=False, maxheaderlen=76)
        g.flatten(origmsg)
        msgtext = re.sub('\r?\n', '\r\n', fp.getvalue())
        # make sure signed message ends with \r\n as per OpenPGP standard.
        if origmsg.is_multipart():
            if not msgtext.endswith("\r\n"):
                msgtext += "\r\n"

        def create_signed_message(res):
            (msg, _), signature = res
            sigmsg = PGPSignature(signature)
            # attach original message and signature to new message
            msg.attach(origmsg)
            msg.attach(sigmsg)
            return msg

        # fix headers and compute the detached signature concurrently
        dh = self._fix_headers(
            origmsg,
            MultipartSigned('application/pgp-signature', 'pgp-sha512'),
            sign_address)
        ds = self._keymanager.sign(
            msgtext, sign_address, digest_algo='SHA512',
            clearsign=False, detach=True, binary=False)
        d = defer.gatherResults([dh, ds])
        d.addCallback(create_signed_message)
        return d

    def _fix_headers(self, msg, newmsg, sign_address):
        """
        Move some headers from C{origmsg} to C{newmsg}, delete unwanted
        headers from C{origmsg} and add new headers to C{newmsg}.

        Outgoing messages are either encrypted and signed or just signed
        before being sent. Because of that, they are packed inside new
        messages and some manipulation has to be made on their headers.

        Allowed headers for passing through:

            - From
            - Date
            - To
            - Subject
            - Reply-To
            - References
            - In-Reply-To
            - Cc

        Headers to be added:

            - Message-ID (i.e. should not use origmsg's Message-Id)
            - Received (this is added automatically by twisted smtp API)
            - OpenPGP (see #4447)

        Headers to be deleted:

            - User-Agent

        :param msg: The original message.
        :type msg: email.message.Message
        :param newmsg: The new message being created.
        :type newmsg: email.message.Message
        :param sign_address: The address used to sign C{newmsg}
        :type sign_address: str

        :return: A Deferred with a tuple:
                 (new Message with the unencrypted headers,
                  original Message with headers removed)
        :rtype: Deferred
        """
        # work on a copy so the caller's message is left untouched
        origmsg = deepcopy(msg)

        # move headers from origmsg to newmsg
        headers = origmsg.items()
        passthrough = [
            'from', 'date', 'to', 'subject', 'reply-to', 'references',
            'in-reply-to', 'cc'
        ]
        headers = filter(lambda x: x[0].lower() in passthrough, headers)
        for hkey, hval in headers:
            newmsg.add_header(hkey, hval)
            del (origmsg[hkey])

        # add a new message-id to newmsg
        newmsg.add_header('Message-Id', smtp.messageid())

        # delete user-agent from origmsg
        del (origmsg['user-agent'])

        def add_openpgp_header(signkey):
            username, domain = sign_address.split('@')
            newmsg.add_header(
                'OpenPGP', 'id=%s' % signkey.fingerprint,
                url='https://%s/key/%s' % (domain, username),
                preference='signencrypt')
            return newmsg, origmsg

        d = self._keymanager.get_key(sign_address, private=True)
        d.addCallback(add_openpgp_header)
        return d
| leapcode/bitmask-dev | src/leap/bitmask/mail/outgoing/service.py | Python | gpl-3.0 | 17,108 | 0 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.